Compare commits

..

5 Commits

36 changed files with 391 additions and 1108 deletions

View File

@ -1,24 +1,25 @@
name: Markdown Lint
name: Markdown check
on:
workflow_dispatch:
push:
branches:
- '**'
- '*'
paths:
- '*.md'
- 'develop/*.md'
- 'scripts/**.md'
- '.markdownlint.jsonc'
jobs:
markdownlint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: DavidAnson/markdownlint-cli2-action@v16
- uses: actions/checkout@v3
- uses: DavidAnson/markdownlint-cli2-action@v9
with:
command: config
globs: |
.markdownlint.jsonc
*.md
develop/*.md
scripts/**.md
scripts/**.md

View File

@ -3,13 +3,16 @@ name: Update hard-coded version
on: workflow_dispatch
jobs:
update-version:
if: >-
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
contains(github.ref, 'refs/heads/release/v') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Checkout
uses: actions/checkout@v3
with:
token: ${{ secrets.PUSH_TOKEN }}
@ -22,9 +25,9 @@ jobs:
run: echo "${{ env.version }}" > version
- name: Commit & push changes
uses: EndBug/add-and-commit@v9
uses: EndBug/add-and-commit@v8
with:
author_name: github-actions[bot]
author_email: github-actions[bot]@users.noreply.github.com
message: Update hard-coded version to ${{ env.version }}
message: Update hard-coded version to v${{ env.version }}
add: version

2
.gitignore vendored
View File

@ -12,5 +12,3 @@ tests/puppeteer/node_modules
tests/puppeteer/work_directory
tests/puppeteer/package.json
tests/puppeteer/package-lock.json
scripts/sdkjs_common/jsdoc/node_modules
scripts/sdkjs_common/jsdoc/package-lock.json

View File

@ -196,8 +196,9 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
**Note**: The created database must have **onlyoffice** both for user and password.
```bash
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
```
3. Configure the database:

View File

@ -21,7 +21,6 @@ parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp"
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")

View File

@ -61,6 +61,7 @@ Clone development modules to the work dir
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
```bash
cd ../..
git clone https://github.com/ONLYOFFICE/sdkjs.git
git clone https://github.com/ONLYOFFICE/web-apps.git
git clone https://github.com/ONLYOFFICE/server.git
@ -74,15 +75,14 @@ along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow.
The folders `server` is optional
**Note**: Run command with the current working directory
containing `sdkjs`, `web-apps`...
**Note**: ONLYOFFICE server uses port 80.
Look for another application using port 80 and stop it
**Note**: Server start with `sdkjs` and `web-apps` takes 15 minutes
and takes 20 minutes with `server`
**Note**: Run command from work dir with development modules
### docker run on Windows (PowerShell)
**Note**: Run PowerShell as administrator to fix EACCES error when installing

View File

@ -384,7 +384,7 @@ def cmd2(prog, args=[], is_no_errors=False):
sys.exit("Error (" + prog + "): " + str(ret))
return ret
def cmd_exe(prog, args, is_no_errors=False):
def cmd_exe(prog, args):
prog_dir = os.path.dirname(prog)
env_dir = os.environ
if ("linux" == host_platform()):
@ -406,7 +406,7 @@ def cmd_exe(prog, args, is_no_errors=False):
command += (" \"" + arg + "\"")
process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir)
ret = process.wait()
if ret != 0 and True != is_no_errors:
if ret != 0:
sys.exit("Error (" + prog + "): " + str(ret))
return ret
@ -1046,15 +1046,15 @@ def web_apps_addons_param():
def download(url, dst):
return cmd_exe("curl", ["-L", "-o", dst, url])
def extract(src, dst, is_no_errors=False):
def extract(src, dst):
app = "7za" if ("mac" == host_platform()) else "7z"
return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors)
return cmd_exe(app, ["x", "-y", src, "-o" + dst])
def extract_unicode(src, dst, is_no_errors=False):
def extract_unicode(src, dst):
if "windows" == host_platform():
run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"])
return
return extract(src, dst, is_no_errors)
return extract(src, dst)
def archive_folder(src, dst):
app = "7za" if ("mac" == host_platform()) else "7z"
@ -1354,7 +1354,7 @@ def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False, i
plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content"
plugins_list_config = config.option("sdkjs-plugin")
if isXp:
plugins_list_config="photoeditor, macros, highlightcode, doc2md"
plugins_list_config="photoeditor, macros, drawio, highlightcode, doc2md"
if ("" == plugins_list_config):
return
plugins_list = plugins_list_config.rsplit(", ")
@ -1743,14 +1743,3 @@ def check_tools():
create_dir(directory + "/qt")
cmd("python", [directory + "/arm/build_qt.py", "--arch", "arm64", directory + "/qt/arm64"])
return
def apply_patch(file, patch):
patch_content = readFile(patch)
index1 = patch_content.find("<<<<<<<")
index2 = patch_content.find("=======")
index3 = patch_content.find(">>>>>>>")
file_content_old = patch_content[index1 + 7:index2].strip()
file_content_new = patch_content[index2 + 7:index3].strip()
#file_content_new = "\n#if 0" + file_content_old + "#else" + file_content_new + "#endif\n"
replaceInFile(file, file_content_old, file_content_new)
return

View File

@ -146,25 +146,15 @@ def build_sdk_native(directory, minimize=True):
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
return
def build_sdkjs_develop(root_dir):
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
def build_js_develop(root_dir):
#_run_npm_cli(root_dir + "/sdkjs/build")
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
build_sdkjs_develop(root_dir)
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
_run_npm(root_dir + external_folder + "/web-apps/build")
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])

View File

@ -49,6 +49,7 @@ cpp_flags = [
"-DUCONFIG_NO_COLLATION=0",
"-DUCONFIG_NO_FORMATTING=0",
"-DUCONFIG_NO_LEGACY_CONVERSION=1",
"-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
"-DUCONFIG_NO_TRANSLITERATION=0",

View File

@ -62,7 +62,6 @@ def make():
vlc_dir = base_dir + "/vlc"
vlc_version = "3.0.18"
tools_dir = base.get_script_dir() + "/../tools"
old_cur = os.getcwd()
os.chdir(base_dir)
@ -74,7 +73,7 @@ def make():
base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
if "windows" == base.host_platform():
base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
base.create_dir("build")
base.copy_file("tools/ignore-cache-time.patch", "vlc")
@ -84,7 +83,7 @@ def make():
base.copy_file("tools/win_64/build.patch", "vlc")
docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
if config.check_option("platform", "win_32"):
base.copy_file("tools/win_32/build.patch", "vlc")
docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
@ -92,11 +91,9 @@ def make():
# linux
if config.check_option("platform", "linux_64"):
base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
# mac
if "mac" == base.host_platform():
os.chdir(vlc_dir)

View File

@ -27,16 +27,8 @@ def make():
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
if not base.is_dir(base_dir + "/socket.io-client-cpp"):
base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
# patches
base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
# no tls realization (remove if socket.io fix this)
dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"

View File

@ -213,8 +213,6 @@ def make():
base.create_dir(root_dir + "/editors")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
if len(os.listdir(root_dir + "/editors/sdkjs")) == 0:
base.delete_dir(root_dir + "/editors/sdkjs") # delete empty folder. for bug 62528
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
base.delete_file(file)

View File

@ -107,8 +107,7 @@ def make():
server_addons = []
if (config.option("server-addons") != ""):
server_addons = config.option("server-addons").rsplit(", ")
#server-lockstorage is private
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
if ("server-lockstorage" in server_addons):
server_config["editorDataStorage"] = "editorDataRedis"
sdkjs_addons = []
@ -132,8 +131,6 @@ def make():
sql["type"] = config.option("sql-type")
if (config.option("db-port") != ""):
sql["dbPort"] = config.option("db-port")
if (config.option("db-name") != ""):
sql["dbName"] = config.option("db-name")
if (config.option("db-user") != ""):
sql["dbUser"] = config.option("db-user")
if (config.option("db-pass") != ""):

View File

@ -483,8 +483,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
mysqlPath += 'bin'
return mysqlPath
def get_mysqlLoginString():
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
def get_mysqlLoginSrting():
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
def get_mysqlServersInfo():
arrInfo = []
@ -511,14 +511,14 @@ def get_mysqlServersInfo():
def check_mysqlServer():
base.print_info('Check MySQL Server')
dependence = CDependencies()
mysqlLoginSrt = get_mysqlLoginString()
mysqlLoginSrt = get_mysqlLoginSrting()
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
if (host_platform != 'windows'):
result = os.system(mysqlLoginSrt + ' -e "exit"')
if (result == 0):
connectionResult = base.run_command(connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
print('MySQL configuration is valid')
dependence.sqlPath = 'mysql'
return dependence
@ -535,7 +535,7 @@ def check_mysqlServer():
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
print(mysql_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -559,43 +559,23 @@ def check_mysqlServer():
return dependence
def check_MySQLConfig(mysqlPath = ''):
result = True
mysqlLoginSrt = get_mysqlLoginString()
mysqlLoginSrt = get_mysqlLoginSrting()
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
print('Database "' + config.option("db-name") + '" not found')
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
if (not result):
return False
print('Creating ' + config.option("db-name") + ' tables ...')
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
print('Database onlyoffice not found')
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
print('Password encryption is not valid')
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
return result
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
mysqlLoginSrt = get_mysqlLoginString()
print('CREATE DATABASE ' + dbName + ';')
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
print('failed CREATE DATABASE ' + dbName + ';')
return False
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# return False
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# return False
return True
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
def execMySQLScript(mysql_path_to_bin, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginString()
mysqlLoginSrt = get_mysqlLoginSrting()
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
if (code != 0):
print('Execution failed!')
return False
@ -604,7 +584,7 @@ def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
print('Setting MySQL password encrypting...')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
if (code != 0):
print('Setting password encryption failed!')
return False
@ -631,7 +611,7 @@ def get_postrgre_path_to_bin(postgrePath = ''):
def get_postgreLoginSrting(userName):
if (host_platform == 'windows'):
return 'psql -U' + userName + ' '
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U' + userName + ' -hlocalhost '
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
def get_postgreSQLInfoByFlag(flag):
arrInfo = []
@ -667,7 +647,7 @@ def check_postgreSQL():
result = os.system(postgreLoginSrt + ' -c "\q"')
connectionResult = base.run_command(connectionString)['stdout']
if (result != 0 or connectionResult.find(config.option("db-port")) == -1):
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
print('Valid PostgreSQL not found!')
dependence.append_install('PostgreSQL')
dependence.append_uninstall('PostgreSQL')
@ -677,7 +657,7 @@ def check_postgreSQL():
return dependence
arrInfo = get_postgreSQLInfo()
base.set_env('PGPASSWORD', config.option("db-pass"))
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
for info in arrInfo:
if (base.is_dir(info['Location']) == False):
continue
@ -685,7 +665,7 @@ def check_postgreSQL():
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find(config.option("db-port")) != -1):
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
print(postgre_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -703,12 +683,12 @@ def check_postgreSQL():
def check_postgreConfig(postgrePath = ''):
result = True
if (host_platform == 'windows'):
base.set_env('PGPASSWORD', config.option("db-pass"))
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
rootUser = install_params['PostgreSQL']['root']
dbUser = config.option("db-user")
dbName = config.option("db-name")
dbPass = config.option("db-pass")
dbUser = install_params['PostgreSQL']['dbUser']
dbName = install_params['PostgreSQL']['dbName']
dbPass = install_params['PostgreSQL']['dbPass']
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
postgreLoginRoot = get_postgreLoginSrting(rootUser)
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
@ -725,7 +705,7 @@ def check_postgreConfig(postgrePath = ''):
base.print_info('Creating ' + dbName + ' user...')
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find(config.option("db-name")) == -1):
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
print('Database ' + dbName + ' not found')
base.print_info('Creating ' + dbName + ' database...')
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
@ -904,13 +884,13 @@ def install_gruntcli():
def install_mysqlserver():
if (host_platform == 'windows'):
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
elif (host_platform == 'linux'):
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
code = os.system('sudo apt-get -y install zsh htop') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
return os.system('yes | sudo apt install mysql-server') and code
return 1
@ -932,7 +912,7 @@ def install_postgresql():
file_name = "install.exe"
base.download(download_url, file_name)
base.print_info("Install PostgreSQL...")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
else:
base.print_info("Install PostgreSQL...")
install_command = 'sudo apt install postgresql -y'
@ -943,7 +923,7 @@ def install_postgresql():
if (host_platform == 'windows'):
base.delete_file(file_name)
else:
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
return code
@ -994,11 +974,18 @@ install_params = {
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
'Git': '/VERYSILENT /NORESTART',
'MySQLServer': {
'port': '3306',
'user': 'root',
'pass': 'onlyoffice',
'version': '8.0.21'
},
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
'PostgreSQL': {
'root': 'postgres'
'root': 'postgres',
'dbPort': '5432',
'dbName': 'onlyoffice',
'dbUser': 'onlyoffice',
'dbPass': 'onlyoffice'
}
}
uninstall_params = {

View File

@ -49,18 +49,7 @@ def run_integration_example():
def start_linux_services():
base.print_info('Restart MySQL Server')
def update_config(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
def make_start():
base.configure_common_apps()
@ -75,8 +64,15 @@ def make_start():
start_linux_services()
def make_configure(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
base.print_info('Build modules')
update_config(args)
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
def make_install():
platform = base.host_platform()

View File

@ -56,6 +56,13 @@ save text files with reports.
"reportFolder": "build_tools/scripts/license_checker/reports"
```
* `licensePath` specifies the path to the license template.
**For example:**
```json
"licensePath": "build_tools/scripts/license_checker/license_template.txt"
```
* `printChecking` specifies whether to output
information about which file is
being checked to the console.
@ -103,14 +110,36 @@ Possible array values:
```json
"fileExtensions": [".js"]
```
* `licensePath` specifies the path to the license template.
* `startMultiComm` the line that starts the multiline comment.
**For example:**
```json
"licensePath": "header.license"
"startMultiComm": "/*"
```
* `endMultiComm` the line that ends the multiline comment.
You should carefully consider the formatting
of the string, all spaces are taken into account.
This affects how the license check works.
**For example:**
```json
"endMultiComm": " */"
```
Space at the beginning for a prettier comment.
* `prefix` the line on which each comment
line will begin, except for the
beginning and end.
**For example:**
```json
"prefix": " *"
```
Space at the beginning for a prettier comment.
* `ignoreListDir` folder paths to ignore.
**For example:**

View File

@ -1,6 +1,7 @@
{
"basePath": "../../../",
"reportFolder": "build_tools/scripts/license_checker/reports",
"licensePath": "build_tools/scripts/license_checker/header.license",
"printChecking": false,
"printReports": false,
"fix": ["OUTDATED"],
@ -8,7 +9,9 @@
{
"dir": "core",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDir": [
"core/build",
"core/Common/cfcpp/test",
@ -56,7 +59,9 @@
{
"dir": "core-ext",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDir": [
"core-ext/AutoTester",
"core-ext/cell_android",
@ -78,7 +83,9 @@
{
"dir": "sdkjs",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
@ -99,7 +106,9 @@
{
"dir": "sdkjs-forms",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules",
"vendor"
@ -108,7 +117,9 @@
{
"dir": "sdkjs-ooxml",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules",
"vendor"
@ -117,7 +128,9 @@
{
"dir": "web-apps",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules",
"vendor",
@ -125,7 +138,6 @@
],
"ignoreListDir": [
"web-apps/apps/common/mobile",
"web-apps/apps/common/main/lib/mods",
"web-apps/apps/documenteditor/mobile",
"web-apps/apps/spreadsheeteditor/mobile",
"web-apps/apps/presentationeditor/mobile",
@ -133,6 +145,7 @@
],
"ignoreListFile": [
"web-apps/apps/api/documents/api.js",
"web-apps/apps/common/main/lib/mods/perfect-scrollbar.js",
"web-apps/apps/common/main/lib/core/application.js",
"web-apps/apps/common/main/lib/core/keymaster.js",
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
@ -141,7 +154,9 @@
{
"dir": "web-apps-mobile",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules",
"vendor"
@ -150,7 +165,9 @@
{
"dir": "server",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDir": [
"server/FileConverter/bin"
],
@ -161,7 +178,9 @@
{
"dir": "server-lockstorage",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules"
]
@ -169,7 +188,9 @@
{
"dir": "server-license",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules"
]
@ -177,7 +198,9 @@
{
"dir": "server-license-key",
"fileExtensions": [".js"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"node_modules"
]
@ -185,7 +208,9 @@
{
"dir": "editors-ios",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"ignoreListDirName": [
"vendor",
"Vendor",

View File

@ -1,31 +1,28 @@
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/
(c) Copyright Ascensio System SIA 2010-2023
This program is a free software product. You can redistribute it and/or
modify it under the terms of the GNU Affero General Public License (AGPL)
version 3 as published by the Free Software Foundation. In accordance with
Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
that Ascensio System SIA expressly excludes the warranty of non-infringement
of any third-party rights.
This program is distributed WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
street, Riga, Latvia, EU, LV-1050.
The interactive user interfaces in modified source and object code versions
of the Program must display Appropriate Legal Notices, as required under
Section 5 of the GNU AGPL version 3.
Pursuant to Section 7(b) of the License you must retain the original Product
logo when distributing the program. Pursuant to Section 7(e) we decline to
grant you any rights under trademark law for use of our trademarks.
All the Product's GUI elements, including illustrations and icon sets, as
well as technical writing content are licensed under the terms of the
Creative Commons Attribution-ShareAlike 4.0 International. See the License
terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode

View File

@ -25,6 +25,9 @@ class Config(object):
Attributes:
dir: Directory to check.
fileExtensions: file extensions to check.
startMultiComm: characters to start a multi-line comment.
endMultiComm: characters to end a multi-line comment.
prefix: prefix for multiline comments
ignoreListDir: Ignored folder paths.
ignoreListDirName: Ignored folder names.
ignoreListFile: Ignored file paths.
@ -33,7 +36,9 @@ class Config(object):
def __init__(self,
dir: str,
fileExtensions: list[str],
licensePath: str = 'header.license',
startMultiComm: str,
endMultiComm: str,
prefix: str = '',
allowListFile: list[str] = [],
ignoreListDir: list[str] = [],
ignoreListDirName: list[str] = [],
@ -41,19 +46,13 @@ class Config(object):
self._dir = dir
self._fileExtensions = fileExtensions
self._startMultiComm = startMultiComm
self._endMultiComm = endMultiComm
self._prefix = prefix
self._allowListFile = allowListFile
self._ignoreListDir = ignoreListDir
self._ignoreListDirName = ignoreListDirName
self._ignoreListFile = ignoreListFile
"""Read license template."""
with open(licensePath, 'r', encoding="utf8") as file:
lines = file.readlines()
if not lines:
raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is not it empty?')
non_empty_lines = [s for s in lines if not s.isspace()]
self._startMultiComm = non_empty_lines[0]
self._endMultiComm = non_empty_lines[-1]
self._license_lines = lines
def getDir(self) -> str:
return self._dir
@ -63,8 +62,8 @@ class Config(object):
return self._startMultiComm
def getEndMultiComm(self) -> str:
return self._endMultiComm
def getLicense(self) -> list[str]:
return self._license_lines
def getPrefix(self) -> str:
return self._prefix
def getAllowListFile(self) -> list[str]:
return self._allowListFile
def getIgnoreListDir(self) -> list[str]:
@ -78,6 +77,7 @@ with open(CONFIG_PATH, 'r') as j:
_json: dict = json.load(j)
BASE_PATH: str = _json.get('basePath') or '../../../'
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'
LICENSE_TEMPLATE_PATH: str = _json.get('licensePath') or 'build_tools/scripts/license_checker/header.license'
if (_json.get('fix')):
try:
FIX: list[ErrorType] = list(map(lambda x: FIX_TYPES[x], _json.get('fix')))
@ -93,6 +93,23 @@ with open(CONFIG_PATH, 'r') as j:
os.chdir(BASE_PATH)
with open(LICENSE_TEMPLATE_PATH, 'r') as f:
LICENSE: list[str] = f.readlines()
if not LICENSE:
raise Exception(f'Error getting license template. Cannot read {LICENSE_TEMPLATE_PATH} file. Is not it empty?')
def getLicense(start: str, prefix: str, end: str) -> list[str]:
"""Returns a valid license for any kind of comment prefix."""
result = [start]
for i in LICENSE:
if i == '\n':
result.append(prefix)
else:
result.append(f'{" ".join([prefix, i.strip()])}')
result.append(prefix)
result.append(end)
return result
class Error(object):
def __init__(self, errorType: ErrorType) -> None:
self._errorType = errorType
@ -127,6 +144,8 @@ class Checker(object):
self._reports: list[Report] = []
def getReports(self):
return self._reports
def getLicense(self):
return getLicense(start=self._config.getStartMultiComm(), prefix=self._config.getPrefix(), end=self._config.getEndMultiComm())
def _checkLine(self, line: str, prefix: str) -> bool:
"""Checks if a line has a prefix."""
"""Trim to catch invalid license without leading spaces"""
@ -153,7 +172,7 @@ class Checker(object):
break
return result
def _checkLicense(self, test: list[str], pathToFile: str) -> Report:
license = self._config.getLicense()
license = self.getLicense()
if len(license) != len(test):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.LEN_MISMATCH),
@ -161,29 +180,32 @@ class Checker(object):
invalidLinesCount = 0
lastWrongLine = 0
for i in range(len(license)):
if (license[i] != test[i]):
if (license[i] != test[i].strip('\n')):
invalidLinesCount += 1
lastWrongLine = i
if (invalidLinesCount == 1):
r = r'\d\d\d\d'
testDate = re.findall(r, test[lastWrongLine])
licenseDate = re.findall(r, license[lastWrongLine])
r = r'\d\d\d\d\-\d\d\d\d'
testDate = re.search(r, test[lastWrongLine])
licenseDate = re.search(r, license[lastWrongLine])
if not (testDate and licenseDate):
if testDate and licenseDate:
testDate = testDate.group()
licenseDate = licenseDate.group()
else:
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.INVALID_LICENSE),
message=f'Something wrong...')
testLastYear = int(testDate[-1])
licenseLastYear = int(licenseDate[-1])
if (testLastYear < licenseLastYear):
testLastYear = testDate.split('-')[1]
licenseLastYear = licenseDate.split('-')[1]
if (int(testLastYear) < int(licenseLastYear)):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.OUTDATED),
message=f'Found date {testLastYear}, expected {licenseLastYear}')
message=f'Found date {testDate}, expected {licenseDate}')
else:
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.INVALID_LICENSE),
message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
message=f"Found something similar to the date: {testDate}, but it's not correct. Expected: {licenseDate}")
elif (invalidLinesCount > 0):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.INVALID_LICENSE),
@ -264,9 +286,8 @@ class Fixer(object):
with open(pathToFile, 'r', encoding="utf8") as file:
buffer = file.readlines()
with open(pathToFile, 'w', encoding="utf8") as file:
license = self._config.getLicense()
file.writelines(license)
file.write('\n')
license = self._checker.getLicense()
file.writelines(map(lambda x: "".join([x, '\n']), license))
file.writelines(buffer)
return
def _fixLicense(self, pathToFile: str):
@ -280,8 +301,8 @@ class Fixer(object):
for i in oldLicense:
buffer.remove(i)
with open(pathToFile, 'w', encoding=writeEncoding) as file:
license = self._config.getLicense()
file.writelines(license)
license = self._checker.getLicense()
file.writelines(map(lambda x: "".join([x, '\n']), license))
file.writelines(buffer)
return
@ -307,7 +328,7 @@ def writeReports(reports: list[Report]) -> None:
for i in reports:
files[i.getError().getErrorType().name].append(i)
for i in ErrorType:
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w', encoding="utf8") as f:
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w') as f:
f.writelines(map(lambda x: "".join([x.report(), '\n']), files.get(i.name)))
for config in CONFIGS:

View File

@ -30,59 +30,6 @@ if utils.is_macos():
builder_product_name = "Document Builder"
if utils.is_linux():
builder_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "builder/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "builder/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "builder/linux/rhel/"
}
]
desktop_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "desktop/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "desktop/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "desktop/linux/rhel/"
},
{
"make": "rpm-suse",
"src": "rpm-suse/build/RPMS/*/*.rpm",
"dst": "desktop/linux/suse/"
}
]
server_make_targets = [
{
"make": "deb",
"src": "deb/*.deb",
"dst": "server/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "server/linux/rhel/"
},
{
"make": "tar",
"src": "*.tar*",
"dst": "server/linux/snap/"
}
]
desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"]
builder_make_targets = ["deb", "rpm"] # tar
server_make_targets = ["deb", "rpm", "tar"]

View File

@ -137,7 +137,7 @@ def make_linux():
utils.set_cwd("document-builder-package")
utils.log_h2("builder build")
make_args = [t["make"] for t in branding.builder_make_targets]
make_args = branding.builder_make_targets
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -146,10 +146,32 @@ def make_linux():
utils.set_summary("builder build", ret)
if common.deploy:
for t in branding.builder_make_targets:
utils.log_h2("builder " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("builder " + t["make"] + " deploy", ret)
if ret:
if "tar" in branding.builder_make_targets:
utils.log_h2("builder tar deploy")
ret = s3_upload(
utils.glob_path("tar/*.tar.gz"),
"builder/linux/generic/")
utils.set_summary("builder tar deploy", ret)
if "deb" in branding.builder_make_targets:
utils.log_h2("builder deb deploy")
ret = s3_upload(
utils.glob_path("deb/*.deb"),
"builder/linux/debian/")
utils.set_summary("builder deb deploy", ret)
if "rpm" in branding.builder_make_targets:
utils.log_h2("builder rpm deploy")
ret = s3_upload(
utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
"builder/linux/rhel/")
utils.set_summary("builder rpm deploy", ret)
else:
if "tar" in branding.builder_make_targets:
utils.set_summary("builder tar deploy", False)
if "deb" in branding.builder_make_targets:
utils.set_summary("builder deb deploy", False)
if "rpm" in branding.builder_make_targets:
utils.set_summary("builder rpm deploy", False)
utils.set_cwd(common.workspace_dir)
return

View File

@ -73,6 +73,9 @@ def make_prepare():
"-Version", package_version,
"-Arch", arch
]
if not branding.onlyoffice:
args += ["-BrandingDir", utils.get_abspath(common.workspace_dir + "\\" \
+ common.branding + "\\desktop-apps\\win-linux\\package\\windows")]
if xp:
args += ["-Target", "xp"]
if common.sign:
@ -89,6 +92,9 @@ def make_zip():
"-Version", package_version,
"-Arch", arch
]
if not branding.onlyoffice:
args += ["-BrandingDir", utils.get_abspath(common.workspace_dir + "\\" \
+ common.branding + "\\desktop-apps\\win-linux\\package\\windows")]
if xp:
args += ["-Target", "xp"]
# if common.sign:
@ -113,6 +119,9 @@ def make_inno():
"-Version", package_version,
"-Arch", arch
]
if not branding.onlyoffice:
args += ["-BrandingDir", utils.get_abspath(common.workspace_dir + "\\" \
+ common.branding + "\\desktop-apps\\win-linux\\package\\windows")]
if common.sign:
args += ["-Sign"]
@ -175,6 +184,9 @@ def make_advinst():
"-Version", package_version,
"-Arch", arch
]
if not branding.onlyoffice:
args += ["-BrandingDir", utils.get_abspath(common.workspace_dir + "\\" \
+ common.branding + "\\desktop-apps\\win-linux\\package\\windows")]
if common.sign:
args += ["-Sign"]
@ -329,7 +341,7 @@ def make_linux():
utils.set_cwd("desktop-apps/win-linux/package/linux")
utils.log_h2("desktop build")
make_args = [t["make"] for t in branding.desktop_make_targets]
make_args = branding.desktop_make_targets
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -337,11 +349,68 @@ def make_linux():
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("desktop build", ret)
rpm_arch = "*"
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
if common.deploy:
for t in branding.desktop_make_targets:
utils.log_h2("desktop " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("desktop " + t["make"] + " deploy", ret)
if ret:
utils.log_h2("desktop tar deploy")
if "tar" in branding.desktop_make_targets:
ret = s3_upload(
utils.glob_path("tar/*.tar*"),
"desktop/linux/generic/")
utils.set_summary("desktop tar deploy", ret)
if "deb" in branding.desktop_make_targets:
utils.log_h2("desktop deb deploy")
ret = s3_upload(
utils.glob_path("deb/*.deb"),
"desktop/linux/debian/")
utils.set_summary("desktop deb deploy", ret)
if "deb-astra" in branding.desktop_make_targets:
utils.log_h2("desktop deb astra deploy")
ret = s3_upload(
utils.glob_path("deb-astra/*.deb"),
"desktop/linux/astra/")
utils.set_summary("desktop deb astra deploy", ret)
if "rpm" in branding.desktop_make_targets:
utils.log_h2("desktop rpm deploy")
ret = s3_upload(
utils.glob_path("rpm/build/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/rhel/")
utils.set_summary("desktop rpm deploy", ret)
if "suse-rpm" in branding.desktop_make_targets:
utils.log_h2("desktop rpm suse deploy")
ret = s3_upload(
utils.glob_path("rpm-suse/build/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/suse/")
utils.set_summary("desktop rpm suse deploy", ret)
if "apt-rpm" in branding.desktop_make_targets:
utils.log_h2("desktop rpm alt deploy")
ret = s3_upload(
utils.glob_path("rpm-alt/build/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/altlinux/")
utils.set_summary("desktop rpm alt deploy", ret)
if "urpmi" in branding.desktop_make_targets:
utils.log_h2("desktop rpm rosa deploy")
ret = s3_upload(
utils.glob_path("rpm-rosa/build/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/rosa/")
utils.set_summary("desktop rpm rosa deploy", ret)
else:
if "tar" in branding.desktop_make_targets:
utils.set_summary("desktop tar deploy", False)
if "deb" in branding.desktop_make_targets:
utils.set_summary("desktop deb deploy", False)
if "deb-astra" in branding.desktop_make_targets:
utils.set_summary("desktop deb astra deploy", False)
if "rpm" in branding.desktop_make_targets:
utils.set_summary("desktop rpm deploy", False)
if "rpm-suse" in branding.desktop_make_targets:
utils.set_summary("desktop rpm suse deploy", False)
if "rpm-alt" in branding.desktop_make_targets:
utils.set_summary("desktop rpm alt deploy", False)
if "rpm-rosa" in branding.desktop_make_targets:
utils.set_summary("desktop rpm rosa deploy", False)
utils.set_cwd(common.workspace_dir)
return

View File

@ -61,8 +61,7 @@ def make_linux(edition):
utils.set_cwd("document-server-package")
utils.log_h2("server " + edition + " build")
make_args = [t["make"] for t in branding.server_make_targets]
make_args += ["-e", "PRODUCT_NAME=" + product_name]
make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -71,10 +70,40 @@ def make_linux(edition):
utils.set_summary("server " + edition + " build", ret)
if common.deploy:
for t in branding.server_make_targets:
utils.log_h2("server " + edition + " " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
if ret:
if "deb" in branding.server_make_targets:
utils.log_h2("server " + edition + " deb deploy")
ret = s3_upload(
utils.glob_path("deb/*.deb"),
"server/linux/debian/")
utils.set_summary("server " + edition + " deb deploy", ret)
if "rpm" in branding.server_make_targets:
utils.log_h2("server " + edition + " rpm deploy")
ret = s3_upload(
utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
"server/linux/rhel/")
utils.set_summary("server " + edition + " rpm deploy", ret)
if "apt-rpm" in branding.server_make_targets:
utils.log_h2("server " + edition + " apt-rpm deploy")
ret = s3_upload(
utils.glob_path("apt-rpm/builddir/RPMS/*/*.rpm"),
"server/linux/altlinux/")
utils.set_summary("server " + edition + " apt-rpm deploy", ret)
if "tar" in branding.server_make_targets:
utils.log_h2("server " + edition + " snap deploy")
ret = s3_upload(
utils.glob_path("*.tar.gz"),
"server/linux/snap/")
utils.set_summary("server " + edition + " snap deploy", ret)
else:
if "deb" in branding.server_make_targets:
utils.set_summary("server " + edition + " deb deploy", False)
if "rpm" in branding.server_make_targets:
utils.set_summary("server " + edition + " rpm deploy", False)
if "apt-rpm" in branding.server_make_targets:
utils.set_summary("server " + edition + " apt-rpm deploy", False)
if "tar" in branding.server_make_targets:
utils.set_summary("server " + edition + " snap deploy", False)
utils.set_cwd(common.workspace_dir)
return

View File

@ -2,8 +2,6 @@
import os
import shutil
import re
import argparse
def readFile(path):
with open(path, "r", errors='replace') as file:
filedata = file.read()
@ -114,8 +112,6 @@ class EditorApi(object):
line = line.replace("}", "")
lineWithoutSpaces = line.replace(" ", "")
if not is_found_function and 0 == line.find("function "):
if -1 == decoration.find("@constructor"):
return
codeCorrect += (line + addon_for_func + "\n")
is_found_function = True
if not is_found_function and -1 != line.find(".prototype."):
@ -181,7 +177,7 @@ class EditorApi(object):
def generate(self):
for file in self.files:
file_content = readFile(f'{sdkjs_dir}/{file}')
file_content = readFile(file)
arrRecords = file_content.split("/**")
arrRecords = arrRecords[1:-1]
for record in arrRecords:
@ -189,8 +185,8 @@ class EditorApi(object):
self.numfile += 1
correctContent = ''.join(self.records)
correctContent += "\n"
os.mkdir(args.destination + self.folder)
writeFile(args.destination + self.folder + "/api.js", correctContent)
os.mkdir('deploy/api_builder/' + self.folder)
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
return
def convert_to_interface(arrFiles, sEditorType):
@ -199,27 +195,12 @@ def convert_to_interface(arrFiles, sEditorType):
editor.generate()
return
sdkjs_dir = "../../../sdkjs"
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate documentation")
parser.add_argument(
"destination",
type=str,
help="Destination directory for the generated documentation",
nargs='?', # Indicates the argument is optional
default="../../../onlyoffice.github.io\sdkjs-plugins\content\macros\libs/" # Default value
)
args = parser.parse_args()
old_cur = os.getcwd()
if True == os.path.isdir(args.destination):
shutil.rmtree(args.destination, ignore_errors=True)
os.mkdir(args.destination)
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)
old_cur = os.getcwd()
os.chdir("../../../sdkjs")
if True == os.path.isdir('deploy/api_builder'):
shutil.rmtree('deploy/api_builder', ignore_errors=True)
os.mkdir('deploy/api_builder')
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)

View File

@ -1,57 +0,0 @@
# Documentation Generation Guide
This guide explains how to generate documentation for Onlyoffice API using the provided Python scripts, `generate_docs_json.py` and `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors.
## Prerequisites
1. **Node.js and npm**: Ensure you have Node.js and npm installed on your machine. You can download them from [Node.js official website](https://nodejs.org/).
2. **jsdoc**: The scripts use `jsdoc` to generate documentation. Install it using npm:
```bash
npm install
```
## Scripts Overview
### `generate_docs_json.py`
This script generates JSON documentation based on the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/sdkjs/deploy/api_builder/json`.
### `generate_docs_md.py`
This script generates Markdown documentation from the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `Onlyoffice/office-js-api`.
## Example
To generate JSON documentation with the default output path:
```bash
python generate_docs_json.py /path/to/save/json
```
To generate Markdown documentation and specify a custom output path:
```bash
python generate_docs_md.py /path/to/save/markdown
```
## Notes
- Make sure to have all necessary permissions to run these scripts and write to the specified directories.
- The output directories will be created if they do not exist.

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,216 +0,0 @@
exports.handlers = {
processingComplete: function(e) {
// array for filtered doclets
let filteredDoclets = [];
const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
const classesDocletsMap = {}; // doclets for classes write at the end
let passedClasses = []; // passed classes for current editor
// Remove dublicates doclets
const latestDoclets = {};
e.doclets.forEach(doclet => {
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
const shouldAddMethod =
doclet.kind !== 'member' &&
(!doclet.longname || doclet.longname.search('private') === -1) &&
doclet.scope !== 'inner' && hasTypeofEditorsTag;
if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
latestDoclets[doclet.longname] = doclet;
}
});
e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));
// check available classess for current editor
for (let i = 0; i < e.doclets.length; i++) {
const doclet = e.doclets[i];
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
const shouldAdd =
doclet.kind !== 'member' &&
(!doclet.longname || doclet.longname.search('private') === -1) &&
doclet.scope !== 'inner' &&
(!isMethod || hasTypeofEditorsTag);
if (shouldAdd) {
if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
passedClasses.push(cleanName(doclet.memberof));
}
}
else if (doclet.kind == 'class') {
classesDocletsMap[cleanName(doclet.name)] = doclet;
}
}
// remove unavailave classes in current editor
passedClasses = passedClasses.filter(className => {
const doclet = classesDocletsMap[className];
if (!doclet) {
return true;
}
const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
// class is passes if there is no editor tag or the current editor is among the tags
const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
return isPassed;
});
for (let i = 0; i < e.doclets.length; i++) {
const doclet = e.doclets[i];
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
const shouldAddMethod =
doclet.kind !== 'member' &&
(!doclet.longname || doclet.longname.search('private') === -1) &&
doclet.scope !== 'inner' && hasTypeofEditorsTag;
if (shouldAddMethod) {
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
if (false == passedClasses.includes(cleanName(doclet.memberof))) {
continue;
}
// We leave only the necessary fields
doclet.memberof = cleanName(doclet.memberof);
doclet.longname = cleanName(doclet.longname);
doclet.name = cleanName(doclet.name);
const filteredDoclet = {
comment: doclet.comment,
description: doclet.description,
memberof: cleanName(doclet.memberof),
params: doclet.params ? doclet.params.map(param => ({
type: param.type ? {
names: param.type.names,
parsedType: param.type.parsedType
} : param.type,
name: param.name,
description: param.description,
optional: param.optional,
defaultvalue: param.defaultvalue
})) : doclet.params,
returns: doclet.returns ? doclet.returns.map(returnObj => ({
type: {
names: returnObj.type.names,
parsedType: returnObj.type.parsedType
}
})) : doclet.returns,
name: doclet.name,
longname: cleanName(doclet.longname),
kind: doclet.kind,
scope: doclet.scope,
type: doclet.type ? {
names: doclet.type.names,
parsedType: doclet.type.parsedType
} : doclet.type,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
see: doclet.see
};
// Add the filtered doclet to the array
filteredDoclets.push(filteredDoclet);
}
else if (doclet.kind == 'class') {
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
if (false == passedClasses.includes(cleanName(doclet.name))) {
continue;
}
const filteredDoclet = {
comment: doclet.comment,
description: doclet.description,
name: cleanName(doclet.name),
longname: cleanName(doclet.longname),
kind: doclet.kind,
scope: "global",
augments: doclet.augments || undefined,
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
see: doclet.see || undefined
};
filteredDoclets.push(filteredDoclet);
}
else if (doclet.kind == 'typedef') {
const filteredDoclet = {
comment: doclet.comment,
description: doclet.description,
name: cleanName(doclet.name),
longname: cleanName(doclet.longname),
kind: doclet.kind,
scope: "global",
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
see: doclet.see,
type: doclet.type ? {
names: doclet.type.names,
parsedType: doclet.type.parsedType
} : doclet.type
};
filteredDoclets.push(filteredDoclet);
}
}
// Replace doclets with a filtered array
e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
}
};

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,136 +0,0 @@
import os
import subprocess
import json
import argparse
import re
# Configuration files
configs = [
"./config/word.json",
"./config/cell.json",
"./config/slide.json",
"./config/forms.json"
]
editors_maps = {
"word": "CDE",
"cell": "CSE",
"slide": "CPE",
"forms": "CFE"
}
def generate(output_dir, md=False):
missing_examples_file = f'{output_dir}/missing_examples.txt'
if not os.path.exists(output_dir):
os.makedirs(output_dir)
# Recreate missing_examples.txt file
with open(missing_examples_file, 'w', encoding='utf-8') as f:
f.write('')
# Generate JSON documentation
for config in configs:
editor_name = config.split('/')[-1].replace('.json', '')
output_file = os.path.join(output_dir, editor_name + ".json")
command = f"set EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}"
print(f"Generating {editor_name}.json: {command}")
subprocess.run(command, shell=True)
# Append examples to JSON documentation
for config in configs:
editor_name = config.split('/')[-1].replace('.json', '')
output_file = os.path.join(output_dir, editor_name + ".json")
# Read the JSON file
with open(output_file, 'r', encoding='utf-8') as f:
data = json.load(f)
# Modify JSON data
for doclet in data:
if 'see' in doclet:
if doclet['see'] is not None:
if editor_name == 'forms':
doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
else:
doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())
file_path = '../../../../' + doclet['see'][0]
if os.path.exists(file_path):
with open(file_path, 'r', encoding='utf-8') as see_file:
example_content = see_file.read()
# Extract the first line as a comment if it exists
lines = example_content.split('\n')
if lines[0].startswith('//'):
comment = lines[0] + '\n'
code_content = '\n'.join(lines[1:])
else:
comment = ''
code_content = example_content
# Format content for doclet['example']
doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
if md == False:
doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name.title()}"}}\n{code_content}\n```'
else:
# Record missing examples in missing_examples.txt
with open(missing_examples_file, 'a', encoding='utf-8') as missing_file:
missing_file.write(f"{file_path}\n")
# Write the modified JSON file back
with open(output_file, 'w', encoding='utf-8') as f:
json.dump(data, f, ensure_ascii=False, indent=4)
print("Documentation generation completed.")
def remove_builder_lines(text):
    """Return *text* without any line whose content starts with "builder."."""
    kept = []
    for raw_line in text.splitlines():
        if raw_line.strip().startswith("builder."):
            continue  # drop document-builder scripting lines
        kept.append(raw_line)
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JavaScript comment delimiters from *text*, keeping the comment text.

    "//" markers (plus one optional space) at line starts are removed, as are
    "/*" and "*/" delimiters; the enclosed words survive.
    """
    without_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    without_block_markers = re.sub(r'/\*\s*|\s*\*/', '', without_line_markers, flags=re.DOTALL)
    return without_block_markers.strip()
def get_current_branch(path):
    """Return the current git branch name for the repo at *path*, or None.

    Failures (git missing, not a repository, non-zero exit) are reported on
    stdout and yield None instead of raising.
    """
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
            cwd=path,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True
        )
    except Exception as exc:
        print(f"Exception: {exc}")
        return None
    if proc.returncode != 0:
        print(f"Error: {proc.stderr}")
        return None
    return proc.stdout.strip()
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(description="Generate documentation")
    arg_parser.add_argument(
        "destination",
        type=str,
        nargs='?',  # positional argument is optional
        default="../../../../document-builder-declarations/document-builder",
        help="Destination directory for the generated documentation"
    )
    cli_args = arg_parser.parse_args()
    # Nest the output under the current sdkjs branch so each branch's
    # documentation lands in its own folder.
    current_branch = get_current_branch("../../../../sdkjs")
    if current_branch:
        cli_args.destination = f"{cli_args.destination}/{current_branch}"
    generate(cli_args.destination)

View File

@ -1,266 +0,0 @@
import os
import json
import re
import shutil
import argparse
import generate_docs_json
# Configuration files
# Editor identifiers: each name selects a <name>.json file produced by
# generate_docs_json and becomes an output subdirectory in generate().
editors = [
    "word",
    "cell",
    "slide",
    "forms"
]
def load_json(file_path):
    """Parse and return the JSON document stored at *file_path* (UTF-8)."""
    with open(file_path, encoding='utf-8') as handle:
        return json.load(handle)
def write_markdown_file(file_path, content):
    """Write *content* to *file_path* as UTF-8, replacing any existing file."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(content)
def remove_js_comments(text):
    """Strip JS comment delimiters, preserving the comment text itself."""
    stripped = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)   # line comments
    stripped = re.sub(r'/\*\s*|\s*\*/', '', stripped, flags=re.DOTALL)  # block delimiters
    return stripped.strip()
def correct_description(string):
    """Convert the HTML-ish markup used in JSDoc descriptions to Markdown.

    None becomes a placeholder sentence; <b>…</b> becomes **…**; a <note>
    element becomes an inline 💡 call-out.
    """
    if string is None:
        return 'No description provided.'
    # The bold tags are literal strings, so plain replace is equivalent
    # to the regex substitution.
    string = string.replace('<b>', '**').replace('</b>', '**')
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL)
def correct_default_value(value, enumerations, classes):
    """Render a JSDoc default value as a Markdown string with type links.

    Args:
        value: the doclet's ``defaultvalue`` (may be None, bool, or anything
            stringifiable).
        enumerations: typedef doclets used by generate_data_types_markdown
            for linking.
        classes: known class names used for linking.

    Returns:
        '' for None, otherwise the linked Markdown rendering of the value.
    """
    if value is None:
        return ''
    # Identity checks, not ==: in Python `1 == True` and `0 == False`,
    # so equality would wrongly render numeric defaults 0/1 as booleans.
    if value is True:
        value = "true"
    elif value is False:
        value = "false"
    else:
        value = str(value)
    return generate_data_types_markdown([value], enumerations, classes)
def remove_line_breaks(string):
    """Delete every CR and LF character so the text fits one table cell."""
    return string.replace('\r', '').replace('\n', '')
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """Join *types* with a Markdown-escaped pipe and link known type names.

    Enumeration and class names found in *types* are turned into relative
    Markdown links rooted at *root*; names inside angle brackets (generic
    parameters such as ``<ApiRange>``) are linked as well.

    NOTE(review): the loops below use plain substring replacement, so a type
    name that is a substring of another could be linked incorrectly — confirm
    type names never overlap.
    """
    param_types_md = ' &#124;'.join(types)
    for enum in enumerations:
        if enum['name'] in types:
            param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}]({root}Enumeration/{enum['name']}.md)")
    for cls in classes:
        if cls in types:
            param_types_md = param_types_md.replace(cls, f"[{cls}]({root}{cls}/{cls}.md)")
    def replace_with_links(match):
        # Link the content of an <...> generic parameter when it names a
        # known enumeration or class; otherwise re-emit it unchanged.
        element = match.group(1).strip()
        base_type = element.split('.')[0]  # Take only the first part before the dot, if any
        if any(enum['name'] == base_type for enum in enumerations):
            # NOTE(review): uses the literal '../../' prefix rather than *root* —
            # confirm this is intended for non-default roots.
            return f"<[{element}](../../Enumeration/{base_type}.md)>"
        elif base_type in classes:
            return f"<[{element}](../../{base_type}/{base_type}.md)>"
        return f"<{element}>"
    return re.sub(r'<([^<>]+)>', replace_with_links, param_types_md)
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Build the Markdown page for one class: title, properties table, method index."""
    parts = [f"# {class_name}\n\nRepresents the {class_name} class.\n\n"]
    parts.append(generate_properties_markdown(properties, enumerations, classes, '../'))
    parts.append("## Methods\n\n")
    for method_doclet in methods:
        name = method_doclet['name']
        parts.append(f"- [{name}](./Methods/{name}.md)\n")
    return ''.join(parts)
def generate_method_markdown(method, enumerations, classes):
    """Build the Markdown page for one method doclet.

    Sections emitted: title + description, Syntax, Parameters (table or
    placeholder), Returns, and an optional Example extracted from the
    doclet's ``example`` field.
    """
    method_name = method['name']
    description = method.get('description', 'No description provided.')
    description = correct_description(description)
    params = method.get('params', [])
    returns = method.get('returns', [])
    example = method.get('example', '')
    memberof = method.get('memberof', '')
    content = f"# {method_name}\n\n{description}\n\n"
    # Syntax section
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\nexpression.{method_name}({param_list});\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"
    content += "## Parameters\n\n"
    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            # A param may lack a 'type' entry entirely; treat that as no types.
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            # Line breaks are removed so the description fits one table cell.
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)
            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"
    content += "\n## Returns\n\n"
    if returns:
        # Only the first @returns entry is rendered.
        return_type = ', '.join(returns[0].get('type', {}).get('names', [])) if returns[0].get('type') else 'Unknown'
        # Check for enumerations and classes in return type and add links if they exist
        return_type_md = generate_data_types_markdown([return_type], enumerations, classes)
        content += return_type_md
    else:
        content += "This method doesn't return any data."
    if example:
        # Separate comment and code, and remove comment symbols.
        # NOTE(review): assumes the example always contains a '```js' fence;
        # an example without one raises ValueError here — confirm upstream
        # always formats examples that way.
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
    return content
def generate_properties_markdown(properties, enumerations, classes, root='../../'):
    """Render a Markdown table for *properties*; return '' when there are none."""
    if properties is None:
        return ''
    rows = [
        "## Properties\n\n",
        "| Name | Type | Description |\n",
        "| ---- | ---- | ----------- |\n",
    ]
    for prop in properties:
        # Descriptions are flattened to one line so they fit the table cell.
        desc = remove_line_breaks(correct_description(prop.get('description', 'No description provided.')))
        types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes, root)
        rows.append(f"| {prop['name']} | {types_md} | {desc} |\n")
    rows.append("\n")
    return ''.join(rows)
def generate_enumeration_markdown(enumeration, enumerations, classes):
    """Build the Markdown page for one typedef doclet.

    Three shapes are handled: a union type (rendered as an enumeration value
    list), an object typedef with properties (rendered as a properties
    table), and a plain aliased type (rendered as linked type names).
    """
    enum_name = enumeration['name']
    description = enumeration.get('description', 'No description provided.')
    description = correct_description(description)
    example = enumeration.get('example', '')
    content = f"# {enum_name}\n\n{description}\n\n"
    if 'TypeUnion' == enumeration['type']['parsedType']['type']:
        # Union typedef: list each member, linking known enums/classes.
        content += "## Type\n\nEnumeration\n\n"
        content += "## Values\n\n"
        elements = enumeration['type']['parsedType']['elements']
        for element in elements:
            element_name = element['name'] if element['type'] != 'NullLiteral' else 'null'
            # Check if element is in enumerations or classes before adding link
            if any(enum['name'] == element_name for enum in enumerations):
                content += f"- [{element_name}](../../Enumeration/{element_name}.md)\n"
            elif element_name in classes:
                content += f"- [{element_name}](../../{element_name}/{element_name}.md)\n"
            else:
                content += f"- {element_name}\n"
    elif enumeration['properties'] is not None:
        # Object typedef: document its fields as a properties table.
        content += "## Type\n\nObject\n\n"
        content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
    else:
        # Plain alias: list the underlying type name(s) with links.
        content += "## Type\n\n"
        types = enumeration['type']['names']
        for t in types:
            t = generate_data_types_markdown([t], enumerations, classes)
            content += t + "\n\n"
    if example:
        # Separate comment and code, and remove comment symbols.
        # NOTE(review): assumes the example contains a '```js' fence;
        # ValueError otherwise — confirm upstream formatting.
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
    return content
def process_doclets(data, output_dir):
    """Group doclets into classes, methods and enumerations; write Markdown pages.

    Creates ``<output_dir>/<Class>/<Class>.md``, one file per method under
    ``<Class>/Methods/``, and one file per typedef under ``Enumeration/``.

    Args:
        data: list of JSDoc doclet dicts (parsed JSON).
        output_dir: destination directory; subdirectories are created as needed.
    """
    classes = {}
    classes_props = {}
    enumerations = []
    for doclet in data:
        kind = doclet['kind']
        if kind == 'class':
            class_name = doclet['name']
            # setdefault (not assignment) so a class doclet appearing after
            # its member functions does not wipe the methods already collected.
            classes.setdefault(class_name, [])
            classes_props[class_name] = doclet.get('properties', None)
        elif kind == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                classes.setdefault(class_name, []).append(doclet)
        elif kind == 'typedef':
            enumerations.append(doclet)
    # Write one page per class plus one page per method.
    for class_name, methods in classes.items():
        class_dir = os.path.join(output_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)
        # .get: a function's memberof may name a class that has no class
        # doclet, in which case there are no recorded properties (plain
        # indexing would raise KeyError here).
        class_content = generate_class_markdown(class_name, methods, classes_props.get(class_name), enumerations, classes)
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)
        for method in methods:
            method_content = generate_method_markdown(method, enumerations, classes)
            write_markdown_file(os.path.join(methods_dir, f"{method['name']}.md"), method_content)
    # Write one page per enumeration/typedef.
    enum_dir = os.path.join(output_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)
    for enum in enumerations:
        enum_content = generate_enumeration_markdown(enum, enumerations, classes)
        write_markdown_file(os.path.join(enum_dir, f"{enum['name']}.md"), enum_content)
def generate(output_dir):
    """Generate the Markdown documentation tree under *output_dir*.

    First builds the intermediate JSON via generate_docs_json into a
    temporary folder, then converts each editor's JSON to Markdown and
    removes the temporary folder.
    """
    print('Generating Markdown documentation...')
    tmp_json_dir = output_dir + 'tmp_json'
    generate_docs_json.generate(tmp_json_dir, md=True)
    for editor_name in editors:
        editor_json = os.path.join(tmp_json_dir, editor_name + ".json")
        os.makedirs(output_dir + f'/{editor_name.title()}', exist_ok=True)
        doclets = load_json(editor_json)
        process_doclets(doclets, output_dir + f'/{editor_name}')
    shutil.rmtree(tmp_json_dir)
    print('Done')
if __name__ == "__main__":
    cli = argparse.ArgumentParser(description="Generate documentation")
    cli.add_argument(
        "destination",
        type=str,
        nargs='?',  # positional argument is optional
        default="../../../../office-js-api/",
        help="Destination directory for the generated documentation"
    )
    generate(cli.parse_args().destination)

View File

@ -1,7 +0,0 @@
{
"dependencies": {
"jsdoc-to-markdown": "7.1.1",
"dmd": "6.1.0",
"handlebars": "4.7.7"
}
}

View File

@ -7,9 +7,6 @@ import os
import glob
import shutil
sys.stdin.reconfigure(encoding='utf-8')
sys.stdout.reconfigure(encoding='utf-8')
params = sys.argv[1:]
if (3 > len(params)):
@ -23,14 +20,8 @@ directory_input = params[0].replace("\\", "/")
directory_output = params[1].replace("\\", "/")
author_name = params[2]
if not os.path.exists(directory_output):
os.mkdir(directory_output)
input_files = []
count = 1
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
print(count, file)
count += 1
input_files.append(file.replace("\\", "/"))
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
@ -39,7 +30,7 @@ def change_author_name(file_dist, output_file, author_name):
app = "7za" if ("mac" == base.host_platform()) else "7z"
base.cmd_exe(app, ["x", "-y", file_dist, "-o" + temp_dir, "docProps\\core.xml", "-r"])
with open(temp_dir + "/docProps/core.xml", 'r', encoding='utf-8') as file:
with open(temp_dir + "/docProps/core.xml", 'r') as file:
data = file.read()
creator_open = "<dc:creator>"
@ -76,7 +67,7 @@ def change_author_name(file_dist, output_file, author_name):
else:
data = data[:last_tag_pos] + lastModified_open + author_name + lastModified_close + data[last_tag_pos:]
with open(temp_dir + "/docProps/core.xml", 'w', encoding='utf-8') as file:
with open(temp_dir + "/docProps/core.xml", 'w') as file:
file.write(data)
shutil.copyfile(file_dist, output_file)
@ -89,12 +80,7 @@ for input_file in input_files:
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]).replace(' ', '_') + u"." + input_file.split(".")[-1]
try:
change_author_name(input_file, output_file, author_name)
except:
print("Error in converting document: ", input_file)
continue
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + input_file.split(".")[-1]
change_author_name(input_file, output_file, author_name)
base.delete_dir(temp_dir)
output_cur += 1

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append('../../scripts')
@ -52,43 +51,19 @@ if base.is_file(directory_fonts_local + "/AllFonts.js"):
directory_fonts = directory_fonts_local
# ---------------------------------------------------
json_params = "{"
json_params += "'spreadsheetLayout':{"
# True for fit, False for 100%
isScaleSheetToPage = False
json_fit_text = "0"
if isScaleSheetToPage:
json_fit_text = "1"
json_params += "'fitToWidth':" + json_fit_text + ",'fitToHeight':" + json_fit_text + ","
if True:
json_params += "'orientation':'landscape',"
page_margins = "'pageMargins':{'bottom':10,'footer':5,'header':5,'left':5,'right':5,'top':10}"
page_setup = "'pageSetup':{'orientation':1,'width':210,'height':297,'paperUnits':0,'scale':100,'printArea':false,'horizontalDpi':600,'verticalDpi':600,'usePrinterDefaults':true,'fitToHeight':0,'fitToWidth':0}"
json_params += "'sheetsProps':{'0':{'headings':false,'printTitlesWidth':null,'printTitlesHeight':null," + page_margins + "," + page_setup + "}}},"
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}"
json_params += "}"
json_params = "{'spreadsheetLayout':{'fitToWidth':1,'fitToHeight':1},"
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}}"
json_params = json_params.replace("'", "&quot;")
output_len = len(input_files)
output_cur = 1
for input_file in input_files:
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file_tmp = os.path.join(output_dir, "temp")
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0].strip())
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0])
xml_convert = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
xml_convert += u"<TaskQueueDataConvert>"
xml_convert += (u"<m_sFileFrom>" + input_file + u"</m_sFileFrom>")
xml_convert += (u"<m_sFileTo>" + output_file_tmp + u".zip</m_sFileTo>")
xml_convert += (u"<m_sFileTo>" + output_file + u".zip</m_sFileTo>")
xml_convert += u"<m_nFormatTo>1029</m_nFormatTo>"
xml_convert += (u"<m_sAllFontsPath>" + directory_fonts + u"/AllFonts.js</m_sAllFontsPath>")
xml_convert += (u"<m_sFontDir>" + directory_fonts + u"</m_sFontDir>")
@ -108,9 +83,8 @@ for input_file in input_files:
base.cmd_in_dir(directory_x2t, "x2t", [temp_dir + "/to.xml"], True)
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
base.extract_unicode(output_file_tmp + u".zip", output_file_tmp)
base.move_dir(str(output_file_tmp), str(output_file))
base.delete_file(output_file_tmp + u".zip")
base.extract_unicode(output_file + u".zip", output_file)
base.delete_file(output_dir + "/" + os.path.splitext(os.path.basename(input_file))[0] + ".zip")
output_cur += 1
base.delete_dir(temp_dir)

View File

@ -1 +1 @@
8.1.1
8.1.0