diff --git a/.github/workflows/git-operations.yml b/.github/workflows/git-operations.yml new file mode 100644 index 0000000..fd613e3 --- /dev/null +++ b/.github/workflows/git-operations.yml @@ -0,0 +1,88 @@ +name: Git Operations + +on: + workflow_dispatch: + inputs: + operation: + description: 'Operation to perform' + required: true + type: choice + options: + - create + - remove + default: 'create' + + branch_name: + description: 'Branch name to create or remove' + required: true + type: string + + base_branch: + description: 'Base branch to work from (for create operation)' + required: false + type: string + default: 'develop' + + branding: + description: 'Branding name' + required: false + type: string + default: 'onlyoffice' + + branding_url: + description: 'Branding repository URL (relative to git host)' + required: false + type: string + default: 'ONLYOFFICE/onlyoffice.git' + +jobs: + git-operations: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + path: ONLYOFFICE/build_tools + token: ${{ secrets.GITHUB_TOKEN }} + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + # Install any Python dependencies if requirements.txt exists + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + - name: Configure Git + run: | + git config --global user.name "GitHub Actions Bot" + git config --global user.email "actions@github.com" + + - name: Run Git Operations + run: | + cd ONLYOFFICE/build_tools/scripts/develop + python git_operations.py ${{ inputs.operation }} "${{ inputs.branch_name }}" \ + --base-branch="${{ inputs.base_branch }}" \ + --branding="${{ inputs.branding }}" \ + --branding-url="${{ inputs.branding_url }}" \ + --modules="${{ inputs.modules }}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Operation Summary + run: | + echo "## Git 
Operations Summary" >> $GITHUB_STEP_SUMMARY + echo "- **Operation**: ${{ inputs.operation }}" >> $GITHUB_STEP_SUMMARY + echo "- **Branch Name**: ${{ inputs.branch_name }}" >> $GITHUB_STEP_SUMMARY + echo "- **Base Branch**: ${{ inputs.base_branch }}" >> $GITHUB_STEP_SUMMARY + echo "- **Branding**: ${{ inputs.branding }}" >> $GITHUB_STEP_SUMMARY + echo "- **Branding URL**: ${{ inputs.branding_url }}" >> $GITHUB_STEP_SUMMARY + echo "- **Modules**: ${{ inputs.modules }}" >> $GITHUB_STEP_SUMMARY + if [ "${{ inputs.operation }}" = "remove" ] && [ "${{ inputs.force_remove }}" = "true" ]; then + echo "- **Force Remove**: Yes" >> $GITHUB_STEP_SUMMARY + fi diff --git a/Dockerfile b/Dockerfile index 0cd5a0a..eb8cb99 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,11 +4,17 @@ ENV TZ=Etc/UTC RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone RUN apt-get -y update && \ - apt-get -y install python \ - python3 \ + apt-get -y install tar \ sudo -RUN rm /usr/bin/python && ln -s /usr/bin/python2 /usr/bin/python + ADD . /build_tools WORKDIR /build_tools +RUN mkdir -p /opt/python3 && \ + tar -xzf /build_tools/tools/linux/python3.tar.gz -C /opt/python3 --strip-components=1 + +ENV PATH="/opt/python3/bin:${PATH}" + +RUN ln -s /opt/python3/bin/python3.10 /usr/bin/python + CMD ["sh", "-c", "cd tools/linux && python3 ./automate.py"] diff --git a/README.md b/README.md index 414e62b..a96a939 100644 --- a/README.md +++ b/README.md @@ -8,8 +8,8 @@ necessary for the compilation process, all the dependencies required for the correct work, as well as to get the latest version of **ONLYOFFICE products** source code and build all their components. -**Important!** We can only guarantee the correct work of the products built from -the `master` branch. +**Important!** We can only guarantee the correct work of the products built +from the `master` branch. 
## How to use - Linux diff --git a/configure.py b/configure.py index 7620b8e..59c8ade 100755 --- a/configure.py +++ b/configure.py @@ -14,7 +14,7 @@ parser.add_option("--clean", action="store", type="string", dest="clean", defaul parser.add_option("--module", action="store", type="string", dest="module", default="builder", help="defines what modules to build. You can specify several of them, e.g. --module 'core desktop builder server mobile'") parser.add_option("--develop", action="store", type="string", dest="develop", default="0", help="defines develop mode") parser.add_option("--beta", action="store", type="string", dest="beta", default="0", help="defines beta mode") -parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]") +parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'win_arm64', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]") parser.add_option("--config", action="store", type="string", dest="config", default="", help="provides ability to specify additional parameters for qmake") 
parser.add_option("--qt-dir", action="store", type="string", dest="qt-dir", default="", help="defines qmake directory path. qmake can be found in qt-dir/compiler/bin directory") parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp", default="", help="defines qmake directory path for Windows XP. qmake can be found in 'qt-dir/compiler/bin directory") diff --git a/defaults b/defaults index 7428cc6..1d71ba7 100644 --- a/defaults +++ b/defaults @@ -1,3 +1,3 @@ -sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, zotero" -sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition, drawio" +sdkjs-plugin="ai, photoeditor, ocr, translator, thesaurus, youtube, highlightcode" +sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition" sdkjs-addons="sdkjs-forms" diff --git a/develop/README.md b/develop/README.md index 8de92cd..17860f0 100644 --- a/develop/README.md +++ b/develop/README.md @@ -6,6 +6,10 @@ but don't want to compile pretty compilcated core product to make those changes. ## System requirements +**Note**: ARM-based architectures are currently **NOT** supported; +attempting to run the images on ARM devices may result in startup failures +or other runtime issues. 
+ ### Windows You need the latest diff --git a/scripts/base.py b/scripts/base.py index 468f571..b5d4f66 100644 --- a/scripts/base.py +++ b/scripts/base.py @@ -15,6 +15,7 @@ import stat import json __file__script__path__ = os.path.dirname( os.path.realpath(__file__)) +icu_ver = "74" # common functions -------------------------------------- def get_script_dir(file=""): @@ -521,6 +522,27 @@ def git_get_origin(): os.chdir(cur_dir) return ret +def git_get_base_url(): + """Get the base URL for git operations, with fallback to GitHub""" + origin = git_get_origin() + if origin: + # Extract base URL from origin + if origin.startswith("https://"): + # For HTTPS URLs like https://git.example.com/owner/repo.git + parts = origin.split("/") + if len(parts) >= 4: + return "/".join(parts[:3]) + "/" + elif ":" in origin and "@" in origin: + # For SSH URLs like git@git.example.com:owner/repo.git + at_pos = origin.find("@") + colon_pos = origin.find(":", at_pos) + if at_pos != -1 and colon_pos != -1: + host = origin[at_pos+1:colon_pos] + return f"https://{host}/" + + # Fallback to GitHub + return "https://github.com/" + def git_is_ssh(): git_protocol = config.option("git-protocol") if (git_protocol == "https"): @@ -542,7 +564,7 @@ def get_ssh_base_url(): def git_update(repo, is_no_errors=False, is_current_dir=False, git_owner=""): print("[git] update: " + repo) owner = git_owner if git_owner else "ONLYOFFICE" - url = "https://github.com/" + owner + "/" + repo + ".git" + url = git_get_base_url() + owner + "/" + repo + ".git" if git_is_ssh(): url = get_ssh_base_url() + repo + ".git" folder = get_script_dir() + "/../../" + repo @@ -614,7 +636,7 @@ def get_branding_repositories(checker): def create_pull_request(branches_to, repo, is_no_errors=False, is_current_dir=False): print("[git] create pull request: " + repo) - url = "https://github.com/ONLYOFFICE/" + repo + ".git" + url = git_get_base_url() + "ONLYOFFICE/" + repo + ".git" if git_is_ssh(): url = get_ssh_base_url() + repo + ".git" 
folder = get_script_dir() + "/../../" + repo @@ -715,6 +737,14 @@ def qt_setup(platform): if ("gcc_arm" == compiler_platform): qt_dir = config.option("qt-dir") + "/gcc" + # OVERRIDE IF NEEDED + set_env("QT_QMAKE_ADDON", "") + if platform == "win_arm64" and not is_dir(qt_dir): + override_qt_directory = os.path.abspath(os.path.dirname(__file__) + "/../tools/win/arm64/qt_build/Qt-5.15.2/win_arm64") + if is_dir(override_qt_directory): + qt_dir = os.path.abspath(override_qt_directory).replace("\\", "/") + set_env("QT_QMAKE_ADDON", "-spec win32-arm64-msvc2017") + set_env("QT_DEPLOY", qt_dir + "/bin") if ("linux_arm64" == platform): @@ -808,6 +838,9 @@ def qt_config(platform): if ("linux_arm64" == platform): config_param += " linux_arm64" + + if ("win_arm64" == platform): + config_param += " win_arm64" config_param += qt_config_platform_addon(platform) return config_param @@ -833,6 +866,12 @@ def qt_config_as_param(value): def qt_copy_lib(lib, dir): qt_dir = get_env("QT_DEPLOY") + + # TODO: remove version from library name + qt_major = qt_major_version() + if ("5" != qt_major): + lib = lib.replace("Qt5", "Qt" + qt_major) + if ("windows" == host_platform()): if ("" == qt_dst_postfix()): copy_lib(qt_dir, dir, lib) @@ -841,7 +880,7 @@ def qt_copy_lib(lib, dir): else: src_file = qt_dir + "/../lib/lib" + lib + ".so." + qt_version() if (is_file(src_file)): - copy_file(src_file, dir + "/lib" + lib + ".so." + qt_major_version()) + copy_file(src_file, dir + "/lib" + lib + ".so." + qt_major) else: libFramework = lib libFramework = libFramework.replace("Qt5", "Qt") @@ -1224,12 +1263,13 @@ def mac_correct_rpath_binary(path, libs): def mac_correct_rpath_library(name, libs): return mac_correct_rpath_binary("./lib" + name + ".dylib", libs) +mac_icu_libs = ["icudata." + icu_ver, "icuuc." 
+ icu_ver] def mac_correct_rpath_x2t(dir): cur_dir = os.getcwd() os.chdir(dir) - mac_correct_rpath_library("icudata.58", []) - mac_correct_rpath_library("icuuc.58", ["icudata.58"]) - mac_correct_rpath_library("UnicodeConverter", ["icuuc.58", "icudata.58"]) + mac_correct_rpath_library("icudata." + icu_ver, []) + mac_correct_rpath_library("icuuc." + icu_ver, ["icudata." + icu_ver]) + mac_correct_rpath_library("UnicodeConverter", mac_icu_libs) mac_correct_rpath_library("kernel", ["UnicodeConverter"]) mac_correct_rpath_library("kernel_network", ["UnicodeConverter", "kernel"]) mac_correct_rpath_library("graphics", ["UnicodeConverter", "kernel"]) @@ -1246,23 +1286,23 @@ def mac_correct_rpath_x2t(dir): mac_correct_rpath_library("HWPFile", ["UnicodeConverter", "kernel", "graphics"]) cmd("chmod", ["-v", "+x", "./x2t"]) cmd("install_name_tool", ["-add_rpath", "@executable_path", "./x2t"], True) - mac_correct_rpath_binary("./x2t", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer", "IWorkFile", "HWPFile"]) + mac_correct_rpath_binary("./x2t", mac_icu_libs + ["UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer", "IWorkFile", "HWPFile"]) if is_file("./allfontsgen"): cmd("chmod", ["-v", "+x", "./allfontsgen"]) cmd("install_name_tool", ["-add_rpath", "@executable_path", "./allfontsgen"], True) - mac_correct_rpath_binary("./allfontsgen", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics"]) + mac_correct_rpath_binary("./allfontsgen", mac_icu_libs + ["UnicodeConverter", "kernel", "graphics"]) if is_file("./allthemesgen"): cmd("chmod", ["-v", "+x", "./allthemesgen"]) cmd("install_name_tool", ["-add_rpath", "@executable_path", "./allthemesgen"], True) - mac_correct_rpath_binary("./allthemesgen", 
["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics", "kernel_network", "doctrenderer", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "DocxRenderer"]) + mac_correct_rpath_binary("./allthemesgen", mac_icu_libs + ["UnicodeConverter", "kernel", "graphics", "kernel_network", "doctrenderer", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "DocxRenderer"]) if is_file("./pluginsmanager"): cmd("chmod", ["-v", "+x", "./pluginsmanager"]) cmd("install_name_tool", ["-add_rpath", "@executable_path", "./pluginsmanager"], True) - mac_correct_rpath_binary("./pluginsmanager", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network"]) + mac_correct_rpath_binary("./pluginsmanager", mac_icu_libs + ["UnicodeConverter", "kernel", "kernel_network"]) if is_file("./vboxtester"): cmd("chmod", ["-v", "+x", "./vboxtester"]) cmd("install_name_tool", ["-add_rpath", "@executable_path", "./vboxtester"], True) - mac_correct_rpath_binary("./vboxtester", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network"]) + mac_correct_rpath_binary("./vboxtester", mac_icu_libs + ["UnicodeConverter", "kernel", "kernel_network"]) os.chdir(cur_dir) return @@ -1271,14 +1311,14 @@ def mac_correct_rpath_docbuilder(dir): os.chdir(dir) cmd("chmod", ["-v", "+x", "./docbuilder"]) cmd("install_name_tool", ["-add_rpath", "@executable_path", "./docbuilder"], True) - mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "IWorkFile", "HWPFile", "doctrenderer", "DocxRenderer"]) - mac_correct_rpath_library("docbuilder.c", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "DocxRenderer"]) + mac_correct_rpath_binary("./docbuilder", mac_icu_libs + ["UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", 
"OFDFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "IWorkFile", "HWPFile", "doctrenderer", "DocxRenderer"]) + mac_correct_rpath_library("docbuilder.c", mac_icu_libs + ["UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "DocxRenderer"]) def add_loader_path_to_rpath(libs): for lib in libs: cmd("install_name_tool", ["-add_rpath", "@loader_path", "lib" + lib + ".dylib"], True) - add_loader_path_to_rpath(["icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "DocxRenderer", "docbuilder.c"]) + add_loader_path_to_rpath(["icuuc." + icu_ver, "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "DocxRenderer", "docbuilder.c"]) os.chdir(cur_dir) return @@ -1288,9 +1328,9 @@ def mac_correct_rpath_desktop(dir): os.chdir(dir) mac_correct_rpath_library("hunspell", []) mac_correct_rpath_library("ooxmlsignature", ["kernel"]) - mac_correct_rpath_library("ascdocumentscore", ["UnicodeConverter", "kernel", "graphics", "kernel_network", "PdfFile", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"]) + mac_correct_rpath_library("ascdocumentscore", ["UnicodeConverter", "kernel", "graphics", "kernel_network", "PdfFile", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature", "doctrenderer"]) cmd("install_name_tool", ["-change", "@executable_path/../Frameworks/Chromium Embedded Framework.framework/Chromium Embedded Framework", "@rpath/Chromium Embedded Framework.framework/Chromium Embedded Framework", "libascdocumentscore.dylib"]) - mac_correct_rpath_binary("./editors_helper.app/Contents/MacOS/editors_helper", ["ascdocumentscore", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "hunspell", "ooxmlsignature"]) + mac_correct_rpath_binary("./editors_helper.app/Contents/MacOS/editors_helper", ["ascdocumentscore", 
"UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "OFDFile", "DjVuFile", "hunspell", "ooxmlsignature", "doctrenderer"]) cmd("install_name_tool", ["-add_rpath", "@executable_path/../../../../Frameworks", "./editors_helper.app/Contents/MacOS/editors_helper"], True) cmd("install_name_tool", ["-add_rpath", "@executable_path/../../../../Resources/converter", "./editors_helper.app/Contents/MacOS/editors_helper"], True) cmd("chmod", ["-v", "+x", "./editors_helper.app/Contents/MacOS/editors_helper"]) @@ -1324,7 +1364,7 @@ def linux_set_origin_rpath_libraries(dir, libs): return def linux_correct_rpath_docbuilder(dir): - linux_set_origin_rpath_libraries(dir, ["docbuilder.jni.so", "docbuilder.c.so", "icuuc.so.58", "doctrenderer.so", "graphics.so", "kernel.so", "kernel_network.so", "UnicodeConverter.so", "PdfFile.so", "XpsFile.so", "OFDFile.so", "DjVuFile.so", "DocxRenderer.so"]) + linux_set_origin_rpath_libraries(dir, ["docbuilder.jni.so", "docbuilder.c.so", "icuuc.so." + icu_ver, "doctrenderer.so", "graphics.so", "kernel.so", "kernel_network.so", "UnicodeConverter.so", "PdfFile.so", "XpsFile.so", "OFDFile.so", "DjVuFile.so", "DocxRenderer.so"]) return def common_check_version(name, good_version, clean_func): @@ -1407,7 +1447,7 @@ def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False, i plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content" plugins_list_config = config.option("sdkjs-plugin") if isXp: - plugins_list_config="photoeditor, macros, highlightcode, doc2md" + plugins_list_config="photoeditor, highlightcode, doc2md" if ("" == plugins_list_config): return plugins_list = plugins_list_config.rsplit(", ") @@ -1662,7 +1702,7 @@ def convert_ios_framework_to_xcframework_folder(folder, libs): def change_elf_rpath(path, origin): # excludes --- - if (-1 != path.find("libicudata.so.58")): + if (-1 != path.find("libicudata.so." 
+ icu_ver)): return # ------------ tools_dir = get_script_dir() + "/../tools/linux/elf/" @@ -1849,6 +1889,9 @@ def create_x2t_js_cache(dir, product, platform): if ((platform == "linux_arm64") and not is_os_arm()): cmd_in_dir_qemu(platform, dir, "./x2t", ["-create-js-snapshots"], True) return + + if platform == "win_arm64": # copying sdkjs later + return cmd_in_dir(dir, "./x2t", ["-create-js-snapshots"], True) return @@ -1857,4 +1900,25 @@ def setup_local_qmake(dir_qmake): dir_base = os.path.dirname(dir_qmake) writeFile(dir_base + "/onlyoffice_qt.conf", "Prefix = " + dir_base) return - \ No newline at end of file + +def deploy_icu(core_dir, dst_dir, platform): + if (0 == platform.find("android")): + src_dir = core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + copy_file(src_dir + "/icudt" + icu_ver + "l.dat", dst_dir + "/icudt" + icu_ver + "l.dat") + return + + src_dir = core_dir + "/Common/3dParty/icu/" + platform + "/build" + + if (0 == platform.find("win")): + copy_file(src_dir + "/icudt" + icu_ver + ".dll", dst_dir + "/icudt" + icu_ver + ".dll") + copy_file(src_dir + "/icuuc" + icu_ver + ".dll", dst_dir + "/icuuc" + icu_ver + ".dll") + + if (0 == platform.find("linux")): + copy_file(src_dir + "/libicudata.so." + icu_ver, dst_dir + "/libicudata.so." + icu_ver) + copy_file(src_dir + "/libicuuc.so." + icu_ver, dst_dir + "/libicuuc.so." + icu_ver) + + if (0 == platform.find("mac")): + copy_file(src_dir + "/libicudata." + icu_ver + ".dylib", dst_dir + "/libicudata." + icu_ver + ".dylib") + copy_file(src_dir + "/libicuuc." + icu_ver + ".dylib", dst_dir + "/libicuuc." + icu_ver + ".dylib") + + return diff --git a/scripts/build_js.py b/scripts/build_js.py index aab8864..ed3d6c6 100644 --- a/scripts/build_js.py +++ b/scripts/build_js.py @@ -65,10 +65,6 @@ def make(): deldirs = ['ie', 'mobile', 'embed'] [base.delete_dir(root + "/" + d) for root, dirs, f in os.walk(out_dir + "/desktop/web-apps/apps") for d in dirs if d in deldirs] - # for bug 62528. 
remove empty folders - walklist = list(os.walk(out_dir + "/desktop/sdkjs")) - [os.remove(p) for p, _, _ in walklist[::-1] if len(os.listdir(p)) == 0] - base.copy_file(base_dir + "/../web-apps/apps/api/documents/index.html.desktop", out_dir + "/desktop/web-apps/apps/api/documents/index.html") build_interface(base_dir + "/../desktop-apps/common/loginpage/build") @@ -109,7 +105,10 @@ def make(): # JS build def _run_npm(directory): - return base.cmd_in_dir(directory, "npm", ["install"]) + retValue = base.cmd_in_dir(directory, "npm", ["install"], True) + if (0 != retValue): + retValue = base.cmd_in_dir(directory, "npm", ["install", "--verbose"]) + return retValue def _run_npm_ci(directory): return base.cmd_in_dir(directory, "npm", ["ci"]) @@ -122,7 +121,7 @@ def _run_grunt(directory, params=[]): def build_interface(directory): _run_npm(directory) - _run_grunt(directory, ["--force"] + base.web_apps_addons_param()) + _run_grunt(directory, ["--force", "--verbose"] + base.web_apps_addons_param()) return def get_build_param(minimize=True): diff --git a/scripts/config.py b/scripts/config.py index 4258368..4580112 100644 --- a/scripts/config.py +++ b/scripts/config.py @@ -24,7 +24,7 @@ def parse(): # all platforms global platforms - platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", + platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "win_arm64", "linux_64", "linux_32", "linux_arm64", "mac_64", "mac_arm64", "ios", @@ -56,6 +56,10 @@ def parse(): if ("mac" == host_platform) and check_option("platform", "mac_arm64") and not base.is_os_arm(): if not check_option("platform", "mac_64"): options["platform"] = "mac_64 " + options["platform"] + + if ("windows" == host_platform) and check_option("platform", "win_arm64") and not base.is_os_arm(): + if not check_option("platform", "win_64"): + options["platform"] = "win_64 " + options["platform"] if ("linux" == host_platform) and check_option("platform", "linux_arm64") and not base.is_os_arm(): if not 
check_option("platform", "linux_64"): @@ -163,6 +167,9 @@ def check_compiler(platform): if (0 == platform.find("win")): compiler["compiler"] = "msvc" + options["vs-version"] compiler["compiler_64"] = "msvc" + options["vs-version"] + "_64" + if (0 == platform.find("win_arm")): + compiler["compiler"] = "msvc" + options["vs-version"] + "_arm" + compiler["compiler_64"] = "msvc" + options["vs-version"] + "_arm64" elif (0 == platform.find("linux")): compiler["compiler"] = "gcc" compiler["compiler_64"] = "gcc_64" diff --git a/scripts/core_common/make_common.py b/scripts/core_common/make_common.py index 8801094..f8e8384 100755 --- a/scripts/core_common/make_common.py +++ b/scripts/core_common/make_common.py @@ -21,6 +21,7 @@ import md import hunspell import glew import harfbuzz +import oo_brotli import hyphen import googletest import libvlc @@ -51,6 +52,7 @@ def make(): glew.make() hyphen.make() googletest.make() + oo_brotli.make() if config.check_option("build-libvlc", "1"): libvlc.make() diff --git a/scripts/core_common/modules/android/icu_android.py b/scripts/core_common/modules/android/icu_android.py index fb37b64..bfa06a1 100755 --- a/scripts/core_common/modules/android/icu_android.py +++ b/scripts/core_common/modules/android/icu_android.py @@ -57,14 +57,9 @@ cpp_flags = [ def fetch_icu(): if not base.is_dir(current_dir + "icu"): - base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + icu_major, "https://github.com/unicode-org/icu.git", current_dir + "icu2"]) + base.cmd("git", ["clone", "--depth", "1", "--branch", "release-" + major + "-" + minor, "https://github.com/unicode-org/icu.git", current_dir + "icu2"]) base.copy_dir(current_dir + "icu2/icu4c", current_dir + "icu") base.delete_dir_with_access_error(current_dir + "icu2") - - if ("linux" == base.host_platform()): - base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale") - if False and ("mac" == base.host_platform()): - base.replaceInFile(current_dir + 
"/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",") return def build_host(): diff --git a/scripts/core_common/modules/boost.py b/scripts/core_common/modules/boost.py index fb6a843..3b75d08 100644 --- a/scripts/core_common/modules/boost.py +++ b/scripts/core_common/modules/boost.py @@ -85,8 +85,18 @@ def make(): base.cmd("b2.exe", ["headers"]) base.cmd("b2.exe", ["--clean"]) base.cmd("b2.exe", ["--prefix=./../build/win_32", "link=static", "--with-filesystem", "--with-system", "--with-date_time", "--with-regex", "--toolset=" + win_toolset, "address-model=32", "install"]) + if (-1 != config.option("platform").find("win_arm64") and not base.is_file("../build/win_arm64/lib/libboost_system-" + win_vs_version + "-mt-a64-1_72.lib")): + boost_bat = [] + boost_bat.append("call bootstrap.bat " + win_boot_arg) # first build b2 for win64, so vcvarsall_call with arm64 later + vcvarsall_call = ("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + "x64_arm64") + boost_bat.append(vcvarsall_call) + boost_bat.append("call b2.exe headers") + boost_bat.append("call b2.exe --clean") + boost_bat.append("call b2.exe --prefix=./../build/win_arm64 architecture=arm link=static --with-filesystem --with-system --with-date_time --with-regex --toolset=" + win_toolset + " address-model=64 install") + base.run_as_bat(boost_bat) correct_install_includes_win(base_dir, "win_64") - correct_install_includes_win(base_dir, "win_32") + correct_install_includes_win(base_dir, "win_32") + correct_install_includes_win(base_dir, "win_arm64") if config.check_option("platform", "linux_64") and not base.is_dir("../build/linux_64"): if config.option("custom-sysroot") == "": diff --git a/scripts/core_common/modules/cef.py b/scripts/core_common/modules/cef.py index 6dda910..0556086 100644 --- a/scripts/core_common/modules/cef.py +++ b/scripts/core_common/modules/cef.py @@ -13,13 +13,12 @@ def make(): old_cur = os.getcwd() 
os.chdir(base_dir) - platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64"] + platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64", "win_arm64"] for platform in platforms: if not config.check_option("platform", platform): continue - # url = "http://d2ettrnqo7v976.cloudfront.net/cef/" url = "https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/cef/" archive_name = "./cef_binary.7z" diff --git a/scripts/core_common/modules/icu.py b/scripts/core_common/modules/icu.py index 81074d1..8e40111 100755 --- a/scripts/core_common/modules/icu.py +++ b/scripts/core_common/modules/icu.py @@ -12,10 +12,9 @@ import icu_android def fetch_icu(major, minor): if (base.is_dir("./icu2")): base.delete_dir_with_access_error("icu2") - base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + major, "https://github.com/unicode-org/icu.git", "./icu2"]) + base.cmd("git", ["clone", "--depth", "1", "--branch", "release-" + major + "-" + minor, "https://github.com/unicode-org/icu.git", "./icu2"]) base.copy_dir("./icu2/icu4c", "./icu") base.delete_dir_with_access_error("icu2") - #base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"]) return def clear_module(): @@ -36,15 +35,15 @@ def make(): old_cur = os.getcwd() os.chdir(base_dir) - base.check_module_version("3", clear_module) + base.check_module_version("6", clear_module) if (-1 != config.option("platform").find("android")): icu_android.make() os.chdir(base_dir) - icu_major = "58" - icu_minor = "3" + icu_major = "74" + icu_minor = "2" if not base.is_dir("icu"): fetch_icu(icu_major, icu_minor) @@ -58,36 +57,56 @@ def make(): need_platforms.append("win_64") if (-1 != config.option("platform").find("win_32")): need_platforms.append("win_32") + if (-1 != 
config.option("platform").find("win_arm64")): + need_platforms.append("win_64") # for exe files + need_platforms.append("win_arm64") + for platform in need_platforms: if not config.check_option("platform", platform) and not config.check_option("platform", platform + "_xp"): continue + if not base.is_dir(platform + "/build"): base.create_dir(platform) compile_bat = [] compile_bat.append("setlocal") - compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64")) - compile_bat.append("call MSBuild.exe icu/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + ("Win32" if base.platform_is_32(platform) else "X64")) + + args = { + "win_32" : { + "msbuild_platfrom" : "Win32", + "vcvarsall_arch" : "x86", + "out_bin_dir" : "icu/bin/", + "out_lib_dir" : "icu/lib/" + }, + "win_64" : { + "msbuild_platfrom" : "X64", + "vcvarsall_arch" : "x64", + "out_bin_dir" : "icu/bin64/", + "out_lib_dir" : "icu/lib64/" + }, + "win_arm64" : { + "msbuild_platfrom" : "ARM64", + "vcvarsall_arch" : "x64_arm64", + "out_bin_dir" : "icu/binARM64/", + "out_lib_dir" : "icu/libARM64/" + } + } + + platform_args = args[platform] + + compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + platform_args['vcvarsall_arch']) + compile_bat.append("call MSBuild.exe icu/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + platform_args['msbuild_platfrom']) compile_bat.append("endlocal") base.run_as_bat(compile_bat) - bin_dir = "icu/bin64/" if ("win_64" == platform) else "icu/bin/" - lib_dir = "icu/lib64/" if ("win_64" == platform) else "icu/lib/" + base.create_dir(platform + "/build") - base.copy_file(bin_dir + "icudt" + icu_major + ".dll", platform + "/build/") - base.copy_file(bin_dir + "icuuc" + icu_major + ".dll", platform + "/build/") - base.copy_file(lib_dir + "icudt.lib", platform + "/build/") - 
base.copy_file(lib_dir + "icuuc.lib", platform + "/build/") + base.copy_file(platform_args['out_bin_dir'] + "icudt" + icu_major + ".dll", platform + "/build/") + base.copy_file(platform_args['out_bin_dir'] + "icuuc" + icu_major + ".dll", platform + "/build/") + base.copy_file(platform_args['out_lib_dir'] + "icudt.lib", platform + "/build/") + base.copy_file(platform_args['out_lib_dir'] + "icuuc.lib", platform + "/build/") os.chdir(old_cur) return if ("linux" == base.host_platform()): - if not base.is_file("./icu/source/i18n/digitlst.cpp.bak"): - base.copy_file("./icu/source/i18n/digitlst.cpp", "./icu/source/i18n/digitlst.cpp.bak") - base.replaceInFile("./icu/source/i18n/digitlst.cpp", "xlocale", "locale") - if base.is_dir(base_dir + "/linux_64"): - base.delete_dir(base_dir + "/linux_64") - if base.is_dir(base_dir + "/linux_arm64"): - base.delete_dir(base_dir + "/linux_arm64") - if not base.is_dir(base_dir + "/linux_64"): base.create_dir(base_dir + "/icu/cross_build") os.chdir("icu/cross_build") diff --git a/scripts/core_common/modules/icu_mac.py b/scripts/core_common/modules/icu_mac.py index 5441c8d..53a65d1 100755 --- a/scripts/core_common/modules/icu_mac.py +++ b/scripts/core_common/modules/icu_mac.py @@ -5,37 +5,29 @@ sys.path.append('../..') import base import os -def change_icu_defs(current_dir, arch): - icudef_file = current_dir + "/icudefs.mk" - icudef_file_old = current_dir + "/icudefs.mk.back" +def change_icu_defs(arch): + old_env = dict(os.environ) param = "-arch x86_64" if arch == "arm64": - param = "-arch arm64 -isysroot " + base.find_mac_sdk() - + param = "-arch arm64" + + param += " -isysroot " + base.find_mac_sdk() param += " -mmacosx-version-min=10.12" - base.copy_file(icudef_file, icudef_file_old) + os.environ["CFLAGS"] = param + os.environ["CXXFLAGS"] = param + " --std=c++11" + os.environ["LDFLAGS"] = param - base.replaceInFile(icudef_file, "CFLAGS = ", "CFLAGS = " + param + " ") - base.replaceInFile(icudef_file, "CXXFLAGS = ", "CXXFLAGS = " + 
param + " ") - base.replaceInFile(icudef_file, "RPATHLDFLAGS =", "RPATHLDFLAGS2 =") - base.replaceInFile(icudef_file, "LDFLAGS = ", "LDFLAGS = " + param + " ") - base.replaceInFile(icudef_file, "RPATHLDFLAGS2 =", "RPATHLDFLAGS =") + return old_env +def restore_icu_defs(old_env): + os.environ.clear() + os.environ.update(old_env) return -def restore_icu_defs(current_dir): - icudef_file = current_dir + "/icudefs.mk" - icudef_file_old = current_dir + "/icudefs.mk.back" - - base.delete_file(icudef_file) - base.copy_file(icudef_file_old, icudef_file) - base.delete_file(icudef_file_old) - return - -icu_major = "58" -icu_minor = "3" +icu_major = "74" +icu_minor = "2" current_dir_old = os.getcwd() current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu" @@ -46,29 +38,33 @@ if not base.is_dir(current_dir + "/mac_cross_64"): base.create_dir(current_dir + "/mac_cross_64") os.chdir(current_dir + "/mac_cross_64") + old_env = change_icu_defs("x86_64") + base.cmd("../icu/source/runConfigureICU", ["MacOSX", - "--prefix=" + current_dir + "/mac_cross_64", "CFLAGS=-Os CXXFLAGS=--std=c++11"]) - - change_icu_defs(current_dir + "/mac_cross_64", "x86_64") + "--prefix=" + current_dir + "/mac_cross_64"]) base.cmd("make", ["-j4"]) base.cmd("make", ["install"], True) - restore_icu_defs(current_dir + "/mac_cross_64") + restore_icu_defs(old_env) os.chdir(current_dir) os.chdir(current_dir + "/icu/source") -base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm_64", - "--with-cross-build=" + current_dir + "/mac_cross_64", "VERBOSE=1"]) +old_env = change_icu_defs("arm64") -change_icu_defs(current_dir + "/icu/source", "arm64") +addon = [] +if not base.is_os_arm(): + addon = ["--host=aarch64-apple-darwin"] + +base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm_64", + "--with-cross-build=" + current_dir + "/mac_cross_64", "VERBOSE=1"] + addon) base.cmd("make", ["-j4"]) base.cmd("make", ["install"]) -restore_icu_defs(current_dir + "/icu/source") 
+restore_icu_defs(old_env) os.chdir(current_dir) diff --git a/scripts/core_common/modules/oo_brotli.py b/scripts/core_common/modules/oo_brotli.py new file mode 100644 index 0000000..d672885 --- /dev/null +++ b/scripts/core_common/modules/oo_brotli.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python + +import sys +sys.path.append('../..') +import base +import os + +def make(): + print("[fetch & build]: brotli") + base.cmd_in_dir(base.get_script_dir() + "/../../core/Common/3dParty/brotli", "./make.py") + return + +if __name__ == '__main__': + # manual compile + make() diff --git a/scripts/core_common/modules/openssl.py b/scripts/core_common/modules/openssl.py index 2fd89ef..f9eb434 100644 --- a/scripts/core_common/modules/openssl.py +++ b/scripts/core_common/modules/openssl.py @@ -54,6 +54,14 @@ def make(): qmake_bat.append("call nmake clean") qmake_bat.append("call nmake build_libs install") base.run_as_bat(qmake_bat, True) + if (-1 != config.option("platform").find("win_arm64")) and not base.is_dir("../build/win_arm64"): + base.create_dir("./../build/win_arm64") + qmake_bat = [] + qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64_arm64") + qmake_bat.append("perl Configure VC-WIN64-ARM --prefix=" + old_cur_dir + "\\build\\win_arm64 --openssldir=" + old_cur_dir + "\\build\\win_arm64 no-shared no-asm enable-md2") + qmake_bat.append("call nmake clean") + qmake_bat.append("call nmake build_libs install") + base.run_as_bat(qmake_bat, True) os.chdir(old_cur) # xp ---------------------------------------------------------------------------------------------------- os.chdir(base_dir + "/openssl") diff --git a/scripts/core_common/modules/v8.py b/scripts/core_common/modules/v8.py index 2d45efa..9aac2e8 100644 --- a/scripts/core_common/modules/v8.py +++ b/scripts/core_common/modules/v8.py @@ -26,6 +26,8 @@ def clean(): def is_main_platform(): if (config.check_option("platform", "win_64") or config.check_option("platform", "win_32")): return True + if 
(config.check_option("platform", "win_arm64")): + return True if (config.check_option("platform", "linux_64") or config.check_option("platform", "linux_32") or config.check_option("platform", "linux_arm64")): return True if config.check_option("platform", "mac_64"): diff --git a/scripts/core_common/modules/v8_89.py b/scripts/core_common/modules/v8_89.py index 5e8df9a..ca1fa2b 100644 --- a/scripts/core_common/modules/v8_89.py +++ b/scripts/core_common/modules/v8_89.py @@ -44,7 +44,13 @@ def make_args(args, platform, is_64=True, is_debug=False): args_copy.append("target_cpu=\\\"arm64\\\"") args_copy.append("v8_target_cpu=\\\"arm64\\\"") args_copy.append("use_sysroot=true") - + + if (platform == "win_arm64"): + args_copy = args[:] + args_copy.append("target_cpu=\\\"arm64\\\"") + args_copy.append("v8_target_cpu=\\\"arm64\\\"") + args_copy.append("is_clang=false") + if is_debug: args_copy.append("is_debug=true") if (platform == "windows"): @@ -73,12 +79,21 @@ def make_args(args, platform, is_64=True, is_debug=False): return "--args=\"" + " ".join(args_copy) + "\"" -def ninja_windows_make(args, is_64=True, is_debug=False): +def ninja_windows_make(args, is_64=True, is_debug=False, is_arm=False): directory_out = "out.gn/" - directory_out += ("win_64/" if is_64 else "win_32/") + + if is_arm: + directory_out += "win_arm64/" + else: + directory_out += ("win_64/" if is_64 else "win_32/") + directory_out += ("debug" if is_debug else "release") - base.cmd2("gn", ["gen", directory_out, make_args(args, "windows", is_64, is_debug)]) + if is_arm: + base.cmd2("gn", ["gen", directory_out, make_args(args, "win_arm64", is_64, is_debug)]) + else: + base.cmd2("gn", ["gen", directory_out, make_args(args, "windows", is_64, is_debug)]) + base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak") base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = 
v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc") base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj") @@ -88,7 +103,10 @@ def ninja_windows_make(args, is_64=True, is_debug=False): if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")): base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n") + base.cmd("ninja", ["-C", directory_out, "v8_wrappers"]) + if is_arm: + base.copy_file('./' + directory_out + '/obj/v8_wrappers.lib', './' + directory_out + '/x64/obj/v8_wrappers.lib') base.cmd("ninja", ["-C", directory_out]) base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja") base.move_file("./" + directory_out + "/obj/v8_wrappers.ninja.bak", "./" + directory_out + "/obj/v8_wrappers.ninja") @@ -154,8 +172,19 @@ def make(): if ("windows" == base.host_platform()): base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt") + + # fix for new depot_tools and vs2019, as VC folder contains a folder with a symbol in the name + # sorting is done by increasing version, so 0 is a dummy value + replace_src = " def to_int_if_int(x):\n try:\n return int(x)\n except ValueError:\n return x" + replace_dst = " def to_int_if_int(x):\n try:\n return int(x)\n except ValueError:\n return 0" + base.replaceInFile("v8/build/vs_toolchain.py", replace_src, replace_dst) + + if not base.is_file("v8/src/base/platform/wrappers.cc"): base.writeFile("v8/src/base/platform/wrappers.cc", "#include \"src/base/platform/wrappers.h\"\n") + + if config.check_option("platform", "win_arm64"): + base.replaceInFile("v8/build/toolchain/win/setup_toolchain.py", "SDK_VERSION = \'10.0.26100.0\'", "SDK_VERSION = \'10.0.22621.0\'") else: base.replaceInFile("depot_tools/gclient_paths.py", "@functools.lru_cache", "") @@ -207,6 +236,9 @@ def make(): if 
config.check_option("platform", "mac_64"): base.cmd2("gn", ["gen", "out.gn/mac_64", make_args(gn_args, "mac")]) base.cmd("ninja", ["-C", "out.gn/mac_64"]) + + if config.check_option("platform", "win_arm64") and not base.is_file("out.gn/win_arm64/release/obj/v8_monolith.lib"): + ninja_windows_make(gn_args, True, False, True) if config.check_option("platform", "win_64"): if (-1 != config.option("config").lower().find("debug")): diff --git a/scripts/deploy_builder.py b/scripts/deploy_builder.py index a54a2c1..f4df6e2 100644 --- a/scripts/deploy_builder.py +++ b/scripts/deploy_builder.py @@ -15,6 +15,7 @@ def make(): continue root_dir = base_dir + ("/" + native_platform + "/" + branding + ("/DocumentBuilder" if base.is_windows() else "/documentbuilder")) + root_dir_win64 = base_dir + "/win_64/" + branding + "/DocumentBuilder" if (base.is_dir(root_dir)): base.delete_dir(root_dir) base.create_dir(root_dir) @@ -57,18 +58,8 @@ def make(): # base.generate_check_linux_system(git_dir + "/build_tools", root_dir) # icu - if (0 == platform.find("win")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/icuuc58.dll") - - if (0 == platform.find("linux")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/libicudata.so.58") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/libicuuc.so.58") - - if (0 == platform.find("mac")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib") - + base.deploy_icu(core_dir, root_dir, platform) + # doctrenderer if isWindowsXP: base.copy_lib(core_build_dir + "/lib/" + platform_postfix + 
"/xp", root_dir, "doctrenderer") @@ -127,6 +118,18 @@ def make(): base.mac_correct_rpath_docbuilder(root_dir) base.create_x2t_js_cache(root_dir, "builder", platform) + + base.create_dir(root_dir + "/fonts") + base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana") + base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea") + base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra") + base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice") + base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf") + + if native_platform == "win_arm64": + base.delete_dir(root_dir + "/sdkjs") + base.copy_dir(root_dir_win64 + "/sdkjs", root_dir + "/sdkjs") + return # delete unnecessary builder files def delete_files(files): @@ -143,7 +146,7 @@ def make(): base.delete_dir(root_dir + "/sdkjs/cell/css") base.delete_file(root_dir + "/sdkjs/pdf/src/engine/viewer.js") base.delete_file(root_dir + "/sdkjs/common/spell/spell/spell.js.mem") - base.delete_dir(root_dir + "/sdkjs/common/Images") + base.delete_dir(root_dir + "/sdkjs/common/Images") return diff --git a/scripts/deploy_core.py b/scripts/deploy_core.py index cd653d3..fee9bb5 100644 --- a/scripts/deploy_core.py +++ b/scripts/deploy_core.py @@ -70,6 +70,11 @@ def make(): base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries") base.create_x2t_js_cache(archive_dir, "core", platform) base.delete_file(archive_dir + "/DoctRenderer.config") + + # just copy sdkjs to avoid executing arm64 x2t on non-arm system + if native_platform == "win_arm64" : + base.delete_dir(archive_dir + "/sdkjs") + base.copy_dir(archive_dir + "/../../../win_64/" + branding + "/core/sdkjs", archive_dir + "/sdkjs") # dictionaries base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False) diff --git a/scripts/deploy_desktop.py b/scripts/deploy_desktop.py index 74afbba..80b2e19 100644 
--- a/scripts/deploy_desktop.py +++ b/scripts/deploy_desktop.py @@ -82,19 +82,8 @@ def make(): # base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter") # icu - if (0 == platform.find("win")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/converter/icudt58.dll") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/converter/icuuc58.dll") - #base.copy_file(git_dir + "/desktop-apps/common/converter/package.config", root_dir + "/converter/package.config") - - if (0 == platform.find("linux")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/converter/libicudata.so.58") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/converter/libicuuc.so.58") - - if (0 == platform.find("mac")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/converter/libicudata.58.dylib") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/converter/libicuuc.58.dylib") - + base.deploy_icu(core_dir, root_dir + "/converter", platform) + # doctrenderer if isWindowsXP: base.copy_lib(build_libraries_path + "/xp", root_dir + "/converter", "doctrenderer") @@ -116,8 +105,6 @@ def make(): base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice") base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf") - base.copy_file(git_dir + "/desktop-apps/common/package/license/3dparty/3DPARTYLICENSE", root_dir + "/3DPARTYLICENSE") - # cef build_dir_name = "build" if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")): @@ -145,7 +132,7 @@ def make(): base.copy_dir(core_build_dir + "/bin/" + platform_postfix + "/editors_helper.app", root_dir + "/editors_helper.app") else: base.copy_exe(core_build_dir + 
"/bin/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "editors_helper") - + if isUseQt: base.qt_copy_lib("Qt5Core", root_dir) base.qt_copy_lib("Qt5Gui", root_dir) @@ -269,9 +256,14 @@ def make(): base.delete_file(root_dir + "/cef_sandbox.lib") base.delete_file(root_dir + "/libcef.lib") - isMacArmPlaformOnIntel = False - if (platform == "mac_arm64") and not base.is_os_arm(): - isMacArmPlaformOnIntel = True + is_host_not_arm = False + host_platform = "" + if (platform == "mac_arm64" or platform == "win_arm64") and not base.is_os_arm(): + is_host_not_arm = True + if platform == "mac_arm64": + host_platform = "mac_64" + elif platform == "win_arm64": + host_platform = "win_64" # all themes generate ---- base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allfontsgen") @@ -280,12 +272,12 @@ def make(): if (0 == platform.find("mac")): base.mac_correct_rpath_desktop(root_dir) - if isMacArmPlaformOnIntel: + if is_host_not_arm: sdkjs_dir = root_dir + "/editors/sdkjs" - end_find_platform = sdkjs_dir.rfind("/mac_arm64/") - sdkjs_dir_mac64 = sdkjs_dir[0:end_find_platform] + "/mac_64/" + sdkjs_dir[end_find_platform+11:] + end_find_platform = sdkjs_dir.rfind("/" + platform + "/") + sdkjs_dir_64 = sdkjs_dir[0:end_find_platform] + "/" + host_platform + "/" + sdkjs_dir[end_find_platform+11:] base.delete_dir(sdkjs_dir) - base.copy_dir(sdkjs_dir_mac64, sdkjs_dir) + base.copy_dir(sdkjs_dir_64, sdkjs_dir) else: themes_params = [] if ("" != config.option("themesparams")): diff --git a/scripts/deploy_mobile.py b/scripts/deploy_mobile.py index 64a4058..4dc1d1b 100644 --- a/scripts/deploy_mobile.py +++ b/scripts/deploy_mobile.py @@ -73,23 +73,8 @@ def make(): base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t") # icu - if (0 == platform.find("win")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll") - base.copy_file(core_dir + "/Common/3dParty/icu/" + 
platform + "/build/icuuc58.dll", root_dir + "/icuuc58.dll") - - if (0 == platform.find("linux")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/libicudata.so.58") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/libicuuc.so.58") - - if (0 == platform.find("mac")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib") + base.deploy_icu(core_dir, root_dir, platform) - if (0 == platform.find("android")): - #base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicudata.so", root_dir + "/libicudata.so") - #base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicuuc.so", root_dir + "/libicuuc.so") - base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/icudt58l.dat", root_dir + "/icudt58l.dat") - # js base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs") diff --git a/scripts/deploy_server.py b/scripts/deploy_server.py index 5c2c52d..5e07504 100644 --- a/scripts/deploy_server.py +++ b/scripts/deploy_server.py @@ -94,18 +94,8 @@ def make(): base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server", "", "../../../dictionaries") # icu - if (0 == platform.find("win")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", converter_dir + "/icudt58.dll") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", converter_dir + "/icuuc58.dll") - - if (0 == platform.find("linux")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", converter_dir + "/libicudata.so.58") - base.copy_file(core_dir + "/Common/3dParty/icu/" + 
platform + "/build/libicuuc.so.58", converter_dir + "/libicuuc.so.58") - - if (0 == platform.find("mac")): - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", converter_dir + "/libicudata.58.dylib") - base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", converter_dir + "/libicuuc.58.dylib") - + base.deploy_icu(core_dir, converter_dir, platform) + base.copy_v8_files(core_dir, converter_dir, platform) # builder diff --git a/scripts/develop/build_lo_linux.py b/scripts/develop/build_lo_linux.py new file mode 100644 index 0000000..6540663 --- /dev/null +++ b/scripts/develop/build_lo_linux.py @@ -0,0 +1,104 @@ +# This script was successfully executed on Ubuntu 22.04.5 LTS + +# Before starting, make sure that: +# 1. Python >= 3.9 +# 2. The current working folder with the script and its path do not contain spaces and use Latin characters. +# 3. Antivirus is turned off +# 4. There is enough free space on the disk (50GB Libre Office and during the unpacking of packages, it's recommended that you allocate at least 80 gigabytes of free space) +# 5. The current working folder with the script and its path do not contain spaces and use Latin characters. 
+ +# If the error "You must put some 'source' URIs in your sources.list" occurs, you need to run the command: + # software-properties-gtk +# in the terminal, and then under the "Ubuntu Software" tab, click "Source code" if it's not turned on and submit + +# after completion, the file will appear: +# current_folder_with_script/libreoffice_build/instdir/soffice +# debugging can be done via MVS 2022 +# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio +# or via VS Code with c/c++ tools +# https://wiki.documentfoundation.org/Development/IDE#Visual_Studio_Code_(VSCode) +# or via Qt Creator +# https://wiki.documentfoundation.org/Development/IDE#Qt_Creator +# or via attach to the soffice.bin process +# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb + +import subprocess +import sys +import os + +CONFIGURE_PARAMS = [ + "--enable-dbgutil", + "--without-doxygen", + "--enable-pch", + "--disable-ccache", + # "--with-visual-studio=2022", + '--enable-symbols="all"' + ] + +SUDO_DEPENDENCIES = [ + "git", "build-essential", "zip", "ccache", "junit4", "libkrb5-dev", "nasm", "graphviz", "python3", + "python3-dev", "python3-setuptools", "qtbase5-dev", "libkf5coreaddons-dev", "libkf5i18n-dev", + "libkf5config-dev", "libkf5windowsystem-dev", "libkf5kio-dev", "libqt5x11extras5-dev", "autoconf", + "libcups2-dev", "libfontconfig1-dev", "gperf", "openjdk-17-jdk", "doxygen", "libxslt1-dev", + "xsltproc", "libxml2-utils", "libxrandr-dev", "libx11-dev", "bison", "flex", "libgtk-3-dev", + "libgstreamer-plugins-base1.0-dev", "libgstreamer1.0-dev", "ant", "ant-optional", "libnss3-dev", + "libavahi-client-dev", "libxt-dev" + ] + +DIR_NAME = "libreoffice" +OFFICE_PATH = "instdir/program/soffice" + +class bcolors: + OKBLUE = '\033[94m' + OKCYAN = '\033[96m' + OKGREEN = '\033[92m' + FAIL = '\033[91m' + RESET = '\033[0m' + +def run_command(command, exit_on_error=True): + try: + subprocess.run(command, shell=True, check=True) + except 
subprocess.CalledProcessError as e: + print(f"{bcolors.FAIL}Error executing command: {command}{bcolors.RESET}") + if exit_on_error: + sys.exit(1) + +def install_dependencies(): + print("Updating package list...") + run_command("sudo apt update") + + print("Adding PPA for GCC/G++ update...") + run_command("sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test") + run_command("sudo apt update") + + print("Installing dependencies for LibreOffice...") + run_command("sudo apt-get build-dep -y libreoffice") + run_command(f"sudo apt-get install {' '.join(map(str, SUDO_DEPENDENCIES))}") + + print("Updating GCC/G++ to v12...") + run_command("sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 60 --slave /usr/bin/g++ g++ /usr/bin/g++-12", exit_on_error=False) + + print(bcolors.OKGREEN + "All dependencies successfully installed!" + bcolors.RESET) + +def build_libreoffice(): + print("Cloning LibreOffice repository...") + run_command(f"git clone https://git.libreoffice.org/core {DIR_NAME}", exit_on_error=False) + + print("Changing to build directory...") + os.chdir(f"./{DIR_NAME}") + + print("Start configurator autogen.sh...") + run_command(f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}") + + print(bcolors.OKCYAN + "Starting libreoffice build, this may take up to 24 hours and takes up about 20 GB of drive space. You will also most likely need at least 8 GBs of RAM, otherwise the machine might fall into swap and appear to freeze up..." + bcolors.RESET) + run_command("make") + + print(bcolors.OKGREEN + "LibreOffice build completed!" + bcolors.RESET) + + # print(bcolors.OKCYAN + "Running LibreOffice..." 
+ bcolors.RESET) + # run_command(OFFICE_PATH, exit_on_error=False) + + +if __name__ == "__main__": + install_dependencies() + build_libreoffice() diff --git a/scripts/develop/build_lo_windows.py b/scripts/develop/build_lo_windows.py new file mode 100644 index 0000000..064e34b --- /dev/null +++ b/scripts/develop/build_lo_windows.py @@ -0,0 +1,202 @@ +# Before starting, make sure that: +# 1. MVS 2022 is installed and the necessary individual components are in its installer +# - Windows Universal C Runtime +# - .NET Framework 4.x SDK (.NET Framework 5.x SDK and later are currently not supported. These don't register their information to registry, don't have csc.exe and they use dotnet command with csc.dll instead for compiling.) +# - C++ 20xx Redistributable MSMs (only required to build MSI installer) +# - C++ Clang Compiler for Windows (x.x.x) +# 2. Java JDK >= 17 +# 3. Antivirus is turned off +# 4. There is enough free space on the disk (50GB Libre Office, 50Gb cygwin64) + +# after completion, the files will appear: +# {LO_BUILD_PATH}/sources/libo-core/instdir/program/soffice.exe +# {LO_BUILD_PATH}/sources/libo-core/LibreOffice.sln +# debugging can be done via MVS 2022 +# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio +# or via attach to the soffice.bin process +# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb + +import sys + +sys.path.append('../../scripts') +import threading + +import os +import subprocess +import shutil +import argparse +import base + +CYGWIN_DOWNLOAD_URL = 'https://cygwin.com/setup-x86_64.exe' +CYGWIN_TEMP_PATH = './tmp' +CYGWIN_SETUP_FILENAME = 'setup-x86_64.exe' +CYGWIN_SETUP_PARAMS = [ + "-P", "autoconf", + "-P", "automake", + "-P", "bison", + "-P", "cabextract", + "-P", "doxygen", + "-P", "flex", + "-P", "gawk=5.2.2-1", + "-P", "gcc-g++", + "-P", "gettext-devel", + "-P", "git", + "-P", "gnupg", + "-P", "gperf", + "-P", "make", + "-P", "mintty", + "-P", "nasm", + "-P", "openssh", 
+ "-P", "openssl", + "-P", "patch", + "-P", "perl", + "-P", "python", + "-P", "python3", + "-P", "pkg-config", + "-P", "rsync", + "-P", "unzip", + "-P", "vim", + "-P", "wget", + "-P", "zip", + "-P", "perl-Archive-Zip", + "-P", "perl-Font-TTF", + "-P", "perl-IO-String", + "--no-admin", + "--quiet-mode" +] +CYGWIN_BAT_PATH = 'C:/cygwin64/Cygwin.bat' +LO_BUILD_PATH = os.path.normpath(os.path.join(os.getcwd(), '../../../LO')) + +CONFIGURE_PARAMS = [f'--with-external-tar="{LO_BUILD_PATH}/sources/lo-externalsrc"', + f'--with-junit="{LO_BUILD_PATH}/sources/junit-4.10.jar"', + f'--with-ant-home="{LO_BUILD_PATH}/sources/apache-ant-1.9.5"', + "--enable-pch", + "--disable-ccache", + "--with-visual-studio=2022", + "--enable-dbgutil", + '--enable-symbols="all"'] + + +def create_folder_safe(folder_path): + if not os.path.exists(folder_path): + try: + os.mkdir(folder_path) + print(f"Folder '{folder_path}' created successfully.") + except Exception as e: + print(f"Error creating folder: {e}") + else: + print(f"Folder '{folder_path}' already exists.") + + +class CygwinRunner: + @staticmethod + def process_commands(commands: list[str]): + proc = subprocess.Popen( + [CYGWIN_BAT_PATH], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, + shell=True, creationflags=subprocess.CREATE_NEW_CONSOLE + ) + + def read_stdout(): + for line in iter(proc.stdout.readline, ''): + sys.stdout.write(line) + proc.stdout.close() + + def read_stderr(): + for line in iter(proc.stderr.readline, ''): + sys.stderr.write(line) + proc.stderr.close() + + stdout_thread = threading.Thread(target=read_stdout) + stderr_thread = threading.Thread(target=read_stderr) + + stdout_thread.start() + stderr_thread.start() + + for command in commands: + proc.stdin.write(command + '\n') + proc.stdin.flush() + + stdout_thread.join() + stderr_thread.join() + + proc.stdin.close() + + proc.wait() + + @staticmethod + def install_gnu_make(): + base.print_info("install_gnu_make") + commands = ['mkdir 
-p /opt/lo/bin', + 'cd /opt/lo/bin', + 'wget https://dev-www.libreoffice.org/bin/cygwin/make-4.2.1-msvc.exe', + 'cp make-4.2.1-msvc.exe make', + 'chmod +x make', + 'exit'] + CygwinRunner.process_commands(commands) + + @staticmethod + def install_ant_and_junit(): + base.print_info("install_ant_and_junit") + commands = [f'mkdir -p {LO_BUILD_PATH}/sources', + f'cd {LO_BUILD_PATH}/sources', + 'wget https://archive.apache.org/dist/ant/binaries/apache-ant-1.9.5-bin.tar.bz2', + 'tar -xjvf apache-ant-1.9.5-bin.tar.bz2', + 'wget http://downloads.sourceforge.net/project/junit/junit/4.10/junit-4.10.jar', + 'exit'] + CygwinRunner.process_commands(commands) + + @staticmethod + def clone_lo(): + base.print_info("clone_lo") + commands = [f'cd {LO_BUILD_PATH}/sources', + 'git clone https://gerrit.libreoffice.org/core libo-core', + 'exit'] + CygwinRunner.process_commands(commands) + + @staticmethod + def build_autogen(): + base.print_info("build_autogen") + commands = [f'cd {LO_BUILD_PATH}/sources/libo-core', + f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}", + 'exit'] + CygwinRunner.process_commands(commands) + + @staticmethod + def run_make_build(): + base.print_info("run_make") + commands = [f'cd {LO_BUILD_PATH}/sources/libo-core', + f'/opt/lo/bin/make gb_COLOR=1', + "exit"] + CygwinRunner.process_commands(commands) + + @staticmethod + def build_vs_integration(): + base.print_info("run_make") + commands = [f'cd {LO_BUILD_PATH}/sources/libo-core', + f'/opt/lo/bin/make gb_COLOR=1 vs-ide-integration', + "exit"] + CygwinRunner.process_commands(commands) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description="options") + parser.add_argument("--lo_build_path", dest="build_path", default=f'../../../LO') + parser.add_argument("--disable_sln", dest="disable_sln", action=argparse.BooleanOptionalAction) + args = parser.parse_args() + + LO_BUILD_PATH = args.build_path + DISABLE_SLN = args.disable_sln + 
create_folder_safe(f'{LO_BUILD_PATH}/sources/lo-externalsrc') + create_folder_safe(CYGWIN_TEMP_PATH) + os.chdir(CYGWIN_TEMP_PATH) + base.download(CYGWIN_DOWNLOAD_URL, CYGWIN_SETUP_FILENAME) + subprocess.run([CYGWIN_SETUP_FILENAME] + CYGWIN_SETUP_PARAMS) + os.chdir('..') + shutil.rmtree(CYGWIN_TEMP_PATH) + CygwinRunner.install_gnu_make() + CygwinRunner.install_ant_and_junit() + CygwinRunner.clone_lo() + CygwinRunner.build_autogen() + CygwinRunner.run_make_build() + if not DISABLE_SLN: + CygwinRunner.build_vs_integration() diff --git a/scripts/develop/git_operations.py b/scripts/develop/git_operations.py new file mode 100644 index 0000000..ce346ae --- /dev/null +++ b/scripts/develop/git_operations.py @@ -0,0 +1,342 @@ +#!/usr/bin/env python3 +""" +Git Operations Script +Provides functionality to clone repositories and create branches. +Uses existing methods from base module and integrates with release.py patterns. +""" + +import sys +import argparse +import logging +from typing import Dict + +# Add parent directory to path to import modules +sys.path.append('../') +import base +import config +import dependence + +# Setup logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +logger = logging.getLogger(__name__) + + +class GitOperations: + """Class to handle git clone and branch creation using existing base module methods.""" + + def __init__(self, branding: str = "onlyoffice", base_branch: str = "develop", + branding_url: str = "ONLYOFFICE/onlyoffice.git", branch_name: str = None, + modules: str = "core desktop builder server mobile"): + """ + Initialize GitOperations with branding configuration and configure repositories. 
+ + Args: + branding: Branding name (default: onlyoffice) + base_branch: Base branch to work from (default: develop) + branding_url: Relative path from git host base (default: ONLYOFFICE/onlyoffice.git) + branch_name: Name of the branch to create (required for branch operations) + modules: Modules to include (default: core desktop builder server mobile) + """ + self.branding = branding + self.base_branch = base_branch + self.branding_url = branding_url + self.branch_name = branch_name + self.modules = modules + self.work_dir = None + + # Configure repositories immediately + self._configure() + + # Update repositories after configuration + repositories = self.get_configured_repositories() + #base.update_repositories(repositories) + + def create_branch(self, branch_name: str, repo_dir: str = None) -> bool: + """ + Create a new branch using base.cmd_in_dir. + + Args: + branch_name: Name of the new branch + repo_dir: Repository directory (optional, uses current if not specified) + from_branch: Branch to create from (optional, uses current if not specified) + + Returns: + bool: True if successful, False otherwise + """ + work_dir = repo_dir or self.work_dir + logger.info(f"Creating branch '{branch_name}' in {work_dir}") + + try: + # Create and checkout new branch + base.cmd_in_dir(work_dir, "git", ["checkout", "-b", branch_name], True) + logger.info(f"Successfully created branch: {branch_name}") + return True + except SystemExit: + logger.error(f"Failed to create branch: {branch_name}") + return False + + def push_branch(self, branch_name: str, repo_dir: str = None, set_upstream: bool = True) -> bool: + """ + Push a branch to remote repository using base.cmd_in_dir. 
+ + Args: + branch_name: Name of the branch to push + repo_dir: Repository directory (optional, uses current if not specified) + set_upstream: Whether to set upstream tracking (default: True) + + Returns: + bool: True if successful, False otherwise + """ + work_dir = repo_dir or self.work_dir + logger.info(f"Pushing branch '{branch_name}' in {work_dir}") + + try: + if set_upstream: + # Push branch and set upstream tracking + base.cmd_in_dir(work_dir, "git", ["push", "-u", "origin", branch_name], True) + else: + # Just push the branch + base.cmd_in_dir(work_dir, "git", ["push", "origin", branch_name], True) + + logger.info(f"Successfully pushed branch: {branch_name}") + return True + except SystemExit: + logger.error(f"Failed to push branch: {branch_name}") + return False + + def _configure(self) -> bool: + """ + Configure repositories using existing configure.py pattern from release.py. + + Returns: + bool: True if successful, False otherwise + """ + logger.info(f"Configuring and cloning repositories for branch: {self.base_branch}") + + try: + # Get build_tools origin and construct branding URL from git host base + build_tools_origin = base.git_get_origin() + # Extract git host base (everything up to the host) + # For https://github.com/ORG/build_tools.git -> https://github.com/ + # For git@github.com:ORG/build_tools.git -> git@github.com: + if '://' in build_tools_origin: # HTTPS + host_base = build_tools_origin.split('/', 3)[0] + '//' + build_tools_origin.split('/', 3)[2] + '/' + else: # SSH + host_base = build_tools_origin.split(':', 1)[0] + ':' + + branding_url = host_base + self.branding_url + + logger.info(f"Build tools origin: {build_tools_origin}") + logger.info(f"Git host base: {host_base}") + logger.info(f"Using branding URL: {branding_url}") + + # Check platform and dependencies like in release.py + platform = base.host_platform() + if platform == "windows": + dependence.check_pythonPath() + dependence.check_gitPath() + + # Run configure.py like in 
release.py + configure_args = [ + 'configure.py', + '--branding', self.branding, + '--branding-url', branding_url, + '--branch', self.base_branch, + '--module', self.modules, + '--update', '1', + '--clean', '0' + ] + + base.cmd_in_dir('../../', 'python', configure_args) + + # Parse configuration like in release.py + config.parse() + + # Update build_tools repository + base.git_update('build_tools') + + # Update branding repository + base.git_update(self.branding) + + # Correct defaults (the branding repo is already updated) + config.parse_defaults() + + logger.info("Successfully configured") + return True + + except Exception as e: + logger.error(f"Failed to configure and clone: {e}") + return False + + def get_configured_repositories(self) -> Dict: + """Get repositories using existing base.get_repositories() pattern from release.py.""" + repositories = base.get_repositories() + repositories['core-ext'] = [True, False] + repositories['build_tools'] = [True, False] + repositories[self.branding] = [True, False] + return repositories + + def _iterate_repositories(self, operation_func, operation_name: str) -> bool: + """ + Iterate over all repositories and apply the given operation function. 
+ + Args: + operation_func: Function to apply to each repository (takes repo_name and repo_path) + operation_name: Name of the operation for logging + + Returns: + bool: True if at least one operation succeeded, False otherwise + """ + repositories = self.get_configured_repositories() + success_count = 0 + total_count = len(repositories) + + for repo_name in repositories: + current_dir = repositories[repo_name][1] + repo_path = f"../../../{repo_name}" if current_dir == False else current_dir + + if base.is_dir(repo_path): + if operation_func(repo_name, repo_path): + success_count += 1 + else: + logger.warning(f"✗ Failed to {operation_name} in {repo_name}") + else: + logger.warning(f"Repository {repo_name} not found at {repo_path}") + + logger.info(f"{operation_name.capitalize()} completed in {success_count}/{total_count} repositories") + return success_count > 0 + + def delete_branch(self, branch_name: str, repo_dir: str = None, force: bool = False) -> bool: + """ + Delete a branch using base.cmd_in_dir. 
+ + Args: + branch_name: Name of the branch to delete + repo_dir: Repository directory (optional, uses current if not specified) + force: Whether to force delete the branch (default: False) + + Returns: + bool: True if successful, False otherwise + """ + work_dir = repo_dir or self.work_dir + logger.info(f"Deleting branch '{branch_name}' in {work_dir}") + + try: + # Switch to base branch first to avoid deleting current branch + base.cmd_in_dir(work_dir, "git", ["checkout", self.base_branch], True) + + # Delete local branch + delete_flag = "-D" if force else "-d" + base.cmd_in_dir(work_dir, "git", ["branch", delete_flag, branch_name], True) + logger.info(f"Successfully deleted local branch: {branch_name}") + + # Delete remote branch + try: + base.cmd_in_dir(work_dir, "git", ["push", "origin", "--delete", branch_name], True) + logger.info(f"Successfully deleted remote branch: {branch_name}") + except SystemExit: + logger.warning(f"Failed to delete remote branch: {branch_name} (may not exist)") + + return True + except SystemExit: + logger.error(f"Failed to delete branch: {branch_name}") + return False + + def create_branches(self) -> bool: + """ + Create a branch with the given name in all repositories. 
+ + Returns: + bool: True if successful, False otherwise + """ + logger.info(f"Creating branch '{self.branch_name}' in all repositories") + + def create_and_push_branch(repo_name: str, repo_path: str) -> bool: + """Create and push branch for a single repository.""" + if self.create_branch(self.branch_name, repo_path): + logger.info(f"✓ Created branch '{self.branch_name}' in {repo_name}") + # Push the created branch + if self.push_branch(self.branch_name, repo_path): + logger.info(f"✓ Pushed branch '{self.branch_name}' in {repo_name}") + return True + else: + logger.warning(f"✗ Failed to push branch '{self.branch_name}' in {repo_name}") + return False + else: + logger.warning(f"✗ Failed to create branch '{self.branch_name}' in {repo_name}") + return False + + try: + return self._iterate_repositories(create_and_push_branch, f"create and push branch '{self.branch_name}'") + except Exception as e: + logger.error(f"Failed to create branch in all repositories: {e}") + return False + + def remove_branches(self, force: bool = False) -> bool: + """ + Remove a branch with the given name from all repositories. 
+ + Args: + force: Whether to force delete the branch (default: False) + + Returns: + bool: True if successful, False otherwise + """ + logger.info(f"Removing branch '{self.branch_name}' from all repositories") + + def delete_branch_operation(repo_name: str, repo_path: str) -> bool: + """Delete branch for a single repository.""" + if self.delete_branch(self.branch_name, repo_path, force): + logger.info(f"✓ Removed branch '{self.branch_name}' from {repo_name}") + return True + else: + logger.warning(f"✗ Failed to remove branch '{self.branch_name}' from {repo_name}") + return False + + try: + return self._iterate_repositories(delete_branch_operation, f"remove branch '{self.branch_name}'") + except Exception as e: + logger.error(f"Failed to remove branch from all repositories: {e}") + return False + + +def main(): + """Main function to handle command line arguments.""" + parser = argparse.ArgumentParser(description='Git Operations Tool - Create and Remove Branches') + subparsers = parser.add_subparsers(dest='command', help='Available commands') + + # Create branch command (configure, clone and create branch in all repositories) + branch_parser = subparsers.add_parser('create', help='Configure, clone and create branch in all repositories') + branch_parser.add_argument('branch_name', help='Name of the branch to create') + branch_parser.add_argument('--base-branch', default='develop', help='Base branch to work from (default: develop)') + branch_parser.add_argument('--branding', default='onlyoffice', help='Branding name') + branch_parser.add_argument('--branding-url', default='ONLYOFFICE/onlyoffice.git', help='Relative path from git host base (default: ONLYOFFICE/onlyoffice.git)') + branch_parser.add_argument('--modules', default='core desktop builder server mobile', help='Modules to include') + + # Remove branch command (configure, clone and remove branch from all repositories) + remove_parser = subparsers.add_parser('remove', help='Configure, clone and remove branch 
from all repositories') + remove_parser.add_argument('branch_name', help='Name of the branch to remove') + remove_parser.add_argument('--base-branch', default='develop', help='Base branch to work from (default: develop)') + remove_parser.add_argument('--branding', default='onlyoffice', help='Branding name') + remove_parser.add_argument('--branding-url', default='ONLYOFFICE/onlyoffice.git', help='Relative path from git host base (default: ONLYOFFICE/onlyoffice.git)') + remove_parser.add_argument('--modules', default='core desktop builder server mobile', help='Modules to include') + remove_parser.add_argument('--force', action='store_true', help='Force delete the branch (equivalent to git branch -D)') + + args = parser.parse_args() + + if not args.command: + parser.print_help() + return + + git_ops = GitOperations(args.branding, args.base_branch, args.branding_url, args.branch_name, args.modules) + + if args.command == 'create': + success = git_ops.create_branches() + sys.exit(0 if success else 1) + elif args.command == 'remove': + success = git_ops.remove_branches(args.force) + sys.exit(0 if success else 1) + + +if __name__ == '__main__': + main() diff --git a/scripts/develop/run_server.py b/scripts/develop/run_server.py index c742888..8db3517 100644 --- a/scripts/develop/run_server.py +++ b/scripts/develop/run_server.py @@ -8,6 +8,13 @@ import dependence import traceback import develop +# if (sys.version_info[0] >= 3): + # unicode = str + +# host_platform = base.host_platform() +# if (host_platform == 'windows'): + # import libwindows + base_dir = base.get_script_dir(__file__) def install_module(path): @@ -21,11 +28,18 @@ def find_rabbitmqctl(base_path): return base.find_file(os.path.join(base_path, 'RabbitMQ Server'), 'rabbitmqctl.bat') def restart_win_rabbit(): + # todo maybe restarting is not relevant after many years and versions? 
base.print_info('restart RabbitMQ node to prevent "Erl.exe high CPU usage every Monday morning on Windows" https://groups.google.com/forum/#!topic/rabbitmq-users/myl74gsYyYg') rabbitmqctl = find_rabbitmqctl(os.environ['PROGRAMW6432']) or find_rabbitmqctl(os.environ['ProgramFiles(x86)']) if rabbitmqctl is not None: - base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app']) - base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app']) + try: + # code = libwindows.sudo(unicode(sys.executable), ['net', 'stop', 'rabbitmq']) + # code = libwindows.sudo(unicode(sys.executable), ['net', 'start', 'rabbitmq']) + base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app']) + base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app']) + except SystemExit: + base.print_error('Perhaps Erlang cookies are different: Replace %userprofile%/.erlang.cookie with %WINDIR%/System32/config/systemprofile/.erlang.cookie') + raise else: base.print_info('Missing rabbitmqctl.bat') diff --git a/scripts/license_checker/config.json b/scripts/license_checker/config.json index 6eb9cb5..f6c0237 100644 --- a/scripts/license_checker/config.json +++ b/scripts/license_checker/config.json @@ -198,6 +198,11 @@ "editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.h", "editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.m" ] + }, + { + "dir": "editors-webview-ios", + "fileExtensions": [".swift", ".xcconfig"], + "licensePath": "header.license", } ] } \ No newline at end of file diff --git a/scripts/license_checker/header.license b/scripts/license_checker/header.license index a2faf53..6286700 100644 --- a/scripts/license_checker/header.license +++ b/scripts/license_checker/header.license @@ -1,5 +1,5 @@ /* - * (c) Copyright Ascensio System SIA 2010-2024 + * (c) Copyright Ascensio System SIA 2010-2025 * * This program is a free software product. 
You can redistribute it and/or * modify it under the terms of the GNU Affero General Public License (AGPL) diff --git a/scripts/package_desktop.py b/scripts/package_desktop.py index 9545abc..8ec8d16 100644 --- a/scripts/package_desktop.py +++ b/scripts/package_desktop.py @@ -35,7 +35,7 @@ def s3_upload(files, dst): # def make_windows(): - global package_name, package_version, arch, xp, suffix + global package_name, package_version, arch, xp utils.set_cwd("desktop-apps\\win-linux\\package\\windows") package_name = branding.desktop_package_name @@ -47,7 +47,6 @@ def make_windows(): "windows_x86_xp": "x86" }[common.platform] xp = common.platform.endswith("_xp") - suffix = arch + ("-xp" if xp else "") if common.clean: utils.log_h2("desktop clean") @@ -60,10 +59,22 @@ def make_windows(): utils.delete_files("data\\*.exe") make_prepare() - make_zip() - make_inno() - make_advinst() - make_online() + if not xp: + make_zip() + make_zip("commercial") + make_inno() + make_inno("commercial") + if branding.onlyoffice: + make_inno("standalone") + make_inno("update") + make_advinst() + make_advinst("commercial") + else: + make_zip("xp") + make_inno("xp") + # Disable build online installer + # if common.platform == "windows_x86_xp": + # make_online() utils.set_cwd(common.workspace_dir) return @@ -83,101 +94,82 @@ def make_prepare(): utils.set_summary("desktop prepare", ret) return -def make_zip(): - zip_file = "%s-%s-%s.zip" % (package_name, package_version, suffix) +def make_zip(edition = "opensource"): + if edition == "commercial": zip_file = "%s-Commercial-%s-%s.zip" + elif edition == "xp": zip_file = "%s-XP-%s-%s.zip" + else: zip_file = "%s-%s-%s.zip" + zip_file = zip_file % (package_name, package_version, arch) args = [ "-Version", package_version, - "-Arch", arch + "-Arch", arch, + "-Target", edition ] - if xp: - args += ["-Target", "xp"] # if common.sign: # args += ["-Sign"] - utils.log_h2("desktop zip build") + utils.log_h2("desktop zip " + edition + " build") ret = 
utils.ps1("make_zip.ps1", args, verbose=True) - utils.set_summary("desktop zip build", ret) + utils.set_summary("desktop zip " + edition + " build", ret) if common.deploy and ret: - utils.log_h2("desktop zip deploy") + utils.log_h2("desktop zip " + edition + " deploy") ret = s3_upload([zip_file], "desktop/win/generic/") - utils.set_summary("desktop zip deploy", ret) + utils.set_summary("desktop zip " + edition + " deploy", ret) return -def make_inno(): - inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix) - inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix) - inno_update_file = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix) - update_wrapper = not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper) +def make_inno(edition = "opensource"): + if edition == "commercial": inno_file = "%s-Commercial-%s-%s.exe" + elif edition == "standalone": inno_file = "%s-Standalone-%s-%s.exe" + elif edition == "update": inno_file = "%s-Update-%s-%s.exe" + elif edition == "xp": inno_file = "%s-XP-%s-%s.exe" + else: inno_file = "%s-%s-%s.exe" + inno_file = inno_file % (package_name, package_version, arch) args = [ "-Version", package_version, - "-Arch", arch + "-Arch", arch, + "-Target", edition ] if common.sign: args += ["-Sign"] - utils.log_h2("desktop inno build") if xp: - ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp"], verbose=True) - else: - ret = utils.ps1("make_inno.ps1", args, verbose=True) - utils.set_summary("desktop inno build", ret) + args += ["-TimestampServer", "http://timestamp.comodoca.com/authenticode"] - if branding.onlyoffice and not xp: - utils.log_h2("desktop inno standalone") - ret = utils.ps1("make_inno.ps1", args + ["-Target", "standalone"], verbose=True) - utils.set_summary("desktop inno standalone build", ret) - - if update_wrapper and not xp: - utils.log_h2("desktop inno update build") - ret = utils.ps1("make_inno.ps1", args + 
["-Target", "update"], verbose=True) - utils.set_summary("desktop inno update build", ret) - - if common.deploy: - utils.log_h2("desktop inno deploy") - ret = s3_upload([inno_file], "desktop/win/inno/") - utils.set_summary("desktop inno deploy", ret) - - if branding.onlyoffice and not xp: - utils.log_h2("desktop inno standalone deploy") - ret = s3_upload([inno_sa_file], "desktop/win/inno/") - utils.set_summary("desktop inno standalone deploy", ret) - - utils.log_h2("desktop inno update deploy") - if utils.is_file(inno_update_file): - ret = s3_upload([inno_update_file], "desktop/win/inno/") - elif utils.is_file(inno_file): - ret = s3_upload([inno_file], "desktop/win/inno/" + inno_update_file) - else: - ret = False - utils.set_summary("desktop inno update deploy", ret) - return - -def make_advinst(): - if not common.platform in ["windows_x64", "windows_x86"]: - return - advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix) - args = [ - "-Version", package_version, - "-Arch", arch - ] - if common.sign: - args += ["-Sign"] - - utils.log_h2("desktop advinst build") - ret = utils.ps1("make_advinst.ps1", args, verbose=True) - utils.set_summary("desktop advinst build", ret) + utils.log_h2("desktop inno " + edition + " build") + ret = utils.ps1("make_inno.ps1", args, verbose=True) + utils.set_summary("desktop inno " + edition + " build", ret) if common.deploy and ret: - utils.log_h2("desktop advinst deploy") + utils.log_h2("desktop inno " + edition + " deploy") + ret = s3_upload([inno_file], "desktop/win/inno/") + utils.set_summary("desktop inno " + edition + " deploy", ret) + return + +def make_advinst(edition = "opensource"): + if edition == "commercial": advinst_file = "%s-Commercial-%s-%s.msi" + else: advinst_file = "%s-%s-%s.msi" + advinst_file = advinst_file % (package_name, package_version, arch) + args = [ + "-Version", package_version, + "-Arch", arch, + "-Target", edition + ] + if common.sign: + args += ["-Sign"] + + utils.log_h2("desktop advinst 
" + edition + " build") + ret = utils.ps1("make_advinst.ps1", args, verbose=True) + utils.set_summary("desktop advinst " + edition + " build", ret) + + if common.deploy and ret: + utils.log_h2("desktop advinst " + edition + " deploy") ret = s3_upload([advinst_file], "desktop/win/advinst/") - utils.set_summary("desktop advinst deploy", ret) + utils.set_summary("desktop advinst " + edition + " deploy", ret) return def make_online(): - if not common.platform in ["windows_x86_xp"]: - return - online_file = "%s-%s-%s.exe" % ("OnlineInstaller", package_version, suffix) + online_file = utils.glob_file("OnlineInstaller-" + package_version + "*.exe") + utils.log_h2("desktop online installer build") ret = utils.is_file(online_file) utils.set_summary("desktop online installer build", ret) @@ -333,20 +325,23 @@ def make_sparkle_updates(): def make_linux(): utils.set_cwd("desktop-apps/win-linux/package/linux") - utils.log_h2("desktop build") - make_args = [t["make"] for t in branding.desktop_make_targets] - if common.platform == "linux_aarch64": - make_args += ["-e", "UNAME_M=aarch64"] - if not branding.onlyoffice: - make_args += ["-e", "BRANDING_DIR=../../../../" + common.branding + "/desktop-apps/win-linux/package/linux"] - ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True) - utils.set_summary("desktop build", ret) + for edition in ["opensource", "commercial"]: + utils.log_h2("desktop " + edition + " build") + make_args = [t["make"] for t in branding.desktop_make_targets] + if edition == "commercial": + make_args += ["-e", "PACKAGE_EDITION=commercial"] + if common.platform == "linux_aarch64": + make_args += ["-e", "UNAME_M=aarch64"] + if not branding.onlyoffice: + make_args += ["-e", "BRANDING_DIR=../../../../" + common.branding + "/desktop-apps/win-linux/package/linux"] + ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True) + utils.set_summary("desktop " + edition + " build", ret) - if common.deploy: - for t in 
branding.desktop_make_targets: - utils.log_h2("desktop " + t["make"] + " deploy") - ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) - utils.set_summary("desktop " + t["make"] + " deploy", ret) + if common.deploy: + for t in branding.desktop_make_targets: + utils.log_h2("desktop " + edition + " " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("desktop " + edition + " " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/qmake.py b/scripts/qmake.py index 1fddf9b..6598429 100644 --- a/scripts/qmake.py +++ b/scripts/qmake.py @@ -25,7 +25,7 @@ def get_j_num(): def check_support_platform(platform): qt_dir = base.qt_setup(platform) - if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"): + if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe") and not base.is_file(qt_dir + "/bin/qmake.bat"): return False return True @@ -91,6 +91,7 @@ def make(platform, project, qmake_config_addon="", is_no_errors=False): build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon qmake_app = qt_dir + "/bin/qmake" + # non windows platform if not base.is_windows(): if base.is_file(qt_dir + "/onlyoffice_qt.conf"): @@ -122,12 +123,17 @@ def make(platform, project, qmake_config_addon="", is_no_errors=False): if ("" != qmake_addon_string): qmake_addon_string = " " + qmake_addon_string + vcvarsall_arch = ("x86" if base.platform_is_32(platform) else "x64") + qmake_env_addon = base.get_env("QT_QMAKE_ADDON") + if (qmake_env_addon != ""): + qmake_env_addon += " " + qmake_bat = [] - qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64")) + qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + vcvarsall_arch) qmake_addon_string = "" if ("" != config.option("qmake_addon")): qmake_addon_string = " " + (" 
").join(["\"" + addon + "\"" for addon in qmake_addon]) - qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string) + qmake_bat.append("call \"" + qmake_app + "\" -nocache " + qmake_env_addon + file_pro + config_params_string + qmake_addon_string) if ("1" == config.option("clean")): qmake_bat.append("call nmake " + " ".join(clean_params)) qmake_bat.append("call nmake " + " ".join(distclean_params)) diff --git a/scripts/sdkjs_common/generate_builder_intarface.py b/scripts/sdkjs_common/generate_builder_intarface.py index 3bf1e42..ede609d 100644 --- a/scripts/sdkjs_common/generate_builder_intarface.py +++ b/scripts/sdkjs_common/generate_builder_intarface.py @@ -13,7 +13,7 @@ def writeFile(path, content): if (os.path.isfile(path)): os.remove(path) - with open(path, "w") as file: + with open(path, "w", encoding='utf-8') as file: file.write(content) return @@ -160,6 +160,12 @@ class EditorApi(object): editors_support = decoration[index_type_editors:index_type_editors_end] if -1 == editors_support.find(self.type): return + + decoration = "\n".join( + line for line in decoration.splitlines() + if "@typeofeditors" not in line and "@see" not in line + ) + # optimizations for first file if 0 == self.numfile: self.records.append(decoration + "\n" + code + "\n") @@ -208,7 +214,7 @@ if __name__ == "__main__": type=str, help="Destination directory for the generated documentation", nargs='?', # Indicates the argument is optional - default="../../../onlyoffice.github.io\sdkjs-plugins\content\macros\libs/" # Default value + default="../../../web-apps/vendor/monaco/libs/" # Default value ) args = parser.parse_args() @@ -217,7 +223,7 @@ if __name__ == "__main__": if True == os.path.isdir(args.destination): shutil.rmtree(args.destination, ignore_errors=True) os.mkdir(args.destination) - convert_to_interface(["word/apiBuilder.js"], "word") + convert_to_interface(["word/apiBuilder.js", "../sdkjs-forms/apiBuilder.js"], "word") 
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide") convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell") os.chdir(old_cur) diff --git a/scripts/sdkjs_common/jsdoc/README.md b/scripts/sdkjs_common/jsdoc/README.md index ba77eaf..9432882 100644 --- a/scripts/sdkjs_common/jsdoc/README.md +++ b/scripts/sdkjs_common/jsdoc/README.md @@ -1,7 +1,14 @@ - # Documentation Generation Guide -This guide explains how to generate documentation for Onlyoffice Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors. +This guide explains how to generate documentation for Onlyoffice Builder +and Plugins (Methods/Events) API using the following Python scripts: + +- `office-api/generate_docs_json.py` +- `office-api/generate_docs_md.py` +- `plugins/generate_docs_methods_json.py` +- `plugins/generate_docs_methods_md.py` +- `plugins/generate_docs_events_json.py` +- `plugins/generate_docs_events_md.py` ## Requirements @@ -20,61 +27,112 @@ npm install ## Scripts Overview -### `generate_docs_json.py` +### `office-api/generate_docs_json.py` This script generates JSON documentation based on the `apiBuilder.js` files. - **Usage**: + ```bash python generate_docs_json.py output_path ``` - **Parameters**: - - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api`. + - `output_path` (optional): The directory where the JSON documentation + will be saved. If not specified, the default path is + `../../../../office-js-api-declarations/office-js-api`. -### `generate_docs_plugins_json.py` - -This script generates JSON documentation based on the `api_plugins.js` files. 
- -- **Usage**: - ```bash - python generate_docs_plugins_json.py output_path - ``` - -- **Parameters**: - - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api-plugins`. - -### `generate_docs_md.py` +### `office-api/generate_docs_md.py` This script generates Markdown documentation from the `apiBuilder.js` files. - **Usage**: + ```bash python generate_docs_md.py output_path ``` - **Parameters**: - - `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `../../../../office-js-api/`. + - `output_path` (optional): The directory where the Markdown documentation + will be saved. If not specified, the default path is + `../../../../office-js-api/`. + +### `plugins/generate_docs_methods_json.py` + +This script generates JSON documentation based on the `api_plugins.js` files. + +- **Usage**: + + ```bash + python generate_docs_methods_json.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the JSON documentation + will be saved. If not specified, the default path is + `../../../../office-js-api-declarations/office-js-api-plugins`. + +### `plugins/generate_docs_events_json.py` + +This script generates JSON documentation based on the `plugin-events.js` files. + +- **Usage**: + + ```bash + python generate_docs_events_json.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the JSON documentation + will be saved. If not specified, the default path is + `../../../../office-js-api-declarations/office-js-api-plugins`. + +### `plugins/generate_docs_methods_md.py` + +This script generates Markdown documentation from the `api_plugins.js` files. 
+ +- **Usage**: + + ```bash + python generate_docs_methods_md.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the Markdown documentation + will be saved. If not specified, the default path is + `../../../../office-js-api/`. + +### `plugins/generate_docs_events_md.py` + +This script generates Markdown documentation from the `plugin-events.js` files. + +- **Usage**: + + ```bash + python generate_docs_events_md.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the Markdown documentation + will be saved. If not specified, the default path is + `../../../../office-js-api/`. ## Example To generate JSON documentation with the default output path: + ```bash python generate_docs_json.py /path/to/save/json ``` -To generate JSON documentation with the default output path: -```bash -python generate_docs_plugins_json.py /path/to/save/json -``` - To generate Markdown documentation and specify a custom output path: + ```bash python generate_docs_md.py /path/to/save/markdown ``` ## Notes -- Make sure to have all necessary permissions to run these scripts and write to the specified directories. +- Make sure to have all necessary permissions to run these scripts and write + to the specified directories. - The output directories will be created if they do not exist. 
- diff --git a/scripts/sdkjs_common/jsdoc/config/builder/slide.json b/scripts/sdkjs_common/jsdoc/config/builder/slide.json deleted file mode 100644 index 96b5dbf..0000000 --- a/scripts/sdkjs_common/jsdoc/config/builder/slide.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "source": { - "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"] - }, - "plugins": ["./correct_doclets.js"], - "opts": { - "destination": "./out", - "recurse": true, - "encoding": "utf8" - }, - "templates": { - "json": { - "pretty": true - } - } -} diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md_site.py b/scripts/sdkjs_common/jsdoc/generate_docs_md_site.py deleted file mode 100644 index 147f72f..0000000 --- a/scripts/sdkjs_common/jsdoc/generate_docs_md_site.py +++ /dev/null @@ -1,586 +0,0 @@ -import os -import json -import re -import shutil -import argparse -import generate_docs_json - -# Configuration files -editors = { - "word": "text-document-api", - "cell": "spreadsheet-api", - "slide": "presentation-api", - "forms": "form-api" -} - -missing_examples = [] -used_enumerations = set() - -cur_editor_name = None - -_CODE_BLOCK_RE = re.compile(r'(```.*?```)', re.DOTALL) -_QSTRING_RE = re.compile(r'(["\'])(.*?)(? 
1 else None - - if ref.startswith('/'): - # Handle reserved links using mapping - if ref in reserved_links: - url = reserved_links[ref] - display_text = label if label else ref - return f"[{display_text}]({url})" - else: - # If the link is not in the mapping, return the original construction - return match.group(0) - elif ref.startswith("global#"): - # Handle links to typedef (similar logic as before) - typedef_name = ref.split("#")[1] - used_enumerations.add(typedef_name) - display_text = label if label else typedef_name - return f"[{display_text}]({root}Enumeration/{typedef_name}.md)" - else: - # Handle links to class methods like ClassName#MethodName - try: - class_name, method_name = ref.split("#") - except ValueError: - return match.group(0) - display_text = label if label else ref # Keep the full notation, e.g., "Api#CreateSlide" - return f"[{display_text}]({root}{class_name}/Methods/{method_name}.md)" - - return re.sub(r'{@link\s+([^}]+)}', replace_link, text) - -def correct_description(string, root=''): - """ - Cleans or transforms specific tags in the doclet description: - - => ** (bold text) - - ... => 💡 ... - - {@link ...} is replaced with a Markdown link - - If the description is missing, returns a default value. - - All '\r' characters are replaced with '\n'. - """ - if string is None: - return 'No description provided.' 
- - # Line breaks - string = string.replace('\r', '\\\n') - - # Replace tags with Markdown bold formatting - string = re.sub(r'', '-**', string) - string = re.sub(r'', '**', string) - - # Replace tags with an icon and text - string = re.sub(r'(.*?)', r'💡 \1', string, flags=re.DOTALL) - - # Process {@link ...} constructions - string = process_link_tags(string, root) - - return string - -def correct_default_value(value, enumerations, classes): - if value is None or value == '': - return '' - - if isinstance(value, bool): - value = "true" if value else "false" - else: - value = str(value) - - return generate_data_types_markdown([value], enumerations, classes) - -def remove_line_breaks(string): - return re.sub(r'[\r\n]+', ' ', string) - -# Convert Array. => T[] (including nested arrays). -def convert_jsdoc_array_to_ts(type_str: str) -> str: - """ - Recursively replaces 'Array.' with 'T[]', - handling nested arrays like 'Array.>' => 'string[][]'. - """ - pattern = re.compile(r'Array\.<([^>]+)>') - - while True: - match = pattern.search(type_str) - if not match: - break - - inner_type = match.group(1).strip() - # Recursively convert inner parts - inner_type = convert_jsdoc_array_to_ts(inner_type) - - # Replace the outer Array.<...> with ...[] - type_str = ( - type_str[:match.start()] - + f"{inner_type}[]" - + type_str[match.end():] - ) - - return type_str - -def escape_text_outside_code_blocks(markdown: str) -> str: - """ - Splits content by fenced code blocks, escapes MDX-unsafe characters - (<, >, {, }) only in the text outside those code blocks. - """ - # A regex to capture fenced code blocks with ``` - parts = re.split(r'(```.*?```)', markdown, flags=re.DOTALL) - - # Even indices (0, 2, 4, ...) are outside code blocks, - # odd indices (1, 3, 5, ...) are actual code blocks. 
- for i in range(0, len(parts), 2): - text = (parts[i] - .replace('<', '<') - .replace('>', '>') - .replace('{', '{') - .replace('}', '}')) - parts[i] = escape_brackets_in_quotes(text) - - return "".join(parts) - -def escape_brackets_in_quotes(text: str) -> str: - return re.sub( - r"(['\"])(.*?)(? str: - """ - Given a TypeScript-like type (e.g. "Drawing[][]"), return the - 'base' portion by stripping trailing "[]". For "Drawing[][]", - returns "Drawing". For "Array.", you'd convert it first - to "Drawing[]" then return "Drawing". - """ - while ts_type.endswith('[]'): - ts_type = ts_type[:-2] - return ts_type - -def generate_data_types_markdown(types, enumerations, classes, root='../../'): - """ - 1) Converts each type from JSDoc (e.g., Array.) to T[]. - 2) Processes union types by splitting them using '|'. - 3) Supports multidimensional arrays, e.g., (string|ApiRange|number)[]. - 4) If the base type matches the name of an enumeration or class, generates a link. - 5) The final types are joined using " | ". - """ - # Convert each type from JSDoc format to TypeScript format (e.g., T[]) - converted = [convert_jsdoc_array_to_ts(t) for t in types] - - # Set of primitive types - primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "{}", "date"} - - def is_primitive(type): - if (type.lower() in primitive_types or - (type.startswith('"') and type.endswith('"')) or - (type.startswith("'") and type.endswith("'")) or - type.replace('.', '', 1).isdigit() or - (type.startswith('-') and type[1:].replace('.', '', 1).isdigit())): - return True - return False - - def link_if_known(ts_type): - ts_type = ts_type.strip() - # Count the number of array dimensions, e.g., "[][]" has 2 dimensions - array_dims = 0 - while ts_type.endswith("[]"): - array_dims += 1 - ts_type = ts_type[:-2].strip() - - # Process generic types, e.g., Object. 
- if ".<" in ts_type and ts_type.endswith(">"): - import re - m = re.match(r'^(.*?)\.<(.*)>$', ts_type) - if m: - base_part = m.group(1).strip() - generic_args_str = m.group(2).strip() - # Process the base part of the type - found = False - for enum in enumerations: - if enum['name'] == base_part: - used_enumerations.add(base_part) - base_result = f"[{base_part}]({root}Enumeration/{base_part}.md)" - found = True - break - if not found: - if base_part in classes: - base_result = f"[{base_part}]({root}{base_part}/{base_part}.md)" - elif is_primitive(base_part): - base_result = base_part - elif cur_editor_name == "forms": - base_result = f"[{base_part}]({root}../text-document-api/{base_part}/{base_part}.md)" - else: - print(f"Unknown type encountered: {base_part}") - base_result = base_part - # Split the generic parameters by commas and process each recursively - generic_args = [link_if_known(x) for x in generic_args_str.split(",")] - result = base_result + ".<" + ", ".join(generic_args) + ">" - result += "[]" * array_dims - return result - - # Process union types: if the type is enclosed in parentheses - if ts_type.startswith("(") and ts_type.endswith(")"): - inner = ts_type[1:-1].strip() - subtypes = [sub.strip() for sub in inner.split("|")] - if len(subtypes) == 1: - result = link_if_known(subtypes[0]) - else: - processed = [link_if_known(subtype) for subtype in subtypes] - result = "(" + " | ".join(processed) + ")" - result += "[]" * array_dims - return result - - # If not a generic or union type – process the base type - else: - base = ts_type - found = False - for enum in enumerations: - if enum['name'] == base: - used_enumerations.add(base) - result = f"[{base}]({root}Enumeration/{base}.md)" - found = True - break - if not found: - if base in classes: - result = f"[{base}]({root}{base}/{base}.md)" - elif is_primitive(base): - result = base - elif cur_editor_name == "forms": - result = f"[{base}]({root}../text-document-api/{base}/{base}.md)" - else: - 
print(f"Unknown type encountered: {base}") - result = base - result += "[]" * array_dims - return result - - # Apply link_if_known to each converted type - linked = [link_if_known(ts_t) for ts_t in converted] - - # Join results using " | " - param_types_md = r' | '.join(linked) - param_types_md = param_types_md.replace("|", r"\|") - - # Escape remaining angle brackets for generics - def replace_leftover_generics(match): - element = match.group(1).strip() - return f"<{element}>" - - param_types_md = re.sub(r'<([^<>]+)>', replace_leftover_generics, param_types_md) - - return param_types_md - - -def generate_class_markdown(class_name, methods, properties, enumerations, classes): - content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n" - - content += generate_properties_markdown(properties, enumerations, classes) - - content += "\n## Methods\n\n" - content += "| Method | Returns | Description |\n" - content += "| ------ | ------- | ----------- |\n" - - for method in sorted(methods, key=lambda m: m['name']): - method_name = method['name'] - - # Get the type of return values - returns = method.get('returns', []) - if returns: - return_type_list = returns[0].get('type', {}).get('names', []) - returns_markdown = generate_data_types_markdown(return_type_list, enumerations, classes, '../') - else: - returns_markdown = "None" - - # Processing the method description - description = remove_line_breaks(correct_description(method.get('description', 'No description provided.'), '../')) - - # Form a link to the method document - method_link = f"[{method_name}](./Methods/{method_name}.md)" - - content += f"| {method_link} | {returns_markdown} | {description} |\n" - - return escape_text_outside_code_blocks(content) - -def generate_method_markdown(method, enumerations, classes, example_editor_name): - method_name = method['name'] - description = method.get('description', 'No description provided.') - description = correct_description(description, '../../') - params = 
method.get('params', []) - returns = method.get('returns', []) - example = method.get('example', '') - memberof = method.get('memberof', '') - - content = f"# {method_name}\n\n{description}\n\n" - - # Syntax - param_list = ', '.join([param['name'] for param in params if '.' not in param['name']]) if params else '' - content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n" - if memberof: - content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n" - - # Parameters - content += "## Parameters\n\n" - if params: - content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n" - content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n" - for param in params: - param_name = param.get('name', 'Unnamed') - param_types = param.get('type', {}).get('names', []) if param.get('type') else [] - param_types_md = generate_data_types_markdown(param_types, enumerations, classes) - param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'), '../../')) - param_required = "Required" if not param.get('optional') else "Optional" - param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes) - - content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n" - else: - content += "This method doesn't have any parameters.\n" - - # Returns - content += "\n## Returns\n\n" - if returns: - return_type_list = returns[0].get('type', {}).get('names', []) - return_type_md = generate_data_types_markdown(return_type_list, enumerations, classes) - content += return_type_md - else: - content += "This method doesn't return any data." 
- - # Example - if example: - # Separate comment and code, remove JS comments - if '```js' in example: - comment, code = example.split('```js', 1) - comment = remove_js_comments(comment) - content += f"\n\n## Example\n\n{comment}\n\n```javascript {example_editor_name}\n{code.strip()}\n" - else: - # If there's no triple-backtick structure, just show it as code - cleaned_example = remove_js_comments(example) - content += f"\n\n## Example\n\n```javascript {example_editor_name}\n{cleaned_example}\n```\n" - - return escape_text_outside_code_blocks(content) - -def generate_properties_markdown(properties, enumerations, classes, root='../'): - if properties is None: - return '' - - content = "## Properties\n\n" - content += "| Name | Type | Description |\n" - content += "| ---- | ---- | ----------- |\n" - - for prop in sorted(properties, key=lambda m: m['name']): - prop_name = prop['name'] - prop_description = prop.get('description', 'No description provided.') - prop_description = remove_line_breaks(correct_description(prop_description, root)) - prop_types = prop['type']['names'] if prop.get('type') else [] - param_types_md = generate_data_types_markdown(prop_types, enumerations, classes, root) - content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" - - # Escape outside code blocks - return escape_text_outside_code_blocks(content) - -def generate_enumeration_markdown(enumeration, enumerations, classes, example_editor_name): - enum_name = enumeration['name'] - - if enum_name not in used_enumerations: - return None - - description = enumeration.get('description', 'No description provided.') - description = correct_description(description, '../') - example = enumeration.get('example', '') - - content = f"# {enum_name}\n\n{description}\n\n" - - ptype = enumeration['type']['parsedType'] - if ptype['type'] == 'TypeUnion': - enum_empty = True # is empty enum - - content += "## Type\n\nEnumeration\n\n" - content += "## Values\n\n" - # Each top-level name in the 
union - for raw_t in enumeration['type']['names']: - ts_t = convert_jsdoc_array_to_ts(raw_t) - - # Attempt linking: we compare the raw type to enumerations/classes - if any(enum['name'] == raw_t for enum in enumerations): - used_enumerations.add(raw_t) - content += f"- [{ts_t}](../Enumeration/{raw_t}.md)\n" - enum_empty = False - elif raw_t in classes: - content += f"- [{ts_t}](../{raw_t}/{raw_t}.md)\n" - enum_empty = False - elif ts_t.find('Api') == -1: - content += f"- {ts_t}\n" - enum_empty = False - - if enum_empty == True: - return None - elif enumeration['properties'] is not None: - content += "## Type\n\nObject\n\n" - content += generate_properties_markdown(enumeration['properties'], enumerations, classes) - else: - content += "## Type\n\n" - # If it's not a union and has no properties, simply print the type(s). - types = enumeration['type']['names'] - t_md = generate_data_types_markdown(types, enumerations, classes) - content += t_md + "\n\n" - - # Example - if example: - if '```js' in example: - comment, code = example.split('```js', 1) - comment = remove_js_comments(comment) - content += f"\n\n## Example\n\n{comment}\n\n```javascript {example_editor_name}\n{code.strip()}\n" - else: - # If there's no triple-backtick structure - cleaned_example = remove_js_comments(example) - content += f"\n\n## Example\n\n```javascript {example_editor_name}\n{cleaned_example}\n```\n" - - return escape_text_outside_code_blocks(content) - -def process_doclets(data, output_dir, editor_name): - global cur_editor_name - cur_editor_name = editor_name - - classes = {} - classes_props = {} - enumerations = [] - editor_dir = os.path.join(output_dir, editors[editor_name]) - example_editor_name = 'editor-' - - if editor_name == 'word': - example_editor_name += 'docx' - elif editor_name == 'forms': - example_editor_name += 'pdf' - elif editor_name == 'slide': - example_editor_name += 'pptx' - elif editor_name == 'cell': - example_editor_name += 'xlsx' - - for doclet in data: - if 
doclet['kind'] == 'class': - class_name = doclet['name'] - if class_name: - if class_name not in classes: - classes[class_name] = [] - classes_props[class_name] = doclet.get('properties', None) - elif doclet['kind'] == 'function': - class_name = doclet.get('memberof') - if class_name: - if class_name not in classes: - classes[class_name] = [] - classes[class_name].append(doclet) - elif doclet['kind'] == 'typedef': - enumerations.append(doclet) - - # Process classes - for class_name, methods in classes.items(): - if (len(methods) == 0): - continue - - class_dir = os.path.join(editor_dir, class_name) - methods_dir = os.path.join(class_dir, 'Methods') - os.makedirs(methods_dir, exist_ok=True) - - # Write class file - class_content = generate_class_markdown( - class_name, - methods, - classes_props[class_name], - enumerations, - classes - ) - write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content) - - # Write method files - for method in methods: - method_file_path = os.path.join(methods_dir, f"{method['name']}.md") - method_content = generate_method_markdown(method, enumerations, classes, example_editor_name) - write_markdown_file(method_file_path, method_content) - - if not method.get('example', ''): - missing_examples.append(os.path.relpath(method_file_path, output_dir)) - - # Process enumerations - enum_dir = os.path.join(editor_dir, 'Enumeration') - os.makedirs(enum_dir, exist_ok=True) - - # idle run - prev_used_count = -1 - while len(used_enumerations) != prev_used_count: - prev_used_count = len(used_enumerations) - for enum in [e for e in enumerations if e['name'] in used_enumerations]: - enum_content = generate_enumeration_markdown(enum, enumerations, classes, example_editor_name) - - for enum in enumerations: - enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md") - enum_content = generate_enumeration_markdown(enum, enumerations, classes, example_editor_name) - if enum_content is None: - continue - - 
write_markdown_file(enum_file_path, enum_content) - if not enum.get('example', ''): - missing_examples.append(os.path.relpath(enum_file_path, output_dir)) - -def generate(output_dir): - print('Generating Markdown documentation...') - - generate_docs_json.generate(output_dir + 'tmp_json', md=True) - for editor_name, folder_name in editors.items(): - input_file = os.path.join(output_dir + '/tmp_json', editor_name + ".json") - - editor_folder_path = os.path.join(output_dir, folder_name) - for folder_name in os.listdir(editor_folder_path): - folder_path_to_del = os.path.join(editor_folder_path, folder_name) - if os.path.isdir(folder_path_to_del): - shutil.rmtree(folder_path_to_del, ignore_errors=True) - - data = load_json(input_file) - used_enumerations.clear() - process_doclets(data, output_dir, editor_name) - - shutil.rmtree(output_dir + 'tmp_json') - print('Done') - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Generate documentation") - parser.add_argument( - "destination", - type=str, - help="Destination directory for the generated documentation", - nargs='?', # Indicates the argument is optional - default="../../../../api.onlyoffice.com/site/docs/office-api/usage-api/" # Default value - ) - args = parser.parse_args() - generate(args.destination) - print("START_MISSING_EXAMPLES") - print(",".join(missing_examples)) - print("END_MISSING_EXAMPLES") diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_md_site.py b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_md_site.py deleted file mode 100644 index 4de086a..0000000 --- a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_md_site.py +++ /dev/null @@ -1,634 +0,0 @@ -import os -import json -import re -import shutil -import argparse -import generate_docs_plugins_json - -# Configuration files -editors = { - "word": "text-document-api", - "cell": "spreadsheet-api", - "slide": "presentation-api", - "forms": "form-api" -} - -missing_examples = [] -used_enumerations = set() - 
-cur_editor_name = None - -_CODE_BLOCK_RE = re.compile(r'(```.*?```)', re.DOTALL) -_QSTRING_RE = re.compile(r'(["\'])(.*?)(? 1 else None - - if ref.startswith('/'): - # Handle reserved links using mapping - if ref in reserved_links: - url = reserved_links[ref] - display_text = label if label else ref - return f"[{display_text}]({url})" - else: - # If the link is not in the mapping, return the original construction - return match.group(0) - elif ref.startswith("global#"): - # Handle links to typedef (similar logic as before) - typedef_name = ref.split("#")[1] - used_enumerations.add(typedef_name) - display_text = label if label else typedef_name - return f"[{display_text}]({root}Enumeration/{typedef_name}.md)" - else: - # Handle links to class methods like ClassName#MethodName - try: - class_name, method_name = ref.split("#") - except ValueError: - return match.group(0) - display_text = label if label else ref # Keep the full notation, e.g., "Api#CreateSlide" - return f"[{display_text}]({root}{class_name}/Methods/{method_name}.md)" - - return re.sub(r'{@link\s+([^}]+)}', replace_link, text) - -def correct_description(string, root=''): - """ - Cleans or transforms specific tags in the doclet description: - - => ** (bold text) - - ... => 💡 ... - - {@link ...} is replaced with a Markdown link - - If the description is missing, returns a default value. - - All '\r' characters are replaced with '\n'. - """ - if string is None: - return 'No description provided.' 
- - # Line breaks - string = string.replace('\r', '\\\n') - - # Replace tags with Markdown bold formatting - string = re.sub(r'', '-**', string) - string = re.sub(r'', '**', string) - - # Replace tags with an icon and text - string = re.sub(r'(.*?)', r'💡 \1', string, flags=re.DOTALL) - - # Process {@link ...} constructions - string = process_link_tags(string, root) - - return string - -def correct_default_value(value, enumerations, classes): - if value is None or value == '': - return '' - - if value == True: - value = "true" - elif value == False: - value = "false" - else: - value = str(value) - - return generate_data_types_markdown([value], enumerations, classes) - -def remove_line_breaks(string): - return re.sub(r'[\r\n]+', ' ', string) - -# Convert Array. => T[] (including nested arrays). -def convert_jsdoc_array_to_ts(type_str: str) -> str: - """ - Recursively replaces 'Array.' with 'T[]', - handling nested arrays like 'Array.>' => 'string[][]'. - """ - pattern = re.compile(r'Array\.<([^>]+)>') - - while True: - match = pattern.search(type_str) - if not match: - break - - inner_type = match.group(1).strip() - # Recursively convert inner parts - inner_type = convert_jsdoc_array_to_ts(inner_type) - - # Replace the outer Array.<...> with ...[] - type_str = ( - type_str[:match.start()] - + f"{inner_type}[]" - + type_str[match.end():] - ) - - return type_str - -def escape_text_outside_code_blocks(markdown: str) -> str: - """ - Splits content by fenced code blocks, escapes MDX-unsafe characters - (<, >, {, }) only in the text outside those code blocks. - """ - # A regex to capture fenced code blocks with ``` - parts = re.split(r'(```.*?```)', markdown, flags=re.DOTALL) - - # Even indices (0, 2, 4, ...) are outside code blocks, - # odd indices (1, 3, 5, ...) are actual code blocks. 
- for i in range(0, len(parts), 2): - text = (parts[i] - .replace('<', '<') - .replace('>', '>') - .replace('{', '{') - .replace('}', '}')) - parts[i] = escape_brackets_in_quotes(text) - - return "".join(parts) - -def escape_brackets_in_quotes(text: str) -> str: - return re.sub( - r"(['\"])(.*?)(? str: - """ - Given a TypeScript-like type (e.g. "Drawing[][]"), return the - 'base' portion by stripping trailing "[]". For "Drawing[][]", - returns "Drawing". For "Array.", you'd convert it first - to "Drawing[]" then return "Drawing". - """ - while ts_type.endswith('[]'): - ts_type = ts_type[:-2] - return ts_type - -def generate_data_types_markdown(types, enumerations, classes, root='../../'): - """ - 1) Converts each type from JSDoc (e.g., Array.) to T[]. - 2) Processes union types by splitting them using '|'. - 3) Supports multidimensional arrays, e.g., (string|ApiRange|number)[]. - 4) If the base type matches the name of an enumeration or class, generates a link. - 5) The final types are joined using " | ". - """ - # Convert each type from JSDoc format to TypeScript format (e.g., T[]) - converted = [convert_jsdoc_array_to_ts(t) for t in types] - - # Set of primitive types - primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "{}"} - - def is_primitive(type): - if (type.lower() in primitive_types or - (type.startswith('"') and type.endswith('"')) or - (type.startswith("'") and type.endswith("'")) or - type.replace('.', '', 1).isdigit() or - (type.startswith('-') and type[1:].replace('.', '', 1).isdigit())): - return True - return False - - def link_if_known(ts_type): - ts_type = ts_type.strip() - # Count the number of array dimensions, e.g., "[][]" has 2 dimensions - array_dims = 0 - while ts_type.endswith("[]"): - array_dims += 1 - ts_type = ts_type[:-2].strip() - - # Process generic types, e.g., Object. 
- if ".<" in ts_type and ts_type.endswith(">"): - import re - m = re.match(r'^(.*?)\.<(.*)>$', ts_type) - if m: - base_part = m.group(1).strip() - generic_args_str = m.group(2).strip() - # Process the base part of the type - found = False - for enum in enumerations: - if enum['name'] == base_part: - used_enumerations.add(base_part) - base_result = f"[{base_part}]({root}Enumeration/{base_part}.md)" - found = True - break - if not found: - if base_part in classes: - base_result = f"[{base_part}]({root}{base_part}/{base_part}.md)" - elif is_primitive(base_part): - base_result = base_part - elif cur_editor_name == "forms": - base_result = f"[{base_part}]({root}../text-document-api/{base_part}/{base_part}.md)" - else: - print(f"Unknown type encountered: {base_part}") - base_result = base_part - # Split the generic parameters by commas and process each recursively - generic_args = [link_if_known(x) for x in generic_args_str.split(",")] - result = base_result + ".<" + ", ".join(generic_args) + ">" - result += "[]" * array_dims - return result - - # Process union types: if the type is enclosed in parentheses - if ts_type.startswith("(") and ts_type.endswith(")"): - inner = ts_type[1:-1].strip() - subtypes = [sub.strip() for sub in inner.split("|")] - if len(subtypes) == 1: - result = link_if_known(subtypes[0]) - else: - processed = [link_if_known(subtype) for subtype in subtypes] - result = "(" + " | ".join(processed) + ")" - result += "[]" * array_dims - return result - - # If not a generic or union type – process the base type - else: - base = ts_type - found = False - for enum in enumerations: - if enum['name'] == base: - used_enumerations.add(base) - result = f"[{base}]({root}Enumeration/{base}.md)" - found = True - break - if not found: - if base in classes: - result = f"[{base}]({root}{base}/{base}.md)" - elif is_primitive(base): - result = base - elif cur_editor_name == "forms": - result = f"[{base}]({root}../text-document-api/{base}/{base}.md)" - else: - 
print(f"Unknown type encountered: {base}") - result = base - result += "[]" * array_dims - return result - - # Apply link_if_known to each converted type - linked = [link_if_known(ts_t) for ts_t in converted] - - # Join results using " | " - param_types_md = r' | '.join(linked) - param_types_md = param_types_md.replace("|", r"\|") - - # Escape remaining angle brackets for generics - def replace_leftover_generics(match): - element = match.group(1).strip() - return f"<{element}>" - - param_types_md = re.sub(r'<([^<>]+)>', replace_leftover_generics, param_types_md) - - return param_types_md - -def generate_class_markdown(class_name, methods, properties, enumerations, classes): - content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n" - - content += generate_properties_markdown(properties, enumerations, classes) - - content += "\n## Methods\n\n" - content += "| Method | Returns | Description |\n" - content += "| ------ | ------- | ----------- |\n" - - for method in sorted(methods, key=lambda m: m['name']): - method_name = method['name'] - - # Get the type of return values - returns = method.get('returns', []) - if returns: - return_type_list = returns[0].get('type', {}).get('names', []) - returns_markdown = generate_data_types_markdown(return_type_list, enumerations, classes, '../') - else: - returns_markdown = "None" - - # Processing the method description - description = remove_line_breaks(correct_description(method.get('description', 'No description provided.'), '../')) - - # Form a link to the method document - method_link = f"[{method_name}](./Methods/{method_name}.md)" - - content += f"| {method_link} | {returns_markdown} | {description} |\n" - - return escape_text_outside_code_blocks(content) - -def generate_method_markdown(method, enumerations, classes): - """ - Generates Markdown for a method doclet, relying only on `method['examples']` - (array of strings). Ignores any single `method['example']` field. 
- """ - - method_name = method['name'] - description = method.get('description', 'No description provided.') - description = correct_description(description, '../../') - params = method.get('params', []) - returns = method.get('returns', []) - memberof = method.get('memberof', '') - - # Use the 'examples' array only - examples = method.get('examples', []) - - content = f"# {method_name}\n\n{description}\n\n" - - # Syntax - param_list = ', '.join([param['name'] for param in params if '.' not in param['name']]) if params else '' - content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n" - if memberof: - content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n" - - # Parameters - content += "## Parameters\n\n" - if params: - content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n" - content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n" - for param in params: - param_name = param.get('name', 'Unnamed') - param_types = param.get('type', {}).get('names', []) if param.get('type') else [] - param_types_md = generate_data_types_markdown(param_types, enumerations, classes) - param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'), '../../')) - param_required = "Required" if not param.get('optional') else "Optional" - param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes) - - content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n" - else: - content += "This method doesn't have any parameters.\n" - - # Returns - content += "\n## Returns\n\n" - if returns: - return_type_list = returns[0].get('type', {}).get('names', []) - return_type_md = generate_data_types_markdown(return_type_list, enumerations, classes) - content += return_type_md - else: - content += "This method doesn't return any 
data." - - # Process examples array - if examples: - if len(examples) > 1: - content += "\n\n## Examples\n\n" - else: - content += "\n\n## Example\n\n" - - for i, ex_line in enumerate(examples, start=1): - # Remove JS comments - cleaned_example = remove_js_comments(ex_line).strip() - - # Attempt splitting if the user used ```js - if '```js' in cleaned_example: - comment, code = cleaned_example.split('```js', 1) - comment = comment.strip() - code = code.strip() - if len(examples) > 1: - content += f"**Example {i}:**\n\n{comment}\n\n" - - content += f"```javascript\n{code}\n```\n" - else: - if len(examples) > 1: - content += f"**Example {i}:**\n\n{comment}\n\n" - # No special fences, just show as code - content += f"```javascript\n{cleaned_example}\n```\n" - - return escape_text_outside_code_blocks(content) - -def generate_properties_markdown(properties, enumerations, classes, root='../'): - if properties is None: - return '' - - content = "## Properties\n\n" - content += "| Name | Type | Description |\n" - content += "| ---- | ---- | ----------- |\n" - - for prop in sorted(properties, key=lambda m: m['name']): - prop_name = prop['name'] - prop_description = prop.get('description', 'No description provided.') - prop_description = remove_line_breaks(correct_description(prop_description)) - prop_types = prop['type']['names'] if prop.get('type') else [] - param_types_md = generate_data_types_markdown(prop_types, enumerations, classes, root) - content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" - - # Escape outside code blocks - return escape_text_outside_code_blocks(content) - -def generate_enumeration_markdown(enumeration, enumerations, classes): - """ - Generates Markdown documentation for a 'typedef' doclet. - This version only works with `enumeration['examples']` (an array of strings), - ignoring any single `enumeration['examples']` field. 
- """ - enum_name = enumeration['name'] - - if enum_name not in used_enumerations: - return None - - description = enumeration.get('description', 'No description provided.') - description = correct_description(description, '../') - - # Only use the 'examples' array - examples = enumeration.get('examples', []) - - content = f"# {enum_name}\n\n{description}\n\n" - - parsed_type = enumeration['type'].get('parsedType') - if not parsed_type: - # If parsedType is missing, just list 'type.names' if available - type_names = enumeration['type'].get('names', []) - if type_names: - content += "## Type\n\n" - t_md = generate_data_types_markdown(type_names, enumerations, classes) - content += t_md + "\n\n" - else: - ptype = parsed_type['type'] - - # 1) Handle TypeUnion - if ptype == 'TypeUnion': - content += "## Type\n\nEnumeration\n\n" - content += "## Values\n\n" - for raw_t in enumeration['type']['names']: - # Attempt linking - if any(enum['name'] == raw_t for enum in enumerations): - used_enumerations.add(raw_t) - content += f"- [{raw_t}](../Enumeration/{raw_t}.md)\n" - elif raw_t in classes: - content += f"- [{raw_t}](../{raw_t}/{raw_t}.md)\n" - else: - content += f"- {raw_t}\n" - - # 2) Handle TypeApplication (e.g. Object.) 
- elif ptype == 'TypeApplication': - content += "## Type\n\nObject\n\n" - type_names = enumeration['type'].get('names', []) - if type_names: - t_md = generate_data_types_markdown(type_names, enumerations, classes) - content += f"**Type:** {t_md}\n\n" - - # 3) If properties are present, treat it like an object - if enumeration.get('properties') is not None: - content += generate_properties_markdown(enumeration['properties'], enumerations, classes) - - # 4) If it's neither TypeUnion nor TypeApplication, just output the type names - if ptype not in ('TypeUnion', 'TypeApplication'): - type_names = enumeration['type'].get('names', []) - if type_names: - content += "## Type\n\n" - t_md = generate_data_types_markdown(type_names, enumerations, classes) - content += t_md + "\n\n" - - # Process examples array - if examples: - if len(examples) > 1: - content += "\n\n## Examples\n\n" - else: - content += "\n\n## Example\n\n" - - for i, ex_line in enumerate(examples, start=1): - # Remove JS comments - cleaned_example = remove_js_comments(ex_line).strip() - - # Attempt splitting if the user used ```js - if '```js' in cleaned_example: - comment, code = cleaned_example.split('```js', 1) - comment = comment.strip() - code = code.strip() - if len(examples) > 1: - content += f"**Example {i}:**\n\n{comment}\n\n" - - content += f"```javascript\n{code}\n```\n" - else: - if len(examples) > 1: - content += f"**Example {i}:**\n\n{comment}\n\n" - # No special fences, just show as code - content += f"```javascript\n{cleaned_example}\n```\n" - - return escape_text_outside_code_blocks(content) - -def process_doclets(data, output_dir, editor_name): - global cur_editor_name - cur_editor_name = editor_name - - classes = {} - classes_props = {} - enumerations = [] - editor_dir = os.path.join(output_dir, editors[editor_name]) - - for doclet in data: - if doclet['kind'] == 'class': - class_name = doclet['name'] - if class_name: - if class_name not in classes: - classes[class_name] = [] - 
classes_props[class_name] = doclet.get('properties', None) - elif doclet['kind'] == 'function': - class_name = doclet.get('memberof') - if class_name: - if class_name not in classes: - classes[class_name] = [] - classes[class_name].append(doclet) - elif doclet['kind'] == 'typedef': - enumerations.append(doclet) - - # Process classes - for class_name, methods in classes.items(): - if (len(methods) == 0): - continue - - class_dir = os.path.join(editor_dir, class_name) - methods_dir = os.path.join(class_dir, 'Methods') - os.makedirs(methods_dir, exist_ok=True) - - # Write class file - class_content = generate_class_markdown( - class_name, - methods, - classes_props[class_name], - enumerations, - classes - ) - write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content) - - # Write method files - for method in methods: - method_file_path = os.path.join(methods_dir, f"{method['name']}.md") - method_content = generate_method_markdown(method, enumerations, classes) - write_markdown_file(method_file_path, method_content) - - if not method.get('examples', ''): - missing_examples.append(os.path.relpath(method_file_path, output_dir)) - - # Process enumerations - enum_dir = os.path.join(editor_dir, 'Enumeration') - os.makedirs(enum_dir, exist_ok=True) - - # idle run - prev_used_count = -1 - while len(used_enumerations) != prev_used_count: - prev_used_count = len(used_enumerations) - for enum in [e for e in enumerations if e['name'] in used_enumerations]: - enum_content = generate_enumeration_markdown(enum, enumerations, classes) - - for enum in enumerations: - enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md") - enum_content = generate_enumeration_markdown(enum, enumerations, classes) - if enum_content is None: - continue - - write_markdown_file(enum_file_path, enum_content) - if not enum.get('examples', ''): - missing_examples.append(os.path.relpath(enum_file_path, output_dir)) - -def generate(output_dir): - print('Generating Markdown 
documentation...') - - if output_dir[-1] == '/': - output_dir = output_dir[:-1] - - generate_docs_plugins_json.generate(output_dir + '/tmp_json', md=True) - for editor_name, folder_name in editors.items(): - input_file = os.path.join(output_dir + '/tmp_json', editor_name + ".json") - - editor_folder_path = os.path.join(output_dir, folder_name) - for folder_name in os.listdir(editor_folder_path): - folder_path_to_del = os.path.join(editor_folder_path, folder_name) - if os.path.isdir(folder_path_to_del): - shutil.rmtree(folder_path_to_del, ignore_errors=True) - - data = load_json(input_file) - used_enumerations.clear() - process_doclets(data, output_dir, editor_name) - - shutil.rmtree(output_dir + '/tmp_json') - print('Done') - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Generate documentation") - parser.add_argument( - "destination", - type=str, - help="Destination directory for the generated documentation", - nargs='?', # Indicates the argument is optional - default="../../../../api.onlyoffice.com/site/docs/plugin-and-macros/interacting-with-editors/methods/" # Default value - ) - args = parser.parse_args() - generate(args.destination) - print("START_MISSING_EXAMPLES") - print(",".join(missing_examples)) - print("END_MISSING_EXAMPLES") diff --git a/scripts/sdkjs_common/jsdoc/config/builder/cell.json b/scripts/sdkjs_common/jsdoc/office-api/config/cell.json similarity index 59% rename from scripts/sdkjs_common/jsdoc/config/builder/cell.json rename to scripts/sdkjs_common/jsdoc/office-api/config/cell.json index 2403a82..5b86ac8 100644 --- a/scripts/sdkjs_common/jsdoc/config/builder/cell.json +++ b/scripts/sdkjs_common/jsdoc/office-api/config/cell.json @@ -1,6 +1,6 @@ { "source": { - "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"] + "include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/slide/apiBuilder.js", 
"../../../../../sdkjs/cell/apiBuilder.js"] }, "plugins": ["./correct_doclets.js"], "opts": { diff --git a/scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js b/scripts/sdkjs_common/jsdoc/office-api/config/correct_doclets.js similarity index 100% rename from scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js rename to scripts/sdkjs_common/jsdoc/office-api/config/correct_doclets.js diff --git a/scripts/sdkjs_common/jsdoc/config/builder/forms.json b/scripts/sdkjs_common/jsdoc/office-api/config/forms.json similarity index 67% rename from scripts/sdkjs_common/jsdoc/config/builder/forms.json rename to scripts/sdkjs_common/jsdoc/office-api/config/forms.json index d39d531..bfbda89 100644 --- a/scripts/sdkjs_common/jsdoc/config/builder/forms.json +++ b/scripts/sdkjs_common/jsdoc/office-api/config/forms.json @@ -1,6 +1,6 @@ { "source": { - "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"] + "include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs-forms/apiBuilder.js"] }, "plugins": ["./correct_doclets.js"], "opts": { diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/forms.json b/scripts/sdkjs_common/jsdoc/office-api/config/pdf.json similarity index 78% rename from scripts/sdkjs_common/jsdoc/config/plugins/forms.json rename to scripts/sdkjs_common/jsdoc/office-api/config/pdf.json index 65cb6a2..30ec3a8 100644 --- a/scripts/sdkjs_common/jsdoc/config/plugins/forms.json +++ b/scripts/sdkjs_common/jsdoc/office-api/config/pdf.json @@ -1,6 +1,6 @@ { "source": { - "include": ["../../../../sdkjs-forms/apiPlugins.js"] + "include": ["../../../../../sdkjs/pdf/apiBuilder.js"] }, "plugins": ["./correct_doclets.js"], "opts": { diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/slide.json b/scripts/sdkjs_common/jsdoc/office-api/config/slide.json similarity index 67% rename from scripts/sdkjs_common/jsdoc/config/plugins/slide.json rename to scripts/sdkjs_common/jsdoc/office-api/config/slide.json index 
d015171..8a126b7 100644 --- a/scripts/sdkjs_common/jsdoc/config/plugins/slide.json +++ b/scripts/sdkjs_common/jsdoc/office-api/config/slide.json @@ -1,6 +1,6 @@ { "source": { - "include": ["../../../../sdkjs/slide/api_plugins.js"] + "include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/slide/apiBuilder.js"] }, "plugins": ["./correct_doclets.js"], "opts": { diff --git a/scripts/sdkjs_common/jsdoc/config/builder/word.json b/scripts/sdkjs_common/jsdoc/office-api/config/word.json similarity index 78% rename from scripts/sdkjs_common/jsdoc/config/builder/word.json rename to scripts/sdkjs_common/jsdoc/office-api/config/word.json index 3b90c0a..8deb05a 100644 --- a/scripts/sdkjs_common/jsdoc/config/builder/word.json +++ b/scripts/sdkjs_common/jsdoc/office-api/config/word.json @@ -1,6 +1,6 @@ { "source": { - "include": ["../../../../sdkjs/word/apiBuilder.js"] + "include": ["../../../../../sdkjs/word/apiBuilder.js"] }, "plugins": ["./correct_doclets.js"], "opts": { diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/office-api/generate_docs_json.py similarity index 96% rename from scripts/sdkjs_common/jsdoc/generate_docs_json.py rename to scripts/sdkjs_common/jsdoc/office-api/generate_docs_json.py index 0ccb48a..b175a44 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/office-api/generate_docs_json.py @@ -5,14 +5,14 @@ import argparse import re import platform -root = '../../../..' +root = '../../../../..' 
# Configuration files configs = [ - "./config/builder/word.json", - "./config/builder/cell.json", - "./config/builder/slide.json", - "./config/builder/forms.json" + "./config/word.json", + "./config/cell.json", + "./config/slide.json", + "./config/forms.json" ] editors_maps = { diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md.py b/scripts/sdkjs_common/jsdoc/office-api/generate_docs_md.py similarity index 95% rename from scripts/sdkjs_common/jsdoc/generate_docs_md.py rename to scripts/sdkjs_common/jsdoc/office-api/generate_docs_md.py index c9e8cdd..cafc8d2 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_md.py +++ b/scripts/sdkjs_common/jsdoc/office-api/generate_docs_md.py @@ -39,8 +39,8 @@ def process_link_tags(text, root=''): For a method, if an alias is not specified, the name is left in the format 'Class#Method'. """ reserved_links = { - '/docbuilder/global#ShapeType': f"{'../../../' if root == '' else '../../' if root == '../' else root}text-document-api/Enumeration/ShapeType.md", - '/plugin/config': 'https://api.onlyoffice.com/docs/plugin-and-macros/structure/manifest/', + '/docbuilder/global#ShapeType': f"{'../../../../../../' if root == '' else '../../../../../' if root == '../' else root}docs/office-api/usage-api/text-document-api/Enumeration/ShapeType.md", + '/plugin/config': 'https://api.onlyoffice.com/docs/plugin-and-macros/structure/configuration/', '/docbuilder/basic': 'https://api.onlyoffice.com/docs/office-api/usage-api/text-document-api/' } @@ -76,7 +76,7 @@ def process_link_tags(text, root=''): return re.sub(r'{@link\s+([^}]+)}', replace_link, text) -def correct_description(string, root=''): +def correct_description(string, root='', isInTable=False): """ Cleans or transforms specific tags in the doclet description: - => ** (bold text) @@ -88,11 +88,14 @@ def correct_description(string, root=''): if string is None: return 'No description provided.' 
- # Line breaks - string = string.replace('\r', '\\\n') + if False == isInTable: + # Line breaks + string = string.replace('\r', '\\\n') + # Replace tags with Markdown bold formatting + string = re.sub(r'', '-**', string) + else: + string = re.sub(r'', '**', string) - # Replace tags with Markdown bold formatting - string = re.sub(r'', '-**', string) string = re.sub(r'', '**', string) # Replace tags with an icon and text @@ -195,7 +198,7 @@ def generate_data_types_markdown(types, enumerations, classes, root='../../'): converted = [convert_jsdoc_array_to_ts(t) for t in types] # Set of primitive types - primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "{}"} + primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "date", "{}"} def is_primitive(type): if (type.lower() in primitive_types or @@ -318,7 +321,7 @@ def generate_class_markdown(class_name, methods, properties, enumerations, class returns_markdown = "None" # Processing the method description - description = remove_line_breaks(correct_description(method.get('description', 'No description provided.'), '../')) + description = remove_line_breaks(correct_description(method.get('description', 'No description provided.'), '../', True)) # Form a link to the method document method_link = f"[{method_name}](./Methods/{method_name}.md)" @@ -353,7 +356,7 @@ def generate_method_markdown(method, enumerations, classes, example_editor_name) param_name = param.get('name', 'Unnamed') param_types = param.get('type', {}).get('names', []) if param.get('type') else [] param_types_md = generate_data_types_markdown(param_types, enumerations, classes) - param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'), '../../')) + param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'), '../../', True)) 
param_required = "Required" if not param.get('optional') else "Optional" param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes) @@ -395,7 +398,7 @@ def generate_properties_markdown(properties, enumerations, classes, root='../'): for prop in sorted(properties, key=lambda m: m['name']): prop_name = prop['name'] prop_description = prop.get('description', 'No description provided.') - prop_description = remove_line_breaks(correct_description(prop_description, root)) + prop_description = remove_line_breaks(correct_description(prop_description, root, True)) prop_types = prop['type']['names'] if prop.get('type') else [] param_types_md = generate_data_types_markdown(prop_types, enumerations, classes, root) content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" @@ -553,8 +556,11 @@ def generate(output_dir): for editor_name, folder_name in editors.items(): input_file = os.path.join(output_dir + '/tmp_json', editor_name + ".json") - shutil.rmtree(output_dir + f'/{folder_name}', ignore_errors=True) - os.makedirs(output_dir + f'/{folder_name}') + editor_folder_path = os.path.join(output_dir, folder_name) + for folder_name in os.listdir(editor_folder_path): + folder_path_to_del = os.path.join(editor_folder_path, folder_name) + if os.path.isdir(folder_path_to_del): + shutil.rmtree(folder_path_to_del, ignore_errors=True) data = load_json(input_file) used_enumerations.clear() @@ -570,7 +576,7 @@ if __name__ == "__main__": type=str, help="Destination directory for the generated documentation", nargs='?', # Indicates the argument is optional - default="../../../../office-js-api/" # Default value + default="../../../../../api.onlyoffice.com/site/docs/office-api/usage-api/" # Default value ) args = parser.parse_args() generate(args.destination) diff --git a/scripts/sdkjs_common/jsdoc/generate_jsonl_dataset.py b/scripts/sdkjs_common/jsdoc/office-api/generate_jsonl_dataset.py similarity index 98% rename from 
scripts/sdkjs_common/jsdoc/generate_jsonl_dataset.py rename to scripts/sdkjs_common/jsdoc/office-api/generate_jsonl_dataset.py index cba9b68..5187d0d 100644 --- a/scripts/sdkjs_common/jsdoc/generate_jsonl_dataset.py +++ b/scripts/sdkjs_common/jsdoc/office-api/generate_jsonl_dataset.py @@ -21,7 +21,7 @@ editors_names = { "forms": "Forms" } -root = '../../../..' +root = '../../../../..' missing_examples = [] def load_json(file_path): @@ -228,7 +228,7 @@ if __name__ == "__main__": type=str, help="Destination directory for the generated documentation", nargs='?', # Indicates the argument is optional - default="../../../../office-js-api/dataset" # Default value + default="../../../../../office-js-api/dataset" # Default value ) parser.add_argument( "model", diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js b/scripts/sdkjs_common/jsdoc/plugins/config/correct_doclets.js similarity index 100% rename from scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js rename to scripts/sdkjs_common/jsdoc/plugins/config/correct_doclets.js diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/events/cell.json b/scripts/sdkjs_common/jsdoc/plugins/config/events/cell.json new file mode 100644 index 0000000..1194d1b --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/events/cell.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs/cell/plugin-events.js"] + }, + "plugins": ["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/events/common.json b/scripts/sdkjs_common/jsdoc/plugins/config/events/common.json new file mode 100644 index 0000000..364c80f --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/events/common.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs/common/base-plugin-events.js"] + }, + "plugins": 
["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/events/forms.json b/scripts/sdkjs_common/jsdoc/plugins/config/events/forms.json new file mode 100644 index 0000000..cafd6c9 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/events/forms.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs-forms/plugin-events.js"] + }, + "plugins": ["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/events/slide.json b/scripts/sdkjs_common/jsdoc/plugins/config/events/slide.json new file mode 100644 index 0000000..c11c90e --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/events/slide.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs/slide/plugin-events.js"] + }, + "plugins": ["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/events/word.json b/scripts/sdkjs_common/jsdoc/plugins/config/events/word.json new file mode 100644 index 0000000..23ff50a --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/events/word.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs/word/plugin-events.js"] + }, + "plugins": ["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/cell.json b/scripts/sdkjs_common/jsdoc/plugins/config/methods/cell.json 
similarity index 63% rename from scripts/sdkjs_common/jsdoc/config/plugins/cell.json rename to scripts/sdkjs_common/jsdoc/plugins/config/methods/cell.json index b49b71e..bf9ac65 100644 --- a/scripts/sdkjs_common/jsdoc/config/plugins/cell.json +++ b/scripts/sdkjs_common/jsdoc/plugins/config/methods/cell.json @@ -1,8 +1,8 @@ { "source": { - "include": ["../../../../sdkjs/cell/api_plugins.js"] + "include": ["../../../../../sdkjs/cell/api_plugins.js"] }, - "plugins": ["./correct_doclets.js"], + "plugins": ["../correct_doclets.js"], "opts": { "destination": "./out", "recurse": true, diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/common.json b/scripts/sdkjs_common/jsdoc/plugins/config/methods/common.json similarity index 51% rename from scripts/sdkjs_common/jsdoc/config/plugins/common.json rename to scripts/sdkjs_common/jsdoc/plugins/config/methods/common.json index 4bf510c..55b21ad 100644 --- a/scripts/sdkjs_common/jsdoc/config/plugins/common.json +++ b/scripts/sdkjs_common/jsdoc/plugins/config/methods/common.json @@ -1,8 +1,8 @@ { "source": { - "include": ["../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../sdkjs/common/apiBase_plugins.js"] + "include": ["../../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../../sdkjs/common/apiBase_plugins.js"] }, - "plugins": ["./correct_doclets.js"], + "plugins": ["../correct_doclets.js"], "opts": { "destination": "./out", "recurse": true, diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/methods/forms.json b/scripts/sdkjs_common/jsdoc/plugins/config/methods/forms.json new file mode 100644 index 0000000..82bf9bd --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/methods/forms.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs-forms/apiPlugins.js"] + }, + "plugins": ["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of 
file diff --git a/scripts/sdkjs_common/jsdoc/plugins/config/methods/slide.json b/scripts/sdkjs_common/jsdoc/plugins/config/methods/slide.json new file mode 100644 index 0000000..4c79b00 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/config/methods/slide.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../../sdkjs/slide/api_plugins.js"] + }, + "plugins": ["../correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/word.json b/scripts/sdkjs_common/jsdoc/plugins/config/methods/word.json similarity index 54% rename from scripts/sdkjs_common/jsdoc/config/plugins/word.json rename to scripts/sdkjs_common/jsdoc/plugins/config/methods/word.json index b06743e..45d7f3e 100644 --- a/scripts/sdkjs_common/jsdoc/config/plugins/word.json +++ b/scripts/sdkjs_common/jsdoc/plugins/config/methods/word.json @@ -1,8 +1,8 @@ { "source": { - "include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"] + "include": ["../../../../../sdkjs/word/api_plugins.js", "../../../../../sdkjs-forms/apiPlugins.js"] }, - "plugins": ["./correct_doclets.js"], + "plugins": ["../correct_doclets.js"], "opts": { "destination": "./out", "recurse": true, diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_events_json.py similarity index 93% rename from scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py rename to scripts/sdkjs_common/jsdoc/plugins/generate_docs_events_json.py index 12f5d57..850aa14 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py +++ b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_events_json.py @@ -6,14 +6,14 @@ import re # Configuration files configs = [ - "./config/plugins/common.json", - "./config/plugins/word.json", - "./config/plugins/cell.json", - "./config/plugins/slide.json", - 
 "./config/plugins/forms.json" + "./config/events/common.json", + "./config/events/word.json", + "./config/events/cell.json", + "./config/events/slide.json", + "./config/events/forms.json" ] -root = '../../../..' +root = '../../../../..' def generate(output_dir, md=False): if not os.path.exists(output_dir): @@ -49,7 +49,7 @@ def generate(output_dir, md=False): # Modify JSON data for idx, doclet in enumerate(data): - if idx == start_common_doclet_idx: + if idx >= start_common_doclet_idx: example_folder_name = 'common' elif editor_name == 'forms': example_folder_name = 'word' @@ -84,7 +84,7 @@ def generate(output_dir, md=False): with open(output_file, 'w', encoding='utf-8') as f: json.dump(data, f, ensure_ascii=False, indent=4) - print("Documentation generation for builder completed.") + print("Documentation generation for plugin events completed.") def remove_builder_lines(text): lines = text.splitlines() # Split text into lines diff --git a/scripts/sdkjs_common/jsdoc/plugins/generate_docs_events_md.py b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_events_md.py new file mode 100644 index 0000000..12e0299 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_events_md.py @@ -0,0 +1,398 @@ +#!/usr/bin/env python3 +import os +import json +import re +import shutil +import argparse +import generate_docs_events_json + +# Output folder for each editor_name +editors = { + "word": "text-document-api", + "cell": "spreadsheet-api", + "slide": "presentation-api", + "forms": "form-api" +} + +missing_examples = [] +used_enumerations = set() + + +def load_json(path): + with open(path, 'r', encoding='utf-8') as f: + return json.load(f) + + +def write_markdown_file(path, content): + os.makedirs(os.path.dirname(path), exist_ok=True) + with open(path, 'w', encoding='utf-8') as f: + f.write(content) + + +def remove_js_comments(text): + text = re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE) + text = re.sub(r'/\*.*?\*/', '', text, flags=re.DOTALL) + return text.strip() + 
+ +def correct_description(string, root='', isInTable=False): + """ + Cleans or transforms specific tags in the doclet description: + - => ** (bold text) + - ... => 💡 ... + - {@link ...} is replaced with a Markdown link + - If the description is missing, returns a default value. + - All '\r' characters are replaced with '\n'. + """ + if string is None: + return 'No description provided.' + + if False == isInTable: + # Line breaks + string = string.replace('\r', '\\\n') + # Replace tags with Markdown bold formatting + string = re.sub(r'', '-**', string) + else: + string = re.sub(r'', '**', string) + + string = re.sub(r'', '**', string) + + # Replace tags with an icon and text + string = re.sub(r'(.*?)', r'💡 \1', string, flags=re.DOTALL) + + # Process {@link ...} constructions + string = process_link_tags(string, root) + + return string + +def process_link_tags(text, root=''): + """ + Finds patterns like {@link ...} and replaces them with Markdown links. + If the prefix 'global#' is found, a link to a typedef is generated, + otherwise, a link to a class method is created. + For a method, if an alias is not specified, the name is left in the format 'Class#Method'. 
+ """ + reserved_links = { + '/docbuilder/global#ShapeType': f"{'../../../../../../' if root == '' else '../../../../../' if root == '../' else root}docs/office-api/usage-api/text-document-api/Enumeration/ShapeType.md", + '/plugin/config': 'https://api.onlyoffice.com/docs/plugin-and-macros/structure/configuration/', + '/docbuilder/basic': 'https://api.onlyoffice.com/docs/office-api/usage-api/text-document-api/' + } + + def replace_link(match): + content = match.group(1).strip() # Example: "/docbuilder/global#ShapeType shape type" or "global#ErrorValue ErrorValue" + parts = content.split() + ref = parts[0] + label = parts[1] if len(parts) > 1 else None + + if ref.startswith('/'): + # Handle reserved links using mapping + if ref in reserved_links: + url = reserved_links[ref] + display_text = label if label else ref + return f"[{display_text}]({url})" + elif ref.startswith('/docs/plugins/'): + url = f"../../{ref.split('/docs/plugins/')[1]}.md" + display_text = label if label else ref + return f"[{display_text}]({url})" + else: + # If the link is not in the mapping, return the original construction + return match.group(0) + elif ref.startswith("global#"): + # Handle links to typedef (similar logic as before) + typedef_name = ref.split("#")[1] + used_enumerations.add(typedef_name) + display_text = label if label else typedef_name + return f"[{display_text}]({root}Enumeration/Event_{typedef_name}.md)" + else: + # Handle links to class methods like ClassName#MethodName + try: + class_name, method_name = ref.split("#") + except ValueError: + return match.group(0) + display_text = label if label else ref # Keep the full notation, e.g., "Api#CreateSlide" + return f"[{display_text}]({root}{class_name}/Methods/{method_name}.md)" + + return re.sub(r'{@link\s+([^}]+)}', replace_link, text) + +def remove_line_breaks(s): + return re.sub(r'[\r\n]+', ' ', s) + + +def convert_jsdoc_array_to_ts(type_str): + p = re.compile(r'Array\.<([^>]+)>') + while True: + m = p.search(type_str) + 
 if not m: + break + inner = convert_jsdoc_array_to_ts(m.group(1).strip()) + type_str = type_str[:m.start()] + inner + '[]' + type_str[m.end():] + return type_str + + +def generate_data_types_markdown(types, enumerations, root=''): + converted = [convert_jsdoc_array_to_ts(t) for t in types] + primitives = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "date", "{}"} + result = [] + enum_names = {e['name'] for e in enumerations} + for t in converted: + base = t.rstrip('[]') + dims = t[len(base):] + if base in enum_names: + used_enumerations.add(base) + link = f"[Event_{base}]({root}../Enumeration/Event_{base}.md)" + elif base in primitives or re.match(r"^['\"].*['\"]$", base) or re.match(r"^-?\d+(\.\d+)?$", base): + link = base + else: + link = base + result.append(link + dims) + return " | ".join(result) + + +def escape_text_outside_code_blocks(md): + parts = re.split(r'(```.*?```)', md, flags=re.DOTALL) + for i in range(0, len(parts), 2): + parts[i] = parts[i].replace('<', '&lt;').replace('>', '&gt;') + return "".join(parts) + + +def generate_event_markdown(event, enumerations): + name = event['name'] + desc = correct_description(event.get('description', '')) + params = event.get('params', []) + + md = f"# {name}\n\n{desc}\n\n" + + # Parameters + md += "## Parameters\n\n" + if params: + md += "| **Name** | **Data type** | **Description** |\n" + md += "| --------- | ------------- | ----------- |\n" + for p in params: + t_md = generate_data_types_markdown( + p.get('type', {}).get('names', []), + enumerations + ) + d = remove_line_breaks(correct_description(p.get('description', ''), isInTable=True)) + md += f"| {p['name']} | {t_md} | {d} |\n" + md += "\n" + else: + md += "This event has no parameters.\n\n" + + for ex in event.get('examples', []): + code = remove_js_comments(ex).strip() + md += f"```javascript\n{code}\n```\n\n" + + return escape_text_outside_code_blocks(md) + + +def 
generate_enumeration_markdown(enumeration, enumerations): + """ + Generates Markdown documentation for a 'typedef' doclet. + This version only works with `enumeration['examples']` (an array of strings), + ignoring any single `enumeration['examples']` field. + """ + enum_name = enumeration['name'] + + if enum_name not in used_enumerations: + return None + + description = enumeration.get('description', 'No description provided.') + description = correct_description(description, '../') + + # Only use the 'examples' array + examples = enumeration.get('examples', []) + + content = f"# Event_{enum_name}\n\n{description}\n\n" + + parsed_type = enumeration['type'].get('parsedType') + if not parsed_type: + # If parsedType is missing, just list 'type.names' if available + type_names = enumeration['type'].get('names', []) + if type_names: + content += "## Type\n\n" + t_md = generate_data_types_markdown(type_names, enumerations) + content += t_md + "\n\n" + else: + ptype = parsed_type['type'] + + # 1) Handle TypeUnion + if ptype == 'TypeUnion': + content += "## Type\n\nEnumeration\n\n" + content += "## Values\n\n" + for raw_t in enumeration['type']['names']: + # Attempt linking + if any(enum['name'] == raw_t for enum in enumerations): + used_enumerations.add(raw_t) + content += f"- [{raw_t}](../Enumeration/Event_{raw_t}.md)\n" + else: + content += f"- {raw_t}\n" + + # 2) Handle TypeApplication (e.g. Object.) 
 + elif ptype == 'TypeApplication': + content += "## Type\n\nObject\n\n" + type_names = enumeration['type'].get('names', []) + if type_names: + t_md = generate_data_types_markdown(type_names, enumerations) + content += f"**Type:** {t_md}\n\n" + + # 3) If properties are present, treat it like an object + if enumeration.get('properties') is not None: + content += generate_properties_markdown(enumeration['properties'], enumerations) + + # 4) If it's neither TypeUnion nor TypeApplication, just output the type names + if ptype not in ('TypeUnion', 'TypeApplication'): + type_names = enumeration['type'].get('names', []) + if type_names: + content += "## Type\n\n" + t_md = generate_data_types_markdown(type_names, enumerations) + content += t_md + "\n\n" + + # Process examples array + if examples: + if len(examples) > 1: + content += "\n\n## Examples\n\n" + else: + content += "\n\n## Example\n\n" + + for i, ex_line in enumerate(examples, start=1): + # Remove JS comments + cleaned_example = remove_js_comments(ex_line).strip() + + # Attempt splitting if the user used ```js + if '```js' in cleaned_example: + comment, code = cleaned_example.split('```js', 1) + comment = comment.strip() + code = code.strip() + if len(examples) > 1: + content += f"**Example {i}:**\n\n{comment}\n\n" + + content += f"```javascript\n{code}\n```\n" + else: + if len(examples) > 1: + content += f"**Example {i}:**\n\n" + # No special fences, just show as code + content += f"```javascript\n{cleaned_example}\n```\n" + + return escape_text_outside_code_blocks(content) + +def generate_events_summary(events): + """ + Create Events.md summary listing all events with their description. 
+ """ + header = [ + "# Events\n\n", + "| Event | Description |\n", + "| ----- | ----------- |\n" + ] + lines = [ + f"| [{ev['name']}](./{ev['name']}.md) | " + f"{remove_line_breaks(correct_description(ev.get('description', ''), isInTable=True))} |\n" + for ev in sorted(events, key=lambda e: e['name']) + ] + return "".join(header + lines) + +def generate_properties_markdown(properties, enumerations): + if properties is None: + return '' + + content = "## Properties\n\n" + content += "| Name | Type | Description |\n" + content += "| ---- | ---- | ----------- |\n" + + for prop in sorted(properties, key=lambda m: m['name']): + prop_name = prop['name'] + prop_description = prop.get('description', 'No description provided.') + prop_description = remove_line_breaks(correct_description(prop_description, isInTable=True)) + prop_types = prop['type']['names'] if prop.get('type') else [] + param_types_md = generate_data_types_markdown(prop_types, enumerations) + content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" + + # Escape outside code blocks + return escape_text_outside_code_blocks(content) + +def clean_editor_dir(editor_dir): + for root, dirs, files in os.walk(editor_dir, topdown=False): + for file in files: + if not file.endswith(('.json')): + os.remove(os.path.join(root, file)) + for dir in dirs: + dir_path = os.path.join(root, dir) + # remove empty folder + if not os.listdir(dir_path): + os.rmdir(dir_path) + +def clean_enum_files(editor_dir: str): + for root, _, files in os.walk(editor_dir, topdown=False): + for file in files: + if True == file.startswith('Event_') and False == file.endswith('.json'): + os.remove(os.path.join(root, file)) + +def process_events(data, editor_dir): + enumerations = [] + events = [] + + for doclet in data: + kind = doclet.get('kind') + if kind == 'typedef': + enumerations.append(doclet) + elif kind == 'event': + events.append(doclet) + + events_dir = f'{editor_dir}/Events' + clean_editor_dir(events_dir) + 
os.makedirs(events_dir, exist_ok=True) + used_enumerations.clear() + + # пишем события + for ev in events: + path = os.path.join(events_dir, f"{ev['name']}.md") + write_markdown_file(path, generate_event_markdown(ev, enumerations)) + if not ev.get('examples'): + missing_examples.append(os.path.relpath(path, events_dir)) + + # пишем перечисления, используемые событиями + enum_dir = os.path.join(editor_dir, 'Enumeration') + clean_enum_files(enum_dir) + + os.makedirs(enum_dir, exist_ok=True) + prev = -1 + while len(used_enumerations) != prev: + prev = len(used_enumerations) + for e in enumerations: + if e['name'] in used_enumerations: + generate_enumeration_markdown(e, enumerations) + for e in enumerations: + if e['name'] in used_enumerations: + path = os.path.join(enum_dir, f"Event_{e['name']}.md") + write_markdown_file(path, generate_enumeration_markdown(e, enumerations)) + if not e.get('examples'): + missing_examples.append(os.path.relpath(path, editor_dir)) + + # events summary + write_markdown_file(os.path.join(events_dir, "Events.md"), generate_events_summary(events)) + +def generate_events(output_dir): + if output_dir.endswith('/'): + output_dir = output_dir[:-1] + tmp = os.path.join(output_dir, 'tmp_json') + + generate_docs_events_json.generate(tmp, md=True) + + for editor_name, folder in editors.items(): + data = load_json(os.path.join(tmp, f"{editor_name}.json")) + process_events(data, os.path.join(output_dir, folder)) + + shutil.rmtree(tmp) + print("Done. 
Missing examples:", missing_examples) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate events documentation") + parser.add_argument( + "destination", + nargs="?", + default="../../../../../api.onlyoffice.com/site/docs/plugin-and-macros/interacting-with-editors/", + help="Output directory" + ) + args = parser.parse_args() + generate_events(args.destination) \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/plugins/generate_docs_methods_json.py b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_methods_json.py new file mode 100644 index 0000000..7f55b3c --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_methods_json.py @@ -0,0 +1,111 @@ +import os +import subprocess +import json +import argparse +import re + +# Configuration files +configs = [ + "./config/methods/common.json", + "./config/methods/word.json", + "./config/methods/cell.json", + "./config/methods/slide.json", + "./config/methods/forms.json" +] + +root = '../../../../..' 
+ +def generate(output_dir, md=False): + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + # Generate JSON documentation + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + command = f"npx jsdoc -c {config} -X > {output_file}" + print(f"Generating {editor_name}.json: {command}") + subprocess.run(command, shell=True) + + common_doclets_file = os.path.join(output_dir, 'common.json') + with open(common_doclets_file, 'r', encoding='utf-8') as f: + common_doclets_json = json.dumps(json.load(f)) + os.remove(common_doclets_file) + + # Append examples to JSON documentation + for config in configs: + if (config.find('common') != -1): + continue + + editor_name = config.split('/')[-1].replace('.json', '') + example_folder_name = editor_name # name of folder with examples + output_file = os.path.join(output_dir, editor_name + ".json") + + # Read the JSON file + with open(output_file, 'r', encoding='utf-8') as f: + data = json.load(f) + start_common_doclet_idx = len(data) + data += json.loads(common_doclets_json) + + # Modify JSON data + for idx, doclet in enumerate(data): + if idx >= start_common_doclet_idx: + example_folder_name = 'common' + elif editor_name == 'forms': + example_folder_name = 'word' + + if 'see' in doclet: + if doclet['see'] is not None: + doclet['see'][0] = doclet['see'][0].replace('{Editor}', example_folder_name.title()) + file_path = f'{root}/' + doclet['see'][0] + + if os.path.exists(file_path): + with open(file_path, 'r', encoding='utf-8') as see_file: + example_content = see_file.read() + + # Extract the first line as a comment if it exists + lines = example_content.split('\n') + if lines[0].startswith('//'): + comment = lines[0] + '\n' + code_content = '\n'.join(lines[1:]) + else: + comment = '' + code_content = example_content + + doclet['examples'] = [remove_js_comments(comment) + code_content] + + if md == False: + document_type = 
editor_name + if "forms" == document_type: + document_type = "pdf" + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```' + + # Write the modified JSON file back + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(data, f, ensure_ascii=False, indent=4) + + print("Documentation generation for plugin methods completed.") + +def remove_builder_lines(text): + lines = text.splitlines() # Split text into lines + filtered_lines = [line for line in lines if not line.strip().startswith("builder.")] + return "\n".join(filtered_lines) + +def remove_js_comments(text): + # Remove single-line comments, leaving text after // + text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) + # Remove multi-line comments, leaving text after /* + text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) + return text.strip() + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + default=f"{root}/office-js-api-declarations/office-js-api-plugins" + ) + args = parser.parse_args() + generate(args.destination) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_md.py b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_methods_md.py similarity index 88% rename from scripts/sdkjs_common/jsdoc/generate_docs_plugins_md.py rename to scripts/sdkjs_common/jsdoc/plugins/generate_docs_methods_md.py index 4145fc9..65ab6c2 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_md.py +++ b/scripts/sdkjs_common/jsdoc/plugins/generate_docs_methods_md.py @@ -3,7 +3,7 @@ import json import re import shutil import argparse -import generate_docs_plugins_json +import generate_docs_methods_json # Configuration files editors = { @@ -18,10 +18,6 @@ used_enumerations = set() 
cur_editor_name = None -_CODE_BLOCK_RE = re.compile(r'(```.*?```)', re.DOTALL) -_QSTRING_RE = re.compile(r'(["\'])(.*?)(? => ** (bold text) @@ -92,11 +91,14 @@ def correct_description(string, root=''): if string is None: return 'No description provided.' - # Line breaks - string = string.replace('\r', '\\\n') + if False == isInTable: + # Line breaks + string = string.replace('\r', '\\\n') + # Replace tags with Markdown bold formatting + string = re.sub(r'', '-**', string) + else: + string = re.sub(r'', '**', string) - # Replace tags with Markdown bold formatting - string = re.sub(r'', '-**', string) string = re.sub(r'', '**', string) # Replace tags with an icon and text @@ -189,7 +191,7 @@ def get_base_type(ts_type: str) -> str: ts_type = ts_type[:-2] return ts_type -def generate_data_types_markdown(types, enumerations, classes, root='../../'): +def generate_data_types_markdown(types, enumerations, classes, root='../'): """ 1) Converts each type from JSDoc (e.g., Array.) to T[]. 2) Processes union types by splitting them using '|'. 
@@ -201,7 +203,7 @@ def generate_data_types_markdown(types, enumerations, classes, root='../../'): converted = [convert_jsdoc_array_to_ts(t) for t in types] # Set of primitive types - primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "{}"} + primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "date", "{}"} def is_primitive(type): if (type.lower() in primitive_types or @@ -323,10 +325,10 @@ def generate_class_markdown(class_name, methods, properties, enumerations, class returns_markdown = "None" # Processing the method description - description = remove_line_breaks(correct_description(method.get('description', 'No description provided.'), '../')) + description = remove_line_breaks(correct_description(method.get('description', 'No description provided.'), '../', True)) # Form a link to the method document - method_link = f"[{method_name}](./Methods/{method_name}.md)" + method_link = f"[{method_name}](./{method_name}.md)" content += f"| {method_link} | {returns_markdown} | {description} |\n" @@ -340,7 +342,7 @@ def generate_method_markdown(method, enumerations, classes): method_name = method['name'] description = method.get('description', 'No description provided.') - description = correct_description(description, '../../') + description = correct_description(description, '../') params = method.get('params', []) returns = method.get('returns', []) memberof = method.get('memberof', '') @@ -354,7 +356,7 @@ def generate_method_markdown(method, enumerations, classes): param_list = ', '.join([param['name'] for param in params if '.' 
not in param['name']]) if params else '' content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n" if memberof: - content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n" + content += f"`expression` - A variable that represents a [{memberof}](Methods.md) class.\n\n" # Parameters content += "## Parameters\n\n" @@ -365,7 +367,7 @@ def generate_method_markdown(method, enumerations, classes): param_name = param.get('name', 'Unnamed') param_types = param.get('type', {}).get('names', []) if param.get('type') else [] param_types_md = generate_data_types_markdown(param_types, enumerations, classes) - param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'), '../../')) + param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'), '../', True)) param_required = "Required" if not param.get('optional') else "Optional" param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes) @@ -421,7 +423,7 @@ def generate_properties_markdown(properties, enumerations, classes, root='../'): for prop in sorted(properties, key=lambda m: m['name']): prop_name = prop['name'] prop_description = prop.get('description', 'No description provided.') - prop_description = remove_line_breaks(correct_description(prop_description)) + prop_description = remove_line_breaks(correct_description(prop_description, isInTable=True)) prop_types = prop['type']['names'] if prop.get('type') else [] param_types_md = generate_data_types_markdown(prop_types, enumerations, classes, root) content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" @@ -521,6 +523,23 @@ def generate_enumeration_markdown(enumeration, enumerations, classes): return escape_text_outside_code_blocks(content) +def clean_methods_dir(methods_dir): + for root, dirs, files in os.walk(methods_dir, topdown=False): + for file in files: + if 
not file.endswith(('.json')): + os.remove(os.path.join(root, file)) + for dir in dirs: + dir_path = os.path.join(root, dir) + # remove empty folder + if not os.listdir(dir_path): + os.rmdir(dir_path) + +def clean_enum_files(editor_dir: str): + for root, _, files in os.walk(editor_dir, topdown=False): + for file in files: + if False == file.startswith('Event_') and False == file.endswith('.json'): + os.remove(os.path.join(root, file)) + def process_doclets(data, output_dir, editor_name): global cur_editor_name cur_editor_name = editor_name @@ -529,6 +548,10 @@ def process_doclets(data, output_dir, editor_name): classes_props = {} enumerations = [] editor_dir = os.path.join(output_dir, editors[editor_name]) + methods_dir = os.path.join(output_dir, editors[editor_name], 'Methods') + + clean_methods_dir(methods_dir) + os.makedirs(methods_dir, exist_ok=True) for doclet in data: if doclet['kind'] == 'class': @@ -546,36 +569,31 @@ def process_doclets(data, output_dir, editor_name): elif doclet['kind'] == 'typedef': enumerations.append(doclet) - # Process classes - for class_name, methods in classes.items(): - if (len(methods) == 0): - continue + # Process api methods + class_name = 'Api' + methods = classes[class_name] + # Write class file + class_content = generate_class_markdown( + class_name, + methods, + classes_props[class_name], + enumerations, + classes + ) + write_markdown_file(os.path.join(methods_dir, f"Methods.md"), class_content) - class_dir = os.path.join(editor_dir, class_name) - methods_dir = os.path.join(class_dir, 'Methods') - os.makedirs(methods_dir, exist_ok=True) + # Write method files + for method in methods: + method_file_path = os.path.join(methods_dir, f"{method['name']}.md") + method_content = generate_method_markdown(method, enumerations, classes) + write_markdown_file(method_file_path, method_content) - # Write class file - class_content = generate_class_markdown( - class_name, - methods, - classes_props[class_name], - enumerations, - classes - 
) - write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content) - - # Write method files - for method in methods: - method_file_path = os.path.join(methods_dir, f"{method['name']}.md") - method_content = generate_method_markdown(method, enumerations, classes) - write_markdown_file(method_file_path, method_content) - - if not method.get('examples', ''): - missing_examples.append(os.path.relpath(method_file_path, output_dir)) + if not method.get('examples', ''): + missing_examples.append(os.path.relpath(method_file_path, output_dir)) # Process enumerations enum_dir = os.path.join(editor_dir, 'Enumeration') + clean_enum_files(enum_dir) os.makedirs(enum_dir, exist_ok=True) # idle run @@ -601,13 +619,10 @@ def generate(output_dir): if output_dir[-1] == '/': output_dir = output_dir[:-1] - generate_docs_plugins_json.generate(output_dir + '/tmp_json', md=True) + generate_docs_methods_json.generate(output_dir + '/tmp_json', md=True) for editor_name, folder_name in editors.items(): input_file = os.path.join(output_dir + '/tmp_json', editor_name + ".json") - shutil.rmtree(output_dir + f'/{folder_name}', ignore_errors=True) - os.makedirs(output_dir + f'/{folder_name}') - data = load_json(input_file) used_enumerations.clear() process_doclets(data, output_dir, editor_name) @@ -622,7 +637,7 @@ if __name__ == "__main__": type=str, help="Destination directory for the generated documentation", nargs='?', # Indicates the argument is optional - default="../../../../office-js-api/plugins/" # Default value + default="../../../../../api.onlyoffice.com/site/docs/plugin-and-macros/interacting-with-editors/" # Default value ) args = parser.parse_args() generate(args.destination) diff --git a/sln.json b/sln.json index 15dc71c..3fd4408 100644 --- a/sln.json +++ b/sln.json @@ -93,8 +93,7 @@ "[win,linux]desktop-apps/win-linux/ASCDocumentEditor.pro", "[win]desktop-apps/win-linux/extras/projicons/ProjIcons.pro", - 
"[win,!win_xp]desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro", - "[win_xp]desktop-apps/win-linux/extras/online-installer/OnlineInstaller.pro" + "[win,!win_xp]desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro" ], "mobile" : [ diff --git a/tools/linux/arm/cross_arm64/.gitignore b/tools/linux/arm/cross_arm64/.gitignore new file mode 100644 index 0000000..d8eeaf0 --- /dev/null +++ b/tools/linux/arm/cross_arm64/.gitignore @@ -0,0 +1,5 @@ +qt_source* +qt_binary* +qt_build* +qt-* +gcc-linaro-* \ No newline at end of file diff --git a/tools/linux/arm/cross_arm64/build_qt.py b/tools/linux/arm/cross_arm64/build_qt.py new file mode 100644 index 0000000..e76cf60 --- /dev/null +++ b/tools/linux/arm/cross_arm64/build_qt.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python + +import sys +import os +sys.path.append('../../../../scripts') + +import base + +def update_qmake_conf(arm_toolchain_bin): + replace_file = "./qt-everywhere-src-5.15.2/qtbase/mkspecs/linux-aarch64-gnu-g++/qmake.conf" + arm_toolchain_bin = os.path.abspath(arm_toolchain_bin) + + replace_src = "" + replace_src += "# modifications to g++.conf\n" + replace_src += "QMAKE_CC = aarch64-linux-gnu-gcc\n" + replace_src += "QMAKE_CXX = aarch64-linux-gnu-g++\n" + replace_src += "QMAKE_LINK = aarch64-linux-gnu-g++\n" + replace_src += "QMAKE_LINK_SHLIB = aarch64-linux-gnu-g++\n" + replace_src += "\n" + replace_src += "# modifications to linux.conf\n" + replace_src += "QMAKE_AR = aarch64-linux-gnu-ar cqs\n" + replace_src += "QMAKE_OBJCOPY = aarch64-linux-gnu-objcopy\n" + replace_src += "QMAKE_NM = aarch64-linux-gnu-nm -P\n" + replace_src += "QMAKE_STRIP = aarch64-linux-gnu-strip\n" + + replace_dst = "" + replace_dst += "# modifications to g++.conf\n" + replace_dst += "QMAKE_CC = " + arm_toolchain_bin + "/aarch64-linux-gnu-gcc\n" + replace_dst += "QMAKE_CXX = " + arm_toolchain_bin + "/aarch64-linux-gnu-g++\n" + replace_dst += "QMAKE_LINK = " + arm_toolchain_bin + "/aarch64-linux-gnu-g++\n" + replace_dst += 
"QMAKE_LINK_SHLIB = " + arm_toolchain_bin + "/aarch64-linux-gnu-g++\n" + replace_dst += "\n" + replace_dst += "# modifications to linux.conf\n" + replace_dst += "QMAKE_AR = " + arm_toolchain_bin + "/aarch64-linux-gnu-ar cqs\n" + replace_dst += "QMAKE_OBJCOPY = " + arm_toolchain_bin + "/aarch64-linux-gnu-objcopy\n" + replace_dst += "QMAKE_NM = " + arm_toolchain_bin + "/aarch64-linux-gnu-nm -P\n" + replace_dst += "QMAKE_STRIP = " + arm_toolchain_bin + "/aarch64-linux-gnu-strip\n" + + base.replaceInFile(replace_file, replace_src, replace_dst) + +def make(arm_toolchain_bin=""): + qt_build_path = os.path.dirname(os.path.abspath(__file__)) + "/qt_build/Qt-5.15.2/linux_arm64" + qt_params = ["-opensource", + "-confirm-license", + "-release", + "-shared", + "-accessibility", + "-prefix", "\"" + qt_build_path + "\"", + "-extprefix", "\"" + qt_build_path + "\"", + "-hostprefix", "\"" + qt_build_path + "\"", + "-c++std", "c++11", + "-qt-zlib", + "-qt-libpng", + "-qt-libjpeg", + "-qt-pcre", + "-no-sql-sqlite", + "-no-opengl", + "-nomake", "examples", + "-nomake", "tests", + "-skip", "qtlocation", + "-skip", "qtserialport", + "-skip", "qtsensors", + "-skip", "qtxmlpatterns", + "-skip", "qt3d", + "-skip", "qtwebview", + "-skip", "qtwebengine", + "-skip", "qtdeclarative", + "-xplatform", "linux-aarch64-gnu-g++", # be sure that aarch64 gnu compiler is installed + "-no-pch"] + + qt_params_str = "" + for param in qt_params: + qt_params_str += (param + " ") + + qt_url = "https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/qt/qt-everywhere-src-5.15.2.tar.xz" + if not base.is_file("./qt_source_5.15.2.tar.xz"): + base.download(qt_url, "./qt_source_5.15.2.tar.xz") + + if not base.is_dir("./qt-everywhere-src-5.15.2"): + base.cmd("tar", ["-xf", "./qt_source_5.15.2.tar.xz"]) + + # https://bugreports.qt.io/browse/QTBUG-93452 + # for GCC 11 and Qt5/Qt6 + additional_gcc_11 = "#ifdef __cplusplus\n#include \n#endif\n" + chanage_file = 
"./qt-everywhere-src-5.15.2/qtbase/src/corelib/global/qglobal.h" + filedata = base.readFile(chanage_file) + if filedata.find(additional_gcc_11) == -1: + filedata = additional_gcc_11 + filedata + base.writeFile(chanage_file, filedata) + + if arm_toolchain_bin != "": + update_qmake_conf(arm_toolchain_bin) + + base.cmd_in_dir("./qt-everywhere-src-5.15.2", "./configure " + qt_params_str) + base.cmd_in_dir("./qt-everywhere-src-5.15.2", "make -j4") + base.cmd_in_dir("./qt-everywhere-src-5.15.2", "make install") + +if __name__ == "__main__": + arm_toolchain_path = "./gcc-linaro-5.4.1-2017.05-x86_64_aarch64-linux-gnu/bin" + if len(sys.argv) != 1: + arm_toolchain_path = sys.argv[1] + + make(arm_toolchain_path) diff --git a/tools/linux/arm/cross_arm64/download_arm_toolchain.py b/tools/linux/arm/cross_arm64/download_arm_toolchain.py new file mode 100644 index 0000000..ef5fa31 --- /dev/null +++ b/tools/linux/arm/cross_arm64/download_arm_toolchain.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python + +import sys +import os +sys.path.append('../../../../scripts') + +import base + +def make(): + arm_toolchain_url = 'https://releases.linaro.org/components/toolchain/binaries/5.4-2017.05/aarch64-linux-gnu/' + arm_toolchain_tar_filename = 'gcc-linaro-5.4.1-2017.05-x86_64_aarch64-linux-gnu.tar.xz' + base.cmd2('wget', [arm_toolchain_url + arm_toolchain_tar_filename]) + base.cmd2('tar', ['-xf', arm_toolchain_tar_filename]) + +if __name__ == "__main__": + make() \ No newline at end of file diff --git a/tools/linux/arm/cross_arm64/fetch_qt.py b/tools/linux/arm/cross_arm64/fetch_qt.py new file mode 100644 index 0000000..1b1f2ff --- /dev/null +++ b/tools/linux/arm/cross_arm64/fetch_qt.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +import sys +import os +sys.path.append('../../../scripts') + +import base + + +def make(): + qt_build_path = os.path.dirname(os.path.abspath(__file__)) + "/qt_build/Qt-5.15.2" + qt_binary_url = 
"https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/qt/qt_binary_linux_arm64.7z" + + if not base.is_file("./qt_binary_linux_arm64.7z"): + base.download(qt_binary_url, "./qt_binary_linux_arm64.7z") + + if not base.is_dir(qt_build_path): + os.makedirs(qt_build_path) + base.cmd("tar", ["-xf", "./qt_binary_linux_arm64.7z", "-C", qt_build_path]) + +if __name__ == "__main__": + make() \ No newline at end of file diff --git a/tools/linux/arm/cross_arm64/pyrightconfig.json b/tools/linux/arm/cross_arm64/pyrightconfig.json new file mode 100644 index 0000000..4094cf8 --- /dev/null +++ b/tools/linux/arm/cross_arm64/pyrightconfig.json @@ -0,0 +1,5 @@ +{ + "exclude": [ + "qt-everywhere-src-5.15.2/**" + ] +} \ No newline at end of file diff --git a/tools/win/arm64/.gitignore b/tools/win/arm64/.gitignore new file mode 100644 index 0000000..0260ec9 --- /dev/null +++ b/tools/win/arm64/.gitignore @@ -0,0 +1,5 @@ +tmp.bat +qt_source* +qt_binary* +qt_build* +qt-* diff --git a/tools/win/arm64/build_qt.py b/tools/win/arm64/build_qt.py new file mode 100644 index 0000000..467b1d7 --- /dev/null +++ b/tools/win/arm64/build_qt.py @@ -0,0 +1,77 @@ +import sys +import os +sys.path.append('../../../scripts') + +import base + +def get_vs_path(version = "2019"): + vs_path = "" + programFilesDir = base.get_env("ProgramFiles") + if ("" != base.get_env("ProgramFiles(x86)")): + programFilesDir = base.get_env("ProgramFiles(x86)") + if ("2015" == version): + vs_path = programFilesDir + "/Microsoft Visual Studio 14.0/VC" + elif ("2019" == version): + if base.is_dir(programFilesDir + "/Microsoft Visual Studio/2019/Enterprise/VC/Auxiliary/Build"): + vs_path = programFilesDir + "/Microsoft Visual Studio/2019/Enterprise/VC/Auxiliary/Build" + elif base.is_dir(programFilesDir + "/Microsoft Visual Studio/2019/Professional/VC/Auxiliary/Build"): + vs_path = programFilesDir + "/Microsoft Visual Studio/2019/Professional/VC/Auxiliary/Build" + else: + vs_path = programFilesDir + "/Microsoft 
Visual Studio/2019/Community/VC/Auxiliary/Build" + + return vs_path + +def make(): + qt_build_path = os.path.dirname(os.path.abspath(__file__)) + "/qt_build/Qt-5.15.2/win_arm64" + qt_params = ["-opensource", + "-confirm-license", + "-release", + "-shared", + "-accessibility", + "-prefix", "\"" + qt_build_path + "\"", + "-extprefix", "\"" + qt_build_path + "\"", + "-hostprefix", "\"" + qt_build_path + "\"", + "-c++std", "c++11", + "-qt-zlib", + "-qt-libpng", + "-qt-libjpeg", + "-qt-pcre", + "-no-sql-sqlite", + "-no-qml-debug", + "-nomake", "examples", + "-nomake", "tests", + "-skip", "qtlocation", + "-skip", "qtserialport", + "-skip", "qtsensors", + "-skip", "qtxmlpatterns", + "-skip", "qt3d", + "-skip", "qtwebview", + "-skip", "qtwebengine", + "-xplatform", "win32-arm64-msvc2017", + "-mp", + "-no-pch"] + + qt_params_str = "" + for param in qt_params: + qt_params_str += (param + " ") + + qt_url = "https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/qt/qt-everywhere-src-5.15.2.tar.xz" + if not base.is_file("./qt_source_5.15.2.tar.xz"): + base.download(qt_url, "./qt_source_5.15.2.tar.xz") + + if not base.is_dir("./qt-everywhere-src-5.15.2"): + base.cmd("tar", ["-xf", "./qt_source_5.15.2.tar.xz"]) + + vs_path = get_vs_path() + vcvarsall_host_arch = "x64" + + qt_build_bat = [] + qt_build_bat.append("call \"" + vs_path + "/vcvarsall.bat\" " + vcvarsall_host_arch) # for nmake + qt_build_bat.append("cd qt-everywhere-src-5.15.2") + qt_build_bat.append("call configure " + qt_params_str) + qt_build_bat.append("call nmake") + qt_build_bat.append("call nmake install") + base.run_as_bat(qt_build_bat) + +if __name__ == "__main__": + make() \ No newline at end of file diff --git a/tools/win/arm64/fetch_qt.py b/tools/win/arm64/fetch_qt.py new file mode 100644 index 0000000..e3b7994 --- /dev/null +++ b/tools/win/arm64/fetch_qt.py @@ -0,0 +1,20 @@ +import sys +import os +sys.path.append('../../../scripts') + +import base + +def make(): + qt_build_path = 
os.path.dirname(os.path.abspath(__file__)) + "/qt_build/Qt-5.15.2" + qt_binary_url = "https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/qt/qt_binary_win_arm64.7z" + + if not base.is_file("./qt_binary_win_arm64.7z"): + base.download(qt_binary_url, "./qt_binary_win_arm64.7z") + + if not base.is_dir(qt_build_path): + os.makedirs(qt_build_path) + base.extract("./qt_binary_win_arm64.7z", qt_build_path) + +if __name__ == "__main__": + base.configure_common_apps() + make() \ No newline at end of file diff --git a/tools/win/arm64/pyrightconfig.json b/tools/win/arm64/pyrightconfig.json new file mode 100644 index 0000000..4094cf8 --- /dev/null +++ b/tools/win/arm64/pyrightconfig.json @@ -0,0 +1,5 @@ +{ + "exclude": [ + "qt-everywhere-src-5.15.2/**" + ] +} \ No newline at end of file diff --git a/version b/version index c9277c5..93c8cbd 100644 --- a/version +++ b/version @@ -1 +1 @@ -9.0.0 \ No newline at end of file +9.0.4 \ No newline at end of file