mirror of https://github.com/ONLYOFFICE/build_tools.git
synced 2026-04-07 14:06:31 +08:00

Compare commits: v8.2.0.4...fix/depend (96 commits)
| SHA1 |
|---|
| 8abdeb21da |
| f463bff49e |
| a817e2b046 |
| 3539e36bde |
| 6930a9ffe1 |
| e0a44502b1 |
| 19e1bd5586 |
| ea65ba02f1 |
| 8406e48009 |
| a8f1d11cbc |
| f245a4a9c6 |
| 597529a16d |
| 9b9dba05c2 |
| 2d0bbc824f |
| fa523c673f |
| da1a4ba393 |
| e9c9712e52 |
| 78561ca659 |
| 1ad87383e3 |
| c29ac1549f |
| f09eeb19e5 |
| 4b7b2c78a2 |
| 414af6bdb0 |
| df7288b275 |
| ce80953086 |
| d1344dab71 |
| 4f2ba4ae76 |
| 6bd525c3b4 |
| 341671a612 |
| 9161aa1556 |
| 70e9fbabce |
| a2c00deba2 |
| 3baee0c14e |
| bd279d1ad7 |
| 4d55a66307 |
| 9481e01581 |
| fe91bf9620 |
| d812ba379b |
| e1cc7f3c83 |
| f50d5d2cd1 |
| b3987b0ad5 |
| 63fbbc5603 |
| dabbc31c09 |
| 997bfa3dd5 |
| 50eca8aab5 |
| 6e4a2e4d5e |
| 40e9938885 |
| 5bc8ca2266 |
| 4cdbfbfb86 |
| 01575d1f2e |
| 8f75c75b80 |
| ebc084f9ea |
| 626efaf5cf |
| 096ce99588 |
| 9ce103b31b |
| 13cbd84b58 |
| a8912dff41 |
| 8b773614ba |
| d04f04f382 |
| 9a44dae4f9 |
| 07665dd93e |
| eeca17e78b |
| f91264bc94 |
| 0983e67f21 |
| 8e7db87554 |
| 9d000b2284 |
| e29fd0ca09 |
| dcfde5b5e7 |
| 871750d6ae |
| d6b5dc0830 |
| e99a3e8978 |
| 13db6d3155 |
| f8845d4fc5 |
| efcfb00239 |
| 1727313e54 |
| f6d55d07c1 |
| 331bbadaad |
| f012c604b8 |
| a8f6b0c599 |
| e46d73869c |
| 6bf413a008 |
| 963c3bf212 |
| f7071569d9 |
| 4e5eadbf82 |
| 3af65bf276 |
| 0a51c3bdea |
| ba6c3a8f38 |
| 66e196b5ec |
| d4a49d7137 |
| 1cca8af54f |
| edccac17f6 |
| 1d36cad17e |
| 08e6d5ba53 |
| 6505ee1b35 |
| 6338fd58c3 |
| e719ae24f0 |
.github/workflows/check.yml (vendored, 13 changes)

@@ -1,25 +1,24 @@
-name: Markdown check
+name: Markdown Lint

 on:
   workflow_dispatch:
   push:
     branches:
-      - '*'
+      - '**'
     paths:
       - '*.md'
      - 'develop/*.md'
      - 'scripts/**.md'
      - '.markdownlint.jsonc'

 jobs:
   markdownlint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
-      - uses: DavidAnson/markdownlint-cli2-action@v9
+      - uses: actions/checkout@v4
+      - uses: DavidAnson/markdownlint-cli2-action@v16
         with:
           command: config
           globs: |
             .markdownlint.jsonc
             *.md
             develop/*.md
             scripts/**.md
.github/workflows/update-version.yml (vendored, 9 changes)

@@ -3,16 +3,13 @@ name: Update hard-coded version
 on: workflow_dispatch

 jobs:
-
   update-version:
     if: >-
       ${{ contains(github.ref, 'refs/heads/hotfix/v') ||
           contains(github.ref, 'refs/heads/release/v') }}
     runs-on: ubuntu-latest
     steps:
-
-      - name: Checkout
-        uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           token: ${{ secrets.PUSH_TOKEN }}

@@ -25,9 +22,9 @@ jobs:
         run: echo "${{ env.version }}" > version

       - name: Commit & push changes
-        uses: EndBug/add-and-commit@v8
+        uses: EndBug/add-and-commit@v9
         with:
           author_name: github-actions[bot]
           author_email: github-actions[bot]@users.noreply.github.com
-          message: Update hard-coded version to v${{ env.version }}
+          message: Update hard-coded version to ${{ env.version }}
           add: version
.gitignore (vendored, 2 changes)

@@ -12,3 +12,5 @@ tests/puppeteer/node_modules
 tests/puppeteer/work_directory
 tests/puppeteer/package.json
 tests/puppeteer/package-lock.json
+scripts/sdkjs_common/jsdoc/node_modules
+scripts/sdkjs_common/jsdoc/package-lock.json
@@ -196,9 +196,8 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
 **Note**: The created database must have **onlyoffice** both for user and password.

 ```bash
-sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
-sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
-sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
+sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
+sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
 ```

 3. Configure the database:
@@ -27,7 +27,7 @@ parser.add_option("--db-pass", action="store", type="string", dest="db-pass", de
 parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
 parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
 parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
-parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="https", help="can be used only if update is set to true - 'https', 'ssh'")
+parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="auto", help="can be used only if update is set to true - 'https', 'ssh'")
 parser.add_option("--branding", action="store", type="string", dest="branding", default="", help="provides branding path")
 parser.add_option("--branding-name", action="store", type="string", dest="branding-name", default="", help="provides branding name")
 parser.add_option("--branding-url", action="store", type="string", dest="branding-url", default="", help="provides branding url")
@@ -61,7 +61,6 @@ Clone development modules to the work dir
 * `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
-
 ```bash
 cd ../..
 git clone https://github.com/ONLYOFFICE/sdkjs.git
 git clone https://github.com/ONLYOFFICE/web-apps.git
 git clone https://github.com/ONLYOFFICE/server.git

@@ -75,14 +74,15 @@ along with the relative paths to the required folders.
 The folders `sdkjs` and `web-apps` are required for proper development workflow.
 The folder `server` is optional

-**Note**: Run command with the current working directory
-containing `sdkjs`, `web-apps`...

 **Note**: ONLYOFFICE server uses port 80.
 Look for another application using port 80 and stop it

 **Note**: Server start with `sdkjs` and `web-apps` takes 15 minutes
 and takes 20 minutes with `server`

+**Note**: Run command from work dir with development modules

 ### docker run on Windows (PowerShell)

 **Note**: Run PowerShell as administrator to fix EACCES error when installing
@@ -10,15 +10,17 @@ import package_utils as utils
 # parse
 parser = argparse.ArgumentParser(description="Build packages.")
 parser.add_argument("-P", "--platform", dest="platform", type=str,
                     action="store", help="Defines platform", required=True)
 parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
                     action="store", help="Defines targets", required=True)
-parser.add_argument("-R", "--branding", dest="branding", type=str,
-                    action="store", help="Provides branding path")
 parser.add_argument("-V", "--version", dest="version", type=str,
                     action="store", help="Defines version")
 parser.add_argument("-B", "--build", dest="build", type=str,
                     action="store", help="Defines build")
+parser.add_argument("-H", "--branch", dest="branch", type=str,
+                    action="store", help="Defines branch")
+parser.add_argument("-R", "--branding", dest="branding", type=str,
+                    action="store", help="Provides branding path")
 args = parser.parse_args()

 # vars
@@ -29,8 +31,16 @@ common.targets = args.targets
 common.clean = "clean" in args.targets
 common.sign = "sign" in args.targets
 common.deploy = "deploy" in args.targets
-common.version = args.version if args.version else utils.get_env("BUILD_VERSION", "0.0.0")
-common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0")
+if args.version: common.version = args.version
+else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0")
+utils.set_env("PRODUCT_VERSION", common.version)
+utils.set_env("BUILD_VERSION", common.version)
+if args.build: common.build = args.build
+else: common.build = utils.get_env("BUILD_NUMBER", "0")
+utils.set_env("BUILD_NUMBER", common.build)
+if args.branch: common.branch = args.branch
+else: common.branch = utils.get_env("BRANCH_NAME", "null")
+utils.set_env("BRANCH_NAME", common.branch)
 common.branding = args.branding
 common.timestamp = utils.get_timestamp()
 common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
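The net effect of the new version/build/branch block is a single precedence rule: an explicit CLI flag wins, otherwise the environment is consulted, and the resolved value is written back to the environment so downstream package scripts see consistent PRODUCT_VERSION, BUILD_NUMBER and BRANCH_NAME values. A minimal standalone sketch of that rule (plain os.environ stands in for the utils.get_env/set_env helpers, which are not shown in this diff):

```python
import os

def resolve(cli_value, env_name, default):
    # An explicit CLI argument wins; otherwise fall back to the environment, then a default.
    value = cli_value if cli_value else os.environ.get(env_name, default)
    # Write the resolved value back so child processes inherit it.
    os.environ[env_name] = value
    return value

version = resolve(None, "PRODUCT_VERSION", "0.0.0")  # env value, or "0.0.0"
build = resolve("42", "BUILD_NUMBER", "0")           # "42"; BUILD_NUMBER is now "42"
```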
scripts/base.py (114 changes)

@@ -39,6 +39,9 @@ def is_os_arm():
     return False
   return True

+def get_platform():
+  return platform.machine().lower()
+
 def is_python_64bit():
   return (struct.calcsize("P") == 8)
@@ -384,7 +387,7 @@ def cmd2(prog, args=[], is_no_errors=False):
     sys.exit("Error (" + prog + "): " + str(ret))
   return ret

-def cmd_exe(prog, args):
+def cmd_exe(prog, args, is_no_errors=False):
   prog_dir = os.path.dirname(prog)
   env_dir = os.environ
   if ("linux" == host_platform()):
@@ -406,7 +409,7 @@ def cmd_exe(prog, args):
     command += (" \"" + arg + "\"")
   process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir)
   ret = process.wait()
-  if ret != 0:
+  if ret != 0 and True != is_no_errors:
     sys.exit("Error (" + prog + "): " + str(ret))
   return ret
@@ -426,12 +429,13 @@ def cmd_and_return_cwd(prog, args=[], is_no_errors=False):

 def run_command(sCommand):
   popen = subprocess.Popen(sCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-  result = {'stdout' : '', 'stderr' : ''}
+  result = {'stdout' : '', 'stderr' : '', 'returncode' : 0}
   try:
     stdout, stderr = popen.communicate()
     popen.wait()
     result['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
     result['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
+    result['returncode'] = popen.returncode
   finally:
     popen.stdout.close()
     popen.stderr.close()
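With the returncode field added above, callers can distinguish a command that failed from one that merely printed nothing. A usage sketch (a standalone re-implementation of the same pattern, not the repository's exact helper):

```python
import subprocess

def run_command(cmd):
    # Same result shape as base.run_command after this change.
    popen = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, shell=True)
    stdout, stderr = popen.communicate()
    return {"stdout": stdout.decode("utf-8", "ignore").strip(),
            "stderr": stderr.decode("utf-8", "ignore").strip(),
            "returncode": popen.returncode}

res = run_command("git rev-parse --abbrev-ref HEAD")
branch = res["stdout"] if res["returncode"] == 0 else "develop"  # safe fallback
print(branch)
```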
@@ -496,12 +500,37 @@ def set_cwd(dir):
   return

 # git ---------------------------------------------------
+def git_get_origin():
+  cur_dir = os.getcwd()
+  os.chdir(get_script_dir() + "/../")
+  ret = run_command("git config --get remote.origin.url")["stdout"]
+  os.chdir(cur_dir)
+  return ret
+
+def git_is_ssh():
+  git_protocol = config.option("git-protocol")
+  if (git_protocol == "https"):
+    return False
+  if (git_protocol == "ssh"):
+    return True
+  origin = git_get_origin()
+  if (git_protocol == "auto") and (origin.find(":ONLYOFFICE/") != -1):
+    return True
+  return False
+
+def get_ssh_base_url():
+  cur_origin = git_get_origin()
+  ind = cur_origin.find(":ONLYOFFICE/")
+  if (ind == -1):
+    return "git@github.com:ONLYOFFICE/"
+  return cur_origin[:ind+12]
+
 def git_update(repo, is_no_errors=False, is_current_dir=False, git_owner=""):
   print("[git] update: " + repo)
   owner = git_owner if git_owner else "ONLYOFFICE"
   url = "https://github.com/" + owner + "/" + repo + ".git"
-  if config.option("git-protocol") == "ssh":
-    url = "git@github.com:ONLYOFFICE/" + repo + ".git"
+  if git_is_ssh():
+    url = get_ssh_base_url() + repo + ".git"
   folder = get_script_dir() + "/../../" + repo
   if is_current_dir:
     folder = repo
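The new auto mode keys off the origin URL: SSH remotes look like git@github.com:ONLYOFFICE/build_tools.git, so the presence of the ':ONLYOFFICE/' marker selects SSH, and get_ssh_base_url reuses everything up to and including that marker (ind+12 is exactly the marker's length). A minimal sketch of the slicing, assuming an SSH-style origin:

```python
origin = "git@github.com:ONLYOFFICE/build_tools.git"
marker = ":ONLYOFFICE/"

ind = origin.find(marker)              # 14
base_url = origin[:ind + len(marker)]  # "git@github.com:ONLYOFFICE/"
print(base_url + "sdkjs" + ".git")     # git@github.com:ONLYOFFICE/sdkjs.git
```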
@@ -570,8 +599,8 @@ def get_branding_repositories(checker):
 def create_pull_request(branches_to, repo, is_no_errors=False, is_current_dir=False):
   print("[git] create pull request: " + repo)
   url = "https://github.com/ONLYOFFICE/" + repo + ".git"
-  if config.option("git-protocol") == "ssh":
-    url = "git@github.com:ONLYOFFICE/" + repo + ".git"
+  if git_is_ssh():
+    url = get_ssh_base_url() + repo + ".git"
   folder = get_script_dir() + "/../../" + repo
   if is_current_dir:
     folder = repo
@@ -1046,15 +1075,15 @@ def web_apps_addons_param():
 def download(url, dst):
   return cmd_exe("curl", ["-L", "-o", dst, url])

-def extract(src, dst):
+def extract(src, dst, is_no_errors=False):
   app = "7za" if ("mac" == host_platform()) else "7z"
-  return cmd_exe(app, ["x", "-y", src, "-o" + dst])
+  return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors)

-def extract_unicode(src, dst):
+def extract_unicode(src, dst, is_no_errors=False):
   if "windows" == host_platform():
     run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"])
     return
-  return extract(src, dst)
+  return extract(src, dst, is_no_errors)

 def archive_folder(src, dst):
   app = "7za" if ("mac" == host_platform()) else "7z"
@@ -1225,7 +1254,12 @@ def mac_correct_rpath_docbuilder(dir):
   cmd("install_name_tool", ["-add_rpath", "@executable_path", "./docbuilder"], True)
   mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
   mac_correct_rpath_library("docbuilder.c", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer"])
-  cmd("install_name_tool", ["-add_rpath", "@loader_path", "libdocbuilder.c.dylib"], True)
+
+  def add_loader_path_to_rpath(libs):
+    for lib in libs:
+      cmd("install_name_tool", ["-add_rpath", "@loader_path", "lib" + lib + ".dylib"], True)
+
+  add_loader_path_to_rpath(["icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "docbuilder.c"])
   os.chdir(cur_dir)
   return
@@ -1271,7 +1305,7 @@ def linux_set_origin_rpath_libraries(dir, libs):
   return

 def linux_correct_rpath_docbuilder(dir):
-  linux_set_origin_rpath_libraries(dir, ["docbuilder.c.so", "icuuc.so.58", "doctrenderer.so", "graphics.so", "kernel.so", "kernel_network.so", "UnicodeConverter.so"])
+  linux_set_origin_rpath_libraries(dir, ["docbuilder.jni.so", "docbuilder.c.so", "icuuc.so.58", "doctrenderer.so", "graphics.so", "kernel.so", "kernel_network.so", "UnicodeConverter.so"])
   return

 def common_check_version(name, good_version, clean_func):
@@ -1327,7 +1361,7 @@ def copy_marketplace_plugin(dst_dir, is_name_as_guid=False, is_desktop_local=Fal
   git_dir = __file__script__path__ + "/../.."
   if False:
     # old version
-    base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
+    copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
     return
   src_dir_path = git_dir + "/onlyoffice.github.io/store/plugin"
   name = "marketplace"
@@ -1354,7 +1388,7 @@ def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False, i
   plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content"
   plugins_list_config = config.option("sdkjs-plugin")
   if isXp:
-    plugins_list_config="photoeditor, macros, drawio, highlightcode, doc2md"
+    plugins_list_config="photoeditor, macros, highlightcode, doc2md"
   if ("" == plugins_list_config):
     return
   plugins_list = plugins_list_config.rsplit(", ")
@@ -1743,3 +1777,53 @@ def check_tools():
   create_dir(directory + "/qt")
   cmd("python", [directory + "/arm/build_qt.py", "--arch", "arm64", directory + "/qt/arm64"])
   return
+
+def apply_patch(file, patch):
+  patch_content = readFile(patch)
+  index1 = patch_content.find("<<<<<<<")
+  index2 = patch_content.find("=======")
+  index3 = patch_content.find(">>>>>>>")
+  file_content_old = patch_content[index1 + 7:index2].strip()
+  file_content_new = patch_content[index2 + 7:index3].strip()
+  #file_content_new = "\n#if 0" + file_content_old + "#else" + file_content_new + "#endif\n"
+  replaceInFile(file, file_content_old, file_content_new)
+  return
+
+def get_autobuild_version(product, platform="", branch="", build=""):
+  download_platform = platform
+  if ("" == download_platform):
+    osType = get_platform()
+    isArm = True if (-1 != osType.find("arm")) else False
+    is64 = True if (osType.endswith("64")) else False
+
+    if ("windows" == host_platform()):
+      download_platform = "win-"
+    elif ("linux" == host_platform()):
+      download_platform = "linux-"
+    else:
+      download_platform = "mac-"
+
+    download_platform += ("arm" if isArm else "")
+    download_platform += ("64" if is64 else "32")
+  else:
+    download_platform = download_platform.replace("_", "-")
+
+  download_build = build
+  if ("" == download_build):
+    download_build = "latest"
+
+  download_branch = branch
+  if ("" == download_branch):
+    download_branch = "develop"
+
+  download_addon = download_branch + "/" + download_build + "/" + product + "-" + download_platform + ".7z"
+  return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/" + download_addon
+
+def create_x2t_js_cache(dir, product):
+  if is_file(dir + "/libdoctrenderer.dylib") and (os.path.getsize(dir + "/libdoctrenderer.dylib") < 5*1024*1024):
+    return
+
+  if (product in ["builder", "server"]):
+    cmd_in_dir(dir, "./x2t", ["-create-js-cache"], True)
+    cmd_in_dir(dir, "./x2t", ["-create-js-snapshots"], True)
+  return
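Note that the new apply_patch helper does not consume unified diffs: it reads a single conflict-marker-style block and substitutes the old snippet for the new one via replaceInFile. A sketch of the format it expects (the snippet contents here are illustrative):

```python
# An illustrative patch file: the text between "<<<<<<<" and "=======" must
# match the target file verbatim; it is replaced by the text between
# "=======" and ">>>>>>>".
patch_content = """<<<<<<<
m_timeout = 5000;
=======
m_timeout = 30000;
>>>>>>>"""

old = patch_content[patch_content.find("<<<<<<<") + 7:patch_content.find("=======")].strip()
new = patch_content[patch_content.find("=======") + 7:patch_content.find(">>>>>>>")].strip()
print(old, "->", new)  # m_timeout = 5000; -> m_timeout = 30000;
```

get_autobuild_version, added in the same hunk, simply assembles an archive key: with the defaults on 64-bit non-ARM Windows it returns http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/develop/latest/core-win-64.7z for product "core".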
@@ -146,15 +146,25 @@ def build_sdk_native(directory, minimize=True):
   _run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
   return


+def build_sdkjs_develop(root_dir):
+  external_folder = config.option("--external-folder")
+  if (external_folder != ""):
+    external_folder = "/" + external_folder
+
+  _run_npm_ci(root_dir + external_folder + "/sdkjs/build")
+  _run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
+  _run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
+
+
 def build_js_develop(root_dir):
   #_run_npm_cli(root_dir + "/sdkjs/build")
   external_folder = config.option("--external-folder")
   if (external_folder != ""):
     external_folder = "/" + external_folder

-  _run_npm_ci(root_dir + external_folder + "/sdkjs/build")
-  _run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
-  _run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
+  build_sdkjs_develop(root_dir)

   _run_npm(root_dir + external_folder + "/web-apps/build")
   _run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
   _run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
@@ -46,4 +46,16 @@ def make(solution=""):
   else:
     base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
   base.restorePathForBuilder(new_replace_path)
+
+  # build Java docbuilder wrapper
+  if config.check_option("module", "builder") and "onlyoffice" == config.branding():
+    for platform in platforms:
+      if not platform in config.platforms:
+        continue
+
+      # build JNI library
+      qmake.make(platform, base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java/src/jni/docbuilder_jni.pro", "", True)
+      # build Java code to JAR
+      base.cmd_in_dir(base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java", "python", ["make.py"])
+
   return
@@ -49,7 +49,6 @@ cpp_flags = [

   "-DUCONFIG_NO_COLLATION=0",
   "-DUCONFIG_NO_FORMATTING=0",
-  "-DUCONFIG_NO_LEGACY_CONVERSION=1",
   "-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
   "-DUCONFIG_NO_TRANSLITERATION=0",
@@ -27,8 +27,16 @@ def make():
   base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
   if not base.is_dir(base_dir + "/socket.io-client-cpp"):
     base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
+    base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
     base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
     base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
+    base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
+    base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
+    # patches
+    base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
+    base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
+    base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
+    base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")

   # no tls realization (remove if socket.io fix this)
   dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"
@@ -76,9 +76,12 @@ def make():
   if (0 == platform.find("win")):
     base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
   base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
   # python wrapper
   base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
   base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
+  # java wrapper
+  base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.jni")
+  base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.java/build/libs/docbuilder.jar", root_dir + "/docbuilder.jar")

   # app
   base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")

@@ -120,6 +123,8 @@ def make():
   if (0 == platform.find("mac")):
     base.mac_correct_rpath_x2t(root_dir)
     base.mac_correct_rpath_docbuilder(root_dir)
+
+  base.create_x2t_js_cache(root_dir, "builder")

   return
@@ -39,6 +39,7 @@ def make():
   base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
   base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
   base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
+  base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell")
   base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
   base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")

@@ -61,13 +62,14 @@ def make():
   base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
   base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
   base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
+  base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester")

   # js cache
   base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
   base.create_x2t_js_cache(archive_dir, "core")
   base.delete_file(archive_dir + "/DoctRenderer.config")

   # dictionaries
   base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)

   if base.is_file(archive_dir + ".7z"):
     base.delete_file(archive_dir + ".7z")
   base.archive_folder(archive_dir + "/*", archive_dir + ".7z")

   return
@@ -260,6 +260,8 @@ def make():
   if isUseJSC:
     base.delete_file(root_dir + "/converter/icudtl.dat")

+  base.create_x2t_js_cache(root_dir + "/converter", "desktop")
+
   if (0 == platform.find("win")):
     base.delete_file(root_dir + "/cef_sandbox.lib")
     base.delete_file(root_dir + "/libcef.lib")
@@ -114,10 +114,13 @@ def make():
   js_dir = root_dir
   base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
   base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
   base.copy_file(js_dir + "/web-apps/apps/api/documents/api.js", js_dir + "/web-apps/apps/api/documents/api.js.tpl")
+  for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
+      + glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
+    base.delete_file(file)

   base.create_x2t_js_cache(converter_dir, "server")

   # add embed worker code
   base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
@@ -5,9 +5,6 @@ import base
 import os
 import json

-def get_core_url(arch, branch):
-  return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
-
 def make():
   git_dir = base.get_script_dir() + "/../.."
   old_cur = os.getcwd()

@@ -18,16 +15,10 @@ def make():
   os.chdir(work_dir)

-  arch = "x64"
-  arch2 = "_64"
-  if ("windows" == base.host_platform()) and not base.host_platform_is64():
-    arch = "x86"
-    arch2 = "_32"
-
-  url = get_core_url(arch, config.option("branch"))
+  url = base.get_autobuild_version("core", "", config.option("branch"))
   data_url = base.get_file_last_modified_url(url)
   if (data_url == "" and config.option("branch") != "develop"):
-    url = get_core_url(arch, "develop")
+    url = base.get_autobuild_version("core", "", "develop")
     data_url = base.get_file_last_modified_url(url)

   old_data_url = base.readFile("./core.7z.data")

@@ -49,12 +40,6 @@ def make():
     base.extract("./core.7z", "./")
     base.writeFile("./core.7z.data", data_url)

-    platform = ""
-    if ("windows" == base.host_platform()):
-      platform = "win" + arch2
-    else:
-      platform = base.host_platform() + arch2
-
     base.copy_files("./core/*", "./")
   else:
     print("-----------------------------------------------------------")
@@ -904,7 +904,16 @@ def install_gruntcli():

 def install_mysqlserver():
   if (host_platform == 'windows'):
-    return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
+    program_files = os.environ["ProgramFiles(x86)"]
+    mysql_installer_path = f"\"{program_files}\MySQL\MySQL Installer for Windows\MySQLInstallerConsole.exe\""
+    mysql_installer_params = " community install server;"
+    mysql_installer_params += "8.0.21" + ";"
+    mysql_installer_params += "x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid="
+    mysql_installer_params += "3306" + ';'
+    mysql_installer_params += "enable_tcpip=true;port=" + "3306" + ";"
+    mysql_installer_params += "rootpasswd=" + "onlyoffice"
+    mysql_installer_params += " -silent"
+    base.cmd2(mysql_installer_path, [mysql_installer_params])
   elif (host_platform == 'linux'):
     os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
     code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
@@ -49,7 +49,18 @@ def run_integration_example():

 def start_linux_services():
   base.print_info('Restart MySQL Server')

+
+def update_config(args):
+  platform = base.host_platform()
+  branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
+
+  if ("linux" == platform):
+    base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
+  else:
+    base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
+
 def make_start():
   base.configure_common_apps()

@@ -64,15 +75,8 @@ def make_start():
   start_linux_services()

 def make_configure(args):
-  platform = base.host_platform()
-  branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
-
   base.print_info('Build modules')
-  if ("linux" == platform):
-    base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
-  else:
-    base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
+  update_config(args)
   base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])

 def make_install():
   platform = base.host_platform()
@@ -30,6 +30,59 @@ if utils.is_macos():
   builder_product_name = "Document Builder"

 if utils.is_linux():
-  desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"]
-  builder_make_targets = ["deb", "rpm"] # tar
-  server_make_targets = ["deb", "rpm", "tar"]
+  builder_make_targets = [
+    {
+      "make": "tar",
+      "src": "tar/*.tar*",
+      "dst": "builder/linux/generic/"
+    },
+    {
+      "make": "deb",
+      "src": "deb/*.deb",
+      "dst": "builder/linux/debian/"
+    },
+    {
+      "make": "rpm",
+      "src": "rpm/build/RPMS/*/*.rpm",
+      "dst": "builder/linux/rhel/"
+    }
+  ]
+  desktop_make_targets = [
+    {
+      "make": "tar",
+      "src": "tar/*.tar*",
+      "dst": "desktop/linux/generic/"
+    },
+    {
+      "make": "deb",
+      "src": "deb/*.deb",
+      "dst": "desktop/linux/debian/"
+    },
+    {
+      "make": "rpm",
+      "src": "rpm/build/RPMS/*/*.rpm",
+      "dst": "desktop/linux/rhel/"
+    },
+    {
+      "make": "rpm-suse",
+      "src": "rpm-suse/build/RPMS/*/*.rpm",
+      "dst": "desktop/linux/suse/"
+    }
+  ]
+  server_make_targets = [
+    {
+      "make": "deb",
+      "src": "deb/*.deb",
+      "dst": "server/linux/debian/"
+    },
+    {
+      "make": "rpm",
+      "src": "rpm/builddir/RPMS/*/*.rpm",
+      "dst": "server/linux/rhel/"
+    },
+    {
+      "make": "tar",
+      "src": "*.tar*",
+      "dst": "server/linux/snap/"
+    }
+  ]
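Each make target is now a {make, src, dst} record: make is the Makefile goal to invoke, src the glob of artifacts it produces, and dst the S3 prefix to upload them to. The package scripts in the diffs that follow consume the records uniformly; a minimal sketch of the pattern (print stands in for their utils.glob_path/s3_upload calls):

```python
builder_make_targets = [
    {"make": "deb", "src": "deb/*.deb", "dst": "builder/linux/debian/"},
    {"make": "rpm", "src": "rpm/build/RPMS/*/*.rpm", "dst": "builder/linux/rhel/"},
]

make_args = [t["make"] for t in builder_make_targets]  # ["deb", "rpm"]
for t in builder_make_targets:
    # one upload step per target, driven entirely by the record
    print("upload", t["src"], "->", t["dst"])
```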
@@ -7,14 +7,17 @@ import package_branding as branding

 def make():
   utils.log_h1("BUILDER")
-  if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
-    utils.log("Unsupported host OS")
-    return
+  if common.deploy:
+    make_archive()
   if utils.is_windows():
     make_windows()
   elif utils.is_macos():
     make_macos()
   elif utils.is_linux():
     make_linux()
+  else:
+    utils.log("Unsupported host OS")
   return

 def s3_upload(files, dst):
@@ -29,6 +32,37 @@ def s3_upload(files, dst):
     ret &= upload
   return ret

+def make_archive():
+  utils.set_cwd(utils.get_path(
+    "build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
+
+  utils.log_h2("builder archive build")
+  utils.delete_file("builder.7z")
+  args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"]
+  if utils.is_windows():
+    ret = utils.cmd(*args, verbose=True)
+  else:
+    ret = utils.sh(" ".join(args), verbose=True)
+  utils.set_summary("builder archive build", ret)
+
+  utils.log_h2("builder archive deploy")
+  dest = "builder-" + common.prefix.replace("_","-") + ".7z"
+  dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
+  dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
+  ret = utils.s3_upload(
+    "builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
+  utils.set_summary("builder archive deploy", ret)
+  if ret:
+    utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
+    utils.add_deploy_data(dest_version)
+    utils.s3_copy(
+      "s3://" + branding.s3_bucket + "/" + dest_version,
+      "s3://" + branding.s3_bucket + "/" + dest_latest)
+    utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
+
+  utils.set_cwd(common.workspace_dir)
+  return
+
 def make_windows():
   global inno_file, zip_file, suffix, key_prefix
   utils.set_cwd("document-builder-package")
@@ -137,7 +171,7 @@ def make_linux():
   utils.set_cwd("document-builder-package")

   utils.log_h2("builder build")
-  make_args = branding.builder_make_targets
+  make_args = [t["make"] for t in branding.builder_make_targets]
   if common.platform == "linux_aarch64":
     make_args += ["-e", "UNAME_M=aarch64"]
   if not branding.onlyoffice:
@@ -146,32 +180,10 @@ def make_linux():
   utils.set_summary("builder build", ret)

   if common.deploy:
-    if ret:
-      if "tar" in branding.builder_make_targets:
-        utils.log_h2("builder tar deploy")
-        ret = s3_upload(
-          utils.glob_path("tar/*.tar.gz"),
-          "builder/linux/generic/")
-        utils.set_summary("builder tar deploy", ret)
-      if "deb" in branding.builder_make_targets:
-        utils.log_h2("builder deb deploy")
-        ret = s3_upload(
-          utils.glob_path("deb/*.deb"),
-          "builder/linux/debian/")
-        utils.set_summary("builder deb deploy", ret)
-      if "rpm" in branding.builder_make_targets:
-        utils.log_h2("builder rpm deploy")
-        ret = s3_upload(
-          utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
-          "builder/linux/rhel/")
-        utils.set_summary("builder rpm deploy", ret)
-    else:
-      if "tar" in branding.builder_make_targets:
-        utils.set_summary("builder tar deploy", False)
-      if "deb" in branding.builder_make_targets:
-        utils.set_summary("builder deb deploy", False)
-      if "rpm" in branding.builder_make_targets:
-        utils.set_summary("builder rpm deploy", False)
+    for t in branding.builder_make_targets:
+      utils.log_h2("builder " + t["make"] + " deploy")
+      ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
+      utils.set_summary("builder " + t["make"] + " deploy", ret)

   utils.set_cwd(common.workspace_dir)
   return
@@ -10,47 +10,38 @@ def make():
     utils.log("Unsupported host OS")
     return
   if common.deploy:
-    make_core()
+    make_archive()
   return

-def make_core():
-  prefix = common.platformPrefixes[common.platform]
-  company = branding.company_name.lower()
-  repos = {
-    "windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." + common.build },
-    "windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." + common.build },
-    "darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build },
-    "darwin_arm64": { "repo": "mac", "arch": "arm", "version": common.version + "-" + common.build },
-    "linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build },
-  }
-  repo = repos[common.platform]
-  branch = utils.get_env("BRANCH_NAME")
-  core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company))
-  dest_version = "%s/core/%s/%s/%s" % (repo["repo"], branch, repo["version"], repo["arch"])
-  dest_latest = "%s/core/%s/%s/%s" % (repo["repo"], branch, "latest", repo["arch"])
-
-  if branch is None:
-    utils.log_err("BRANCH_NAME variable is undefined")
-    utils.set_summary("core deploy", False)
-    return
-  if not utils.is_file(core_7z):
-    utils.log_err("file not exist: " + core_7z)
-    utils.set_summary("core deploy", False)
-    return
+def make_archive():
+  utils.set_cwd(utils.get_path(
+    "build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
+
+  utils.log_h2("core archive build")
+  utils.delete_file("core.7z")
+  args = ["7z", "a", "-y", "core.7z", "./core/*"]
+  if utils.is_windows():
+    ret = utils.cmd(*args, verbose=True)
+  else:
+    ret = utils.sh(" ".join(args), verbose=True)
+  utils.set_summary("core archive build", ret)

-  utils.log_h2("core deploy")
+  utils.log_h2("core archive deploy")
+  dest = "core-" + common.prefix.replace("_","-") + ".7z"
+  dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
+  dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
   ret = utils.s3_upload(
-    core_7z,
-    "s3://" + branding.s3_bucket + "/" + dest_version + "/core.7z")
+    "core.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
+  utils.set_summary("core archive deploy", ret)
   if ret:
-    utils.log("URL: " + branding.s3_base_url + "/" + dest_version + "/core.7z")
-    utils.add_deploy_data(dest_version + "/core.7z")
-    ret = utils.s3_sync(
-      "s3://" + branding.s3_bucket + "/" + dest_version + "/",
-      "s3://" + branding.s3_bucket + "/" + dest_latest + "/",
-      delete=True)
-    utils.log("URL: " + branding.s3_base_url + "/" + dest_latest + "/core.7z")
-    utils.set_summary("core deploy", ret)
+    utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
+    utils.add_deploy_data(dest_version)
+    utils.s3_copy(
+      "s3://" + branding.s3_bucket + "/" + dest_version,
+      "s3://" + branding.s3_bucket + "/" + dest_latest)
+    utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)

   utils.set_cwd(common.workspace_dir)
   return

 def deploy_closuremaps_sdkjs(license):
@@ -329,7 +329,7 @@ def make_linux():
   utils.set_cwd("desktop-apps/win-linux/package/linux")

   utils.log_h2("desktop build")
-  make_args = branding.desktop_make_targets
+  make_args = [t["make"] for t in branding.desktop_make_targets]
   if common.platform == "linux_aarch64":
     make_args += ["-e", "UNAME_M=aarch64"]
   if not branding.onlyoffice:
@@ -337,68 +337,11 @@ def make_linux():
   ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
   utils.set_summary("desktop build", ret)

-  rpm_arch = "*"
-  if common.platform == "linux_aarch64": rpm_arch = "aarch64"
-
   if common.deploy:
-    if ret:
-      if "tar" in branding.desktop_make_targets:
-        utils.log_h2("desktop tar deploy")
-        ret = s3_upload(
-          utils.glob_path("tar/*.tar*"),
-          "desktop/linux/generic/")
-        utils.set_summary("desktop tar deploy", ret)
-      if "deb" in branding.desktop_make_targets:
-        utils.log_h2("desktop deb deploy")
-        ret = s3_upload(
-          utils.glob_path("deb/*.deb"),
-          "desktop/linux/debian/")
-        utils.set_summary("desktop deb deploy", ret)
-      if "deb-astra" in branding.desktop_make_targets:
-        utils.log_h2("desktop deb-astra deploy")
-        ret = s3_upload(
-          utils.glob_path("deb-astra/*.deb"),
-          "desktop/linux/astra/")
-        utils.set_summary("desktop deb-astra deploy", ret)
-      if "rpm" in branding.desktop_make_targets:
-        utils.log_h2("desktop rpm deploy")
-        ret = s3_upload(
-          utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
-          "desktop/linux/rhel/")
-        utils.set_summary("desktop rpm deploy", ret)
-      if "suse-rpm" in branding.desktop_make_targets:
-        utils.log_h2("desktop suse-rpm deploy")
-        ret = s3_upload(
-          utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
-          "desktop/linux/suse/")
-        utils.set_summary("desktop suse-rpm deploy", ret)
-      if "apt-rpm" in branding.desktop_make_targets:
-        utils.log_h2("desktop apt-rpm deploy")
-        ret = s3_upload(
-          utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
-          "desktop/linux/altlinux/")
-        utils.set_summary("desktop apt-rpm deploy", ret)
-      if "urpmi" in branding.desktop_make_targets:
-        utils.log_h2("desktop urpmi deploy")
-        ret = s3_upload(
-          utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm"),
-          "desktop/linux/rosa/")
-        utils.set_summary("desktop urpmi deploy", ret)
-    else:
-      if "tar" in branding.desktop_make_targets:
-        utils.set_summary("desktop tar deploy", False)
-      if "deb" in branding.desktop_make_targets:
-        utils.set_summary("desktop deb deploy", False)
-      if "deb-astra" in branding.desktop_make_targets:
-        utils.set_summary("desktop deb-astra deploy", False)
-      if "rpm" in branding.desktop_make_targets:
-        utils.set_summary("desktop rpm deploy", False)
-      if "suse-rpm" in branding.desktop_make_targets:
-        utils.set_summary("desktop suse-rpm deploy", False)
-      if "apt-rpm" in branding.desktop_make_targets:
-        utils.set_summary("desktop apt-rpm deploy", False)
-      if "urpmi" in branding.desktop_make_targets:
-        utils.set_summary("desktop urpmi deploy", False)
+    for t in branding.desktop_make_targets:
+      utils.log_h2("desktop " + t["make"] + " deploy")
+      ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
+      utils.set_summary("desktop " + t["make"] + " deploy", ret)

   utils.set_cwd(common.workspace_dir)
   return
@@ -61,7 +61,8 @@ def make_linux(edition):
   utils.set_cwd("document-server-package")

   utils.log_h2("server " + edition + " build")
-  make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name]
+  make_args = [t["make"] for t in branding.server_make_targets]
+  make_args += ["-e", "PRODUCT_NAME=" + product_name]
   if common.platform == "linux_aarch64":
     make_args += ["-e", "UNAME_M=aarch64"]
   if not branding.onlyoffice:
@@ -70,40 +71,10 @@ def make_linux(edition):
   utils.set_summary("server " + edition + " build", ret)

   if common.deploy:
-    if ret:
-      if "deb" in branding.server_make_targets:
-        utils.log_h2("server " + edition + " deb deploy")
-        ret = s3_upload(
-          utils.glob_path("deb/*.deb"),
-          "server/linux/debian/")
-        utils.set_summary("server " + edition + " deb deploy", ret)
-      if "rpm" in branding.server_make_targets:
-        utils.log_h2("server " + edition + " rpm deploy")
-        ret = s3_upload(
-          utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
-          "server/linux/rhel/")
-        utils.set_summary("server " + edition + " rpm deploy", ret)
-      if "apt-rpm" in branding.server_make_targets:
-        utils.log_h2("server " + edition + " apt-rpm deploy")
-        ret = s3_upload(
-          utils.glob_path("apt-rpm/builddir/RPMS/*/*.rpm"),
-          "server/linux/altlinux/")
-        utils.set_summary("server " + edition + " apt-rpm deploy", ret)
-      if "tar" in branding.server_make_targets:
-        utils.log_h2("server " + edition + " snap deploy")
-        ret = s3_upload(
-          utils.glob_path("*.tar.gz"),
-          "server/linux/snap/")
-        utils.set_summary("server " + edition + " snap deploy", ret)
-    else:
-      if "deb" in branding.server_make_targets:
-        utils.set_summary("server " + edition + " deb deploy", False)
-      if "rpm" in branding.server_make_targets:
-        utils.set_summary("server " + edition + " rpm deploy", False)
-      if "apt-rpm" in branding.server_make_targets:
-        utils.set_summary("server " + edition + " apt-rpm deploy", False)
-      if "tar" in branding.server_make_targets:
-        utils.set_summary("server " + edition + " snap deploy", False)
+    for t in branding.server_make_targets:
+      utils.log_h2("server " + edition + " " + t["make"] + " deploy")
+      ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
+      utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)

   utils.set_cwd(common.workspace_dir)
   return
@@ -385,15 +385,13 @@ def s3_upload(src, dst, **kwargs):
   ret = sh(" ".join(args), verbose=True)
   return ret

-def s3_sync(src, dst, **kwargs):
+def s3_copy(src, dst, **kwargs):
   args = ["aws"]
   if kwargs.get("endpoint_url"):
     args += ["--endpoint-url", kwargs["endpoint_url"]]
-  args += ["s3", "sync", "--no-progress"]
+  args += ["s3", "cp", "--no-progress"]
   if kwargs.get("acl"):
     args += ["--acl", kwargs["acl"]]
-  if kwargs.get("delete") and kwargs["delete"]:
-    args += ["--delete"]
   args += [src, dst]
   if is_windows():
     ret = cmd(*args, verbose=True)
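The sync-with-delete helper becomes a plain object copy: one source key, one destination key, no --delete. A sketch of the argument assembly mirroring the new s3_copy (the standalone function name here is illustrative):

```python
def build_s3_copy_args(src, dst, **kwargs):
    # Mirrors the argument assembly in s3_copy() above.
    args = ["aws"]
    if kwargs.get("endpoint_url"):
        args += ["--endpoint-url", kwargs["endpoint_url"]]
    args += ["s3", "cp", "--no-progress"]
    if kwargs.get("acl"):
        args += ["--acl", kwargs["acl"]]
    return args + [src, dst]

print(" ".join(build_s3_copy_args("s3://bucket/v1/core.7z", "s3://bucket/latest/core.7z")))
# aws s3 cp --no-progress s3://bucket/v1/core.7z s3://bucket/latest/core.7z
```

Unlike sync, cp copies a single object and never deletes at the destination, which is why the delete kwarg was dropped.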
@@ -29,7 +29,7 @@ def check_support_platform(platform):
     return False
   return True

-def make(platform, project, qmake_config_addon=""):
+def make(platform, project, qmake_config_addon="", is_no_errors=False):
   # check platform
   if not check_support_platform(platform):
     print("THIS PLATFORM IS NOT SUPPORTED")

@@ -100,7 +100,7 @@ def make(platform, project, qmake_config_addon=""):
     base.cmd_and_return_cwd("make", distclean_params, True)
     base.cmd(qmake_app, build_params)
     base.correct_makefile_after_qmake(platform, makefile)
-    base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num())
+    base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num(), is_no_errors)
   else:
     config_params_array = base.qt_config_as_param(config_param)
     config_params_string = ""

@@ -123,7 +123,7 @@ def make(platform, project, qmake_config_addon=""):
     if ("0" != config.option("multiprocess")):
       qmake_bat.append("set CL=/MP")
     qmake_bat.append("call nmake -f " + makefile)
-    base.run_as_bat(qmake_bat)
+    base.run_as_bat(qmake_bat, is_no_errors)

   if (base.is_file(stash_file)):
     base.delete_file(stash_file)
@@ -2,6 +2,8 @@
 import os
 import shutil
+import re
+import argparse

 def readFile(path):
   with open(path, "r", errors='replace') as file:
     filedata = file.read()

@@ -179,7 +181,7 @@ class EditorApi(object):

   def generate(self):
     for file in self.files:
-      file_content = readFile(file)
+      file_content = readFile(f'{sdkjs_dir}/{file}')
       arrRecords = file_content.split("/**")
       arrRecords = arrRecords[1:-1]
       for record in arrRecords:

@@ -187,8 +189,8 @@ class EditorApi(object):
       self.numfile += 1
     correctContent = ''.join(self.records)
     correctContent += "\n"
-    os.mkdir('deploy/api_builder/' + self.folder)
-    writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
+    os.mkdir(args.destination + self.folder)
+    writeFile(args.destination + self.folder + "/api.js", correctContent)
     return

 def convert_to_interface(arrFiles, sEditorType):

@@ -197,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType):
   editor.generate()
   return

-old_cur = os.getcwd()
-os.chdir("../../../sdkjs")
-if True == os.path.isdir('deploy/api_builder'):
-  shutil.rmtree('deploy/api_builder', ignore_errors=True)
-os.mkdir('deploy/api_builder')
-convert_to_interface(["word/apiBuilder.js"], "word")
-convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
-convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
-os.chdir(old_cur)
+sdkjs_dir = "../../../sdkjs"
+
+if __name__ == "__main__":
+  parser = argparse.ArgumentParser(description="Generate documentation")
+  parser.add_argument(
+    "destination",
+    type=str,
+    help="Destination directory for the generated documentation",
+    nargs='?', # Indicates the argument is optional
+    default="../../../onlyoffice.github.io\sdkjs-plugins\content\macros\libs/" # Default value
+  )
+  args = parser.parse_args()
+
+  old_cur = os.getcwd()
+
+  if True == os.path.isdir(args.destination):
+    shutil.rmtree(args.destination, ignore_errors=True)
+  os.mkdir(args.destination)
+  convert_to_interface(["word/apiBuilder.js"], "word")
+  convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
+  convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
+  os.chdir(old_cur)
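The destination is now an optional positional argument: nargs='?' makes argparse fall back to the default path when nothing is passed, so existing callers keep working while CI can redirect the output. A minimal sketch of that behavior (default path shortened and slash-normalized here):

```python
import argparse

parser = argparse.ArgumentParser(description="Generate documentation")
parser.add_argument("destination", type=str, nargs="?",
                    default="../onlyoffice.github.io/sdkjs-plugins/content/macros/libs/")

print(parser.parse_args([]).destination)          # default path
print(parser.parse_args(["./out/"]).destination)  # explicit path wins
```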
scripts/sdkjs_common/jsdoc/README.md (new file, 80 lines)

@@ -0,0 +1,80 @@
+# Documentation Generation Guide
+
+This guide explains how to generate documentation for the ONLYOFFICE Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, and `generate_docs_md.py`. These scripts create JSON and Markdown documentation for the `apiBuilder.js` files of the word, cell, and slide editors.
+
+## Requirements
+
+- Node.js v20 and above
+- Python v3.10 and above
+
+## Installation
+
+```bash
+git clone https://github.com/ONLYOFFICE/build_tools.git
+cd build_tools/scripts/sdkjs_common/jsdoc
+npm install
+```
+
+## Scripts Overview
+
+### `generate_docs_json.py`
+
+This script generates JSON documentation based on the `apiBuilder.js` files.
+
+- **Usage**:
+  ```bash
+  python generate_docs_json.py output_path
+  ```
+
+- **Parameters**:
+  - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api`.
+
+### `generate_docs_plugins_json.py`
+
+This script generates JSON documentation based on the `api_plugins.js` files.
+
+- **Usage**:
+  ```bash
+  python generate_docs_plugins_json.py output_path
+  ```
+
+- **Parameters**:
+  - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api-plugins`.
+
+### `generate_docs_md.py`
+
+This script generates Markdown documentation from the `apiBuilder.js` files.
+
+- **Usage**:
+  ```bash
+  python generate_docs_md.py output_path
+  ```
+
+- **Parameters**:
+  - `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `../../../../office-js-api/`.
+
+## Example
+
+To generate JSON documentation at a custom output path:
+
+```bash
+python generate_docs_json.py /path/to/save/json
+```
+
+To generate plugins JSON documentation at a custom output path:
+
+```bash
+python generate_docs_plugins_json.py /path/to/save/json
+```
+
+To generate Markdown documentation at a custom output path:
+
+```bash
+python generate_docs_md.py /path/to/save/markdown
+```
+
+## Notes
+
+- Make sure to have all necessary permissions to run these scripts and write to the specified directories.
+- The output directories will be created if they do not exist.
scripts/sdkjs_common/jsdoc/config/builder/cell.json (new file, 16 lines)

@@ -0,0 +1,16 @@
+{
+  "source": {
+    "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"]
+  },
+  "plugins": ["./correct_doclets.js"],
+  "opts": {
+    "destination": "./out",
+    "recurse": true,
+    "encoding": "utf8"
+  },
+  "templates": {
+    "json": {
+      "pretty": true
+    }
+  }
+}
216
scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js
Normal file
216
scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js
Normal file
@ -0,0 +1,216 @@
exports.handlers = {
    processingComplete: function(e) {
        // array for filtered doclets
        let filteredDoclets = [];

        const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;

        const classesDocletsMap = {}; // doclets for classes are written at the end
        let passedClasses = []; // classes that passed the filter for the current editor

        // Remove duplicate doclets
        const latestDoclets = {};
        e.doclets.forEach(doclet => {
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));

            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;

            if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
                latestDoclets[doclet.longname] = doclet;
            }
        });
        e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));

        // check which classes are available for the current editor
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));

            const shouldAdd =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' &&
                (!isMethod || hasTypeofEditorsTag);

            if (shouldAdd) {
                if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
                    passedClasses.push(cleanName(doclet.memberof));
                }
            }
            else if (doclet.kind == 'class') {
                classesDocletsMap[cleanName(doclet.name)] = doclet;
            }
        }

        // remove classes that are unavailable in the current editor
        passedClasses = passedClasses.filter(className => {
            const doclet = classesDocletsMap[className];
            if (!doclet) {
                return true;
            }

            const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));

            // a class passes if there is no editor tag or the current editor is among the tags
            const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
            return isPassed;
        });

        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));

            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;

            if (shouldAddMethod) {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.memberof))) {
                    continue;
                }

                // Keep only the necessary fields
                doclet.memberof = cleanName(doclet.memberof);
                doclet.longname = cleanName(doclet.longname);
                doclet.name = cleanName(doclet.name);

                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    memberof: cleanName(doclet.memberof),

                    params: doclet.params ? doclet.params.map(param => ({
                        type: param.type ? {
                            names: param.type.names,
                            parsedType: param.type.parsedType
                        } : param.type,

                        name: param.name,
                        description: param.description,
                        optional: param.optional,
                        defaultvalue: param.defaultvalue
                    })) : doclet.params,

                    returns: doclet.returns ? doclet.returns.map(returnObj => ({
                        type: {
                            names: returnObj.type.names,
                            parsedType: returnObj.type.parsedType
                        }
                    })) : doclet.returns,

                    name: doclet.name,
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: doclet.scope,

                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type,

                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,

                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,

                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,

                    see: doclet.see
                };

                // Add the filtered doclet to the array
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'class') {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.name))) {
                    continue;
                }

                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    augments: doclet.augments || undefined,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,

                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see || undefined
                };

                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'typedef') {
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",

                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,

                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,

                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,

                    see: doclet.see,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type
                };

                filteredDoclets.push(filteredDoclet);
            }
        }

        // Replace doclets with the filtered array
        e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
    }
};
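Since the plugin reduces each doclet to a small, stable set of fields (`kind`, `name`, `memberof`, `params`, and so on), the per-editor JSON it produces is easy to post-process. A hedged sketch that lists the classes kept for one editor (the path is a placeholder):

```python
# Sketch: inspect the doclets that survived filtering for the word editor.
# "out/word.json" is a placeholder path to a file produced by
# generate_docs_json.py.
import json

with open("out/word.json", encoding="utf-8") as f:
    doclets = json.load(f)

class_names = [d["name"] for d in doclets if d["kind"] == "class"]
print(sorted(class_names))
```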
16
scripts/sdkjs_common/jsdoc/config/builder/forms.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/forms.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
16
scripts/sdkjs_common/jsdoc/config/builder/slide.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/slide.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
16
scripts/sdkjs_common/jsdoc/config/builder/word.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/word.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/word/apiBuilder.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
16
scripts/sdkjs_common/jsdoc/config/plugins/cell.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/cell.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/cell/api_plugins.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
16
scripts/sdkjs_common/jsdoc/config/plugins/common.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/common.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/common/plugins/plugin_base_api.js", "../../../../sdkjs/common/apiBase_plugins.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
85
scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
Normal file
85
scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
Normal file
@ -0,0 +1,85 @@
exports.handlers = {
    processingComplete: function(e) {
        const filteredDoclets = [];

        // Recursively delete null/undefined properties from a doclet object
        function checkNullProps(oDoclet) {
            for (let key of Object.keys(oDoclet)) {
                if (oDoclet[key] == null) {
                    delete oDoclet[key];
                }
                if (typeof(oDoclet[key]) == "object") {
                    checkNullProps(oDoclet[key]);
                }
            }
        }

        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            if (true == doclet.undocumented || doclet.kind == 'package') {
                continue;
            }

            const filteredDoclet = {
                comment: doclet.comment,

                meta: doclet.meta ? {
                    lineno: doclet.meta.lineno,
                    columnno: doclet.meta.columnno
                } : doclet.meta,

                kind: doclet.kind,
                since: doclet.since,
                name: doclet.name,
                type: doclet.type ? {
                    names: doclet.type.names,
                    parsedType: doclet.type.parsedType
                } : doclet.type,

                description: doclet.description,
                memberof: doclet.memberof,

                properties: doclet.properties ? doclet.properties.map(property => ({
                    type: property.type ? {
                        names: property.type.names,
                        parsedType: property.type.parsedType
                    } : property.type,

                    name: property.name,
                    description: property.description,
                    optional: property.optional,
                    defaultvalue: property.defaultvalue
                })) : doclet.properties,

                longname: doclet.longname,
                scope: doclet.scope,
                alias: doclet.alias,

                params: doclet.params ? doclet.params.map(param => ({
                    type: param.type ? {
                        names: param.type.names,
                        parsedType: param.type.parsedType
                    } : param.type,

                    name: param.name,
                    description: param.description,
                    optional: param.optional,
                    defaultvalue: param.defaultvalue
                })) : doclet.params,

                returns: doclet.returns ? doclet.returns.map(returnObj => ({
                    type: {
                        names: returnObj.type.names,
                        parsedType: returnObj.type.parsedType
                    }
                })) : doclet.returns,
                see: doclet.see
            };

            checkNullProps(filteredDoclet);

            filteredDoclets.push(filteredDoclet);
        }

        e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
    }
};
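`checkNullProps` walks the doclet recursively and drops every `null`/`undefined` field in place, which keeps the emitted JSON compact. The same idea expressed in Python for comparison (the sample doclet is made up):

```python
# Sketch of the null-stripping idea behind checkNullProps, written as a pure
# function over dicts and lists. The sample doclet below is illustrative only.
def strip_nulls(value):
    if isinstance(value, dict):
        return {k: strip_nulls(v) for k, v in value.items() if v is not None}
    if isinstance(value, list):
        return [strip_nulls(v) for v in value]
    return value

doclet = {"name": "Api", "memberof": None, "params": [{"name": "x", "type": None}]}
print(strip_nulls(doclet))  # {'name': 'Api', 'params': [{'name': 'x'}]}
```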
16
scripts/sdkjs_common/jsdoc/config/plugins/forms.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/forms.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs-forms/apiPlugins.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
16
scripts/sdkjs_common/jsdoc/config/plugins/slide.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/slide.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/slide/api_plugins.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
16
scripts/sdkjs_common/jsdoc/config/plugins/word.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/word.json
Normal file
@ -0,0 +1,16 @@
{
    "source": {
        "include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"]
    },
    "plugins": ["./correct_doclets.js"],
    "opts": {
        "destination": "./out",
        "recurse": true,
        "encoding": "utf8"
    },
    "templates": {
        "json": {
            "pretty": true
        }
    }
}
110
scripts/sdkjs_common/jsdoc/generate_docs_json.py
Normal file
110
scripts/sdkjs_common/jsdoc/generate_docs_json.py
Normal file
@ -0,0 +1,110 @@
import os
import subprocess
import json
import argparse
import re
import platform

root = '../../../..'

# Configuration files
configs = [
    "./config/builder/word.json",
    "./config/builder/cell.json",
    "./config/builder/slide.json",
    "./config/builder/forms.json"
]

editors_maps = {
    "word": "CDE",
    "cell": "CSE",
    "slide": "CPE",
    "forms": "CFE"
}

def generate(output_dir, md=False):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        command_set_env = "export"
        if platform.system().lower() == "windows":
            command_set_env = "set"
        command = f"{command_set_env} EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)

    # Append examples to the JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")

        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)

        # Modify the JSON data
        for doclet in data:
            if 'see' in doclet:
                if doclet['see'] is not None:
                    if editor_name == 'forms':
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
                    else:
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())

                    file_path = f'{root}/' + doclet['see'][0]

                    if os.path.exists(file_path):
                        with open(file_path, 'r', encoding='utf-8') as see_file:
                            example_content = see_file.read()

                        # Extract the first line as a comment if it exists
                        lines = example_content.split('\n')
                        if lines[0].startswith('//'):
                            comment = lines[0] + '\n'
                            code_content = '\n'.join(lines[1:])
                        else:
                            comment = ''
                            code_content = example_content

                        if md:
                            doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
                        else:
                            document_type = editor_name
                            if "forms" == document_type:
                                document_type = "pdf"
                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'

        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)

    print("Documentation generation for builder completed.")

def remove_builder_lines(text):
    lines = text.splitlines()  # Split text into lines
    filtered_lines = [line for line in lines if not line.strip().startswith("builder.")]
    return "\n".join(filtered_lines)

def remove_js_comments(text):
    # Remove single-line comment markers, keeping the text after //
    text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Remove multi-line comment markers, keeping the text inside /* */
    text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL)
    return text.strip()

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default=f"{root}/office-js-api-declarations/office-js-api"
    )
    args = parser.parse_args()
    generate(args.destination)
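Note that `remove_js_comments` only strips the comment markers, so the first line of an example file survives as plain text. A quick, self-contained check of the same regexes (the input string is made up):

```python
# Quick check of the comment-stripping regexes used above; the input string
# is illustrative.
import re

def remove_js_comments(text):
    text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL)
    return text.strip()

print(remove_js_comments("// This example creates a new paragraph."))
# -> This example creates a new paragraph.
```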
277
scripts/sdkjs_common/jsdoc/generate_docs_md.py
Normal file
277
scripts/sdkjs_common/jsdoc/generate_docs_md.py
Normal file
@ -0,0 +1,277 @@
import os
import json
import re
import shutil
import argparse
import generate_docs_json

# Editors to process
editors = [
    "word",
    "cell",
    "slide",
    "forms"
]

missing_examples = []

def load_json(file_path):
    with open(file_path, 'r', encoding='utf-8') as f:
        return json.load(f)

def write_markdown_file(file_path, content):
    with open(file_path, 'w', encoding='utf-8') as md_file:
        md_file.write(content)

def remove_js_comments(text):
    # Remove single-line comment markers, keeping the text after //
    text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Remove multi-line comment markers, keeping the text inside /* */
    text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL)
    return text.strip()

def correct_description(string):
    if string is None:
        return 'No description provided.'

    # Replace opening <b> tags with **
    string = re.sub(r'<b>', '**', string)
    # Replace closing </b> tags with **
    string = re.sub(r'</b>', '**', string)
    # Replace <note> blocks
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL)

def correct_default_value(value, enumerations, classes):
    if value is None:
        return ''

    if value == True:
        value = "true"
    elif value == False:
        value = "false"
    else:
        value = str(value)

    return generate_data_types_markdown([value], enumerations, classes)

def remove_line_breaks(string):
    return re.sub(r'[\r\n]', ' ', string)

def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    param_types_md = ' | '.join(types)

    for enum in enumerations:
        if enum['name'] in types:
            param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}]({root}Enumeration/{enum['name']}.md)")
    for cls in classes:
        if cls in types:
            param_types_md = param_types_md.replace(cls, f"[{cls}]({root}{cls}/{cls}.md)")

    def replace_with_links(match):
        element = match.group(1).strip()
        base_type = element.split('.')[0]  # Take only the first part before the dot, if any
        if any(enum['name'] == base_type for enum in enumerations):
            return f"<[{element}]({root}Enumeration/{base_type}.md)>"
        elif base_type in classes:
            return f"<[{element}]({root}{base_type}/{base_type}.md)>"
        return f"<{element}>"

    return re.sub(r'<([^<>]+)>', replace_with_links, param_types_md)

def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n"

    content += generate_properties_markdown(properties, enumerations, classes, '../')

    content += "## Methods\n\n"
    for method in methods:
        method_name = method['name']
        content += f"- [{method_name}](./Methods/{method_name}.md)\n"
    return content

def generate_method_markdown(method, enumerations, classes):
    method_name = method['name']
    description = method.get('description', 'No description provided.')
    description = correct_description(description)
    params = method.get('params', [])
    returns = method.get('returns', [])
    example = method.get('example', '')
    memberof = method.get('memberof', '')

    content = f"# {method_name}\n\n{description}\n\n"

    # Syntax section
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\nexpression.{method_name}({param_list});\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"

    content += "## Parameters\n\n"

    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)

            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"

    content += "\n## Returns\n\n"
    if returns:
        return_type = ', '.join(returns[0].get('type', {}).get('names', [])) if returns[0].get('type') else 'Unknown'

        # Check for enumerations and classes in the return type and add links if they exist
        return_type_md = generate_data_types_markdown([return_type], enumerations, classes)
        content += return_type_md
    else:
        content += "This method doesn't return any data."

    if example:
        # Separate the comment from the code and remove comment symbols
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"

    return content

def generate_properties_markdown(properties, enumerations, classes, root='../../'):
    if properties is None:
        return ''

    content = "## Properties\n\n"
    content += "| Name | Type | Description |\n"
    content += "| ---- | ---- | ----------- |\n"
    for prop in properties:
        prop_name = prop['name']
        prop_description = prop.get('description', 'No description provided.')
        prop_description = remove_line_breaks(correct_description(prop_description))
        param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes, root)
        content += f"| {prop_name} | {param_types_md} | {prop_description} |\n"
    content += "\n"

    return content

def generate_enumeration_markdown(enumeration, enumerations, classes):
    enum_name = enumeration['name']
    description = enumeration.get('description', 'No description provided.')
    description = correct_description(description)
    example = enumeration.get('example', '')

    content = f"# {enum_name}\n\n{description}\n\n"

    if 'TypeUnion' == enumeration['type']['parsedType']['type']:
        content += "## Type\n\nEnumeration\n\n"
        content += "## Values\n\n"
        elements = enumeration['type']['parsedType']['elements']
        for element in elements:
            element_name = element['name'] if element['type'] != 'NullLiteral' else 'null'
            # Check if the element is in enumerations or classes before adding a link
            if any(enum['name'] == element_name for enum in enumerations):
                content += f"- [{element_name}](../../Enumeration/{element_name}.md)\n"
            elif element_name in classes:
                content += f"- [{element_name}](../../{element_name}/{element_name}.md)\n"
            else:
                content += f"- {element_name}\n"
    elif enumeration['properties'] is not None:
        content += "## Type\n\nObject\n\n"
        content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
    else:
        content += "## Type\n\n"
        types = enumeration['type']['names']
        for t in types:
            t = generate_data_types_markdown([t], enumerations, classes)
            content += t + "\n\n"

    if example:
        # Separate the comment from the code and remove comment symbols
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"

    return content

def process_doclets(data, output_dir, editor_name):
    classes = {}
    classes_props = {}
    enumerations = []
    editor_dir = os.path.join(output_dir, editor_name)

    for doclet in data:
        if doclet['kind'] == 'class':
            class_name = doclet['name']
            classes[class_name] = []
            classes_props[class_name] = doclet.get('properties', None)
        elif doclet['kind'] == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                if class_name not in classes:
                    classes[class_name] = []
                classes[class_name].append(doclet)
        elif doclet['kind'] == 'typedef':
            enumerations.append(doclet)

    # Process classes
    for class_name, methods in classes.items():
        class_dir = os.path.join(editor_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)

        # Write the class file
        class_content = generate_class_markdown(class_name, methods, classes_props[class_name], enumerations, classes)
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)

        # Write the method files
        for method in methods:
            method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
            method_content = generate_method_markdown(method, enumerations, classes)
            write_markdown_file(method_file_path, method_content)
            if not method.get('example', ''):
                missing_examples.append(os.path.relpath(method_file_path, output_dir))

    # Process enumerations
    enum_dir = os.path.join(editor_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)

    for enum in enumerations:
        enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
        enum_content = generate_enumeration_markdown(enum, enumerations, classes)
        write_markdown_file(enum_file_path, enum_content)
        if not enum.get('example', ''):
            missing_examples.append(os.path.relpath(enum_file_path, output_dir))

def generate(output_dir):
    print('Generating Markdown documentation...')

    generate_docs_json.generate(output_dir + 'tmp_json', md=True)
    for editor_name in editors:
        input_file = os.path.join(output_dir + 'tmp_json', editor_name + ".json")
        os.makedirs(output_dir + f'/{editor_name.title()}', exist_ok=True)

        data = load_json(input_file)
        process_doclets(data, output_dir, editor_name.title())

    shutil.rmtree(output_dir + 'tmp_json')
    print('Done')

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default="../../../../office-js-api/"  # Default value
    )
    args = parser.parse_args()
    generate(args.destination)
    print("START_MISSING_EXAMPLES")
    print(",".join(missing_examples))
    print("END_MISSING_EXAMPLES")
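`correct_description` is the one place where JSDoc HTML is rewritten for Markdown: `<b>` pairs become `**` and `<note>` blocks are prefixed with 💡. An illustrative check of the same function (the input string is made up):

```python
# Illustrative check of the <b>/<note> rewriting performed by
# correct_description; the sample string is invented.
import re

def correct_description(string):
    if string is None:
        return 'No description provided.'
    string = re.sub(r'<b>', '**', string)
    string = re.sub(r'</b>', '**', string)
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL)

print(correct_description("<b>Bold</b> text. <note>Remember to save.</note>"))
# -> **Bold** text. 💡 Remember to save.
```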
99
scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py
Normal file
99
scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py
Normal file
@ -0,0 +1,99 @@
import os
import subprocess
import json
import argparse
import re

# Configuration files
configs = [
    "./config/plugins/common.json",
    "./config/plugins/word.json",
    "./config/plugins/cell.json",
    "./config/plugins/slide.json",
    "./config/plugins/forms.json"
]

root = '../../../..'

def generate(output_dir, md=False):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        command = f"npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)

    # Append examples to the JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")

        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)

        # Modify the JSON data
        for doclet in data:
            if 'see' in doclet:
                if doclet['see'] is not None:
                    if editor_name == 'forms':
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
                    else:
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())

                    file_path = f'{root}/' + doclet['see'][0]

                    if os.path.exists(file_path):
                        with open(file_path, 'r', encoding='utf-8') as see_file:
                            example_content = see_file.read()

                        # Extract the first line as a comment if it exists
                        lines = example_content.split('\n')
                        if lines[0].startswith('//'):
                            comment = lines[0] + '\n'
                            code_content = '\n'.join(lines[1:])
                        else:
                            comment = ''
                            code_content = example_content

                        doclet['examples'] = [remove_js_comments(comment) + code_content]

                        if not md:
                            document_type = editor_name
                            if "forms" == document_type:
                                document_type = "pdf"
                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'

        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)

    print("Documentation generation for plugins completed.")

def remove_builder_lines(text):
    lines = text.splitlines()  # Split text into lines
    filtered_lines = [line for line in lines if not line.strip().startswith("builder.")]
    return "\n".join(filtered_lines)

def remove_js_comments(text):
    # Remove single-line comment markers, keeping the text after //
    text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Remove multi-line comment markers, keeping the text inside /* */
    text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL)
    return text.strip()

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default=f"{root}/office-js-api-declarations/office-js-api-plugins"
    )
    args = parser.parse_args()
    generate(args.destination)
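The `{Editor}` placeholder in `@see` paths resolves to the capitalized editor name, except for `forms`, which maps to `Word`. A small sketch of that substitution (the example path is invented):

```python
# Sketch of the {Editor} substitution applied to @see paths above;
# the example path is invented.
def resolve_see(see_path, editor_name):
    editor = "Word" if editor_name == "forms" else editor_name.title()
    return see_path.replace("{Editor}", editor)

print(resolve_see("Examples/{Editor}/Api/Methods/GetFullName.js", "cell"))
# -> Examples/Cell/Api/Methods/GetFullName.js
```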
7
scripts/sdkjs_common/jsdoc/package.json
Normal file
7
scripts/sdkjs_common/jsdoc/package.json
Normal file
@ -0,0 +1,7 @@
{
    "dependencies": {
        "jsdoc-to-markdown": "7.1.1",
        "dmd": "6.1.0",
        "handlebars": "4.7.7"
    }
}
@ -14,6 +14,15 @@ def is_exist_in_array(projects, proj):
        return True
    return False

def get_full_projects_list(json_data, list):
    result = []
    for rec in list:
        if rec in json_data:
            result += get_full_projects_list(json_data, json_data[rec])
        else:
            result.append(rec)
    return result

def adjust_project_params(params):
    ret_params = params

@ -86,13 +95,9 @@ def get_projects(pro_json_path, platform):

    # check aliases to modules
    records_src = data[module]
    records = []
    records = get_full_projects_list(data, records_src)

    for rec in records_src:
        if rec in data:
            records += data[rec]
        else:
            records.append(rec)
    print(records)

    for rec in records:
        params = []
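`get_full_projects_list` replaces the old inline alias expansion and, unlike it, resolves aliases recursively: an alias may itself contain aliases, as `builder` -> `core` -> `spell` does in `sln.json` below. A small sketch with invented module data:

```python
# Sketch of the recursive alias expansion; json_data below is a toy stand-in
# for sln.json's module lists.
def get_full_projects_list(json_data, modules):
    result = []
    for rec in modules:
        if rec in json_data:
            # the entry names another module: expand it recursively
            result += get_full_projects_list(json_data, json_data[rec])
        else:
            # the entry is a concrete .pro project
            result.append(rec)
    return result

json_data = {
    "spell": ["hunspell.pro"],
    "core": ["cryptopp.pro", "spell"],
    "builder": ["core", "docbuilder_func_lib.pro"],
}
print(get_full_projects_list(json_data, json_data["builder"]))
# -> ['cryptopp.pro', 'hunspell.pro', 'docbuilder_func_lib.pro']
```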
18
sln.json
18
sln.json
@ -1,6 +1,10 @@
{
  "root" : "../",

  "spell" : [
    "[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
  ],

  "core" : [

    "core/Common/3dParty/cryptopp/project/cryptopp.pro",
@ -23,7 +27,6 @@
    "core/DocxRenderer/DocxRenderer.pro",

    "core/DesktopEditor/doctrenderer/doctrenderer.pro",
    "core/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder_func_lib.pro",

    "[!no_x2t]core/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro",
    "[!no_x2t]core/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro",
@ -51,15 +54,19 @@

    "[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro",

    "spell",

    "[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro",
    "[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro",
    "[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro",
    "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro"
    "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro",
    "[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro"

  ],

  "builder" : [
    "core"
    "core",
    "core/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder_func_lib.pro"
  ],

  "server" : [
@ -70,13 +77,8 @@
    "[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro"
  ],

  "spell" : [
    "[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
  ],

  "desktop" : [
    "core",
    "spell",
    "multimedia",

    "core/DesktopEditor/xmlsec/src/ooxmlsignature.pro",