mirror of
https://github.com/ONLYOFFICE/build_tools.git
synced 2026-04-07 14:06:31 +08:00
Compare commits
353 Commits
fix/bug-59
...
v99.99.99.
| Author | SHA1 | Date | |
|---|---|---|---|
| 23db442c82 | |||
| 4d4d1612ce | |||
| ded3dfa63c | |||
| 227ecbde99 | |||
| d288d6326c | |||
| 1539c187e3 | |||
| 78df8eb494 | |||
| eebbd513d3 | |||
| 74c02f9d50 | |||
| 25a1e16824 | |||
| 2b07d1aa4d | |||
| 4c76406f8c | |||
| 6fd89057ec | |||
| ae8b77628e | |||
| 959d919d9e | |||
| 3589ea0f60 | |||
| ad23ee2803 | |||
| bc59f739f5 | |||
| b8e42184f8 | |||
| 520d779f04 | |||
| c4b938b7db | |||
| 9435cdc99b | |||
| cf90a5ce21 | |||
| 12ce537781 | |||
| 12d824fe2d | |||
| de33755900 | |||
| 6e87116634 | |||
| 029b16ca68 | |||
| 6a9b2bac4a | |||
| 87542f4a56 | |||
| 32b47cd21e | |||
| bf75e1c062 | |||
| 97fccfa34b | |||
| 1ed32fe71c | |||
| 3ce8f251a1 | |||
| e2ad38f297 | |||
| 993303bfa4 | |||
| 50d9460f63 | |||
| 4b02b57c07 | |||
| 1fc9382ce9 | |||
| ea43e67fe8 | |||
| dd28a41e17 | |||
| b11a273d65 | |||
| d4ee25b004 | |||
| a2b7719100 | |||
| 1e6cde4d98 | |||
| 34f627d146 | |||
| 54accd4394 | |||
| 63557fba56 | |||
| 7a4be158c2 | |||
| 810e12bd22 | |||
| 066f7ad8c1 | |||
| e52a654731 | |||
| 370879f636 | |||
| 170a511654 | |||
| 679afe1bc4 | |||
| 8b5cfff24a | |||
| 27de97031e | |||
| 8ee874da14 | |||
| 11c783f088 | |||
| a3cb31291f | |||
| 6a43b86912 | |||
| 21bb535ee0 | |||
| 9ea948b825 | |||
| fe2fad9378 | |||
| d566ffd9fa | |||
| 370b23f38f | |||
| 253ee696be | |||
| e08c6f79bc | |||
| 4240319fef | |||
| e1aaa2415b | |||
| e71eb56630 | |||
| 38496f2971 | |||
| d1c7d8d9f6 | |||
| 36fdfd672f | |||
| 55c0f61189 | |||
| 053e317850 | |||
| 38296bf292 | |||
| f0ba4564cc | |||
| 21ec70214d | |||
| 6d1a8376ba | |||
| 0ca83fe152 | |||
| 2301c407a2 | |||
| d6096431bd | |||
| d7532d5b83 | |||
| c7d805f8df | |||
| d78ab30cdf | |||
| c123f77195 | |||
| a60bc78e23 | |||
| 78ee107e85 | |||
| 12c3310451 | |||
| d525d8f603 | |||
| 337d1095dc | |||
| fab40cb6b3 | |||
| f4cdc1aecd | |||
| f702e3245a | |||
| d890ba4f43 | |||
| d929ed411f | |||
| 55daa28d74 | |||
| 2bab12aad1 | |||
| 80fb376132 | |||
| 1d557f1065 | |||
| 30df3df8cf | |||
| 02b4655a16 | |||
| debf0158d4 | |||
| 0f730c1948 | |||
| fa7e324fe0 | |||
| e2313e6a3d | |||
| 2ce8c42323 | |||
| 684e65adaa | |||
| a8fc3fb2f1 | |||
| 68bcdb2f88 | |||
| af3627bccb | |||
| 4cbe032363 | |||
| 5e4b3cf0d2 | |||
| 593af1048b | |||
| ae00ecb773 | |||
| da83e42172 | |||
| 2895d53f8e | |||
| 10d1f22ec3 | |||
| 4ed1e64a61 | |||
| 6402936285 | |||
| e01e5c145a | |||
| 56f6d82c8f | |||
| 3e79cf0c12 | |||
| efc09657a8 | |||
| 64390c3e01 | |||
| 513edb802d | |||
| 52c35b8e3c | |||
| cf1c25031c | |||
| 7b9f18867a | |||
| 0985b4dbe8 | |||
| 772fb721ae | |||
| 1ef1c795c1 | |||
| 6d956566c5 | |||
| edec5bb25f | |||
| 3534f65f0e | |||
| 6fbea9c8a4 | |||
| 18bba5da3d | |||
| 952270e1ba | |||
| 0c180e6ee5 | |||
| fdd9c329b1 | |||
| 5b80459b37 | |||
| 1b646a6e00 | |||
| cf970efbec | |||
| 4020cdac69 | |||
| 2415c2ffe8 | |||
| d41502ea19 | |||
| f5d0ef4005 | |||
| c4a89ecf61 | |||
| 71eb25e561 | |||
| 486a6683fd | |||
| 2175d8d87c | |||
| f463bff49e | |||
| a817e2b046 | |||
| 3539e36bde | |||
| 6930a9ffe1 | |||
| e0a44502b1 | |||
| 19e1bd5586 | |||
| ea65ba02f1 | |||
| 8406e48009 | |||
| a8f1d11cbc | |||
| f245a4a9c6 | |||
| 597529a16d | |||
| 9b9dba05c2 | |||
| 2d0bbc824f | |||
| fa523c673f | |||
| da1a4ba393 | |||
| e9c9712e52 | |||
| 78561ca659 | |||
| 1ad87383e3 | |||
| c29ac1549f | |||
| f09eeb19e5 | |||
| 4b7b2c78a2 | |||
| 414af6bdb0 | |||
| df7288b275 | |||
| ce80953086 | |||
| d1344dab71 | |||
| 4f2ba4ae76 | |||
| 6bd525c3b4 | |||
| 341671a612 | |||
| 9161aa1556 | |||
| 70e9fbabce | |||
| a2c00deba2 | |||
| 9b4ef9d1d7 | |||
| 3baee0c14e | |||
| 0508bf43d1 | |||
| bd279d1ad7 | |||
| 4d55a66307 | |||
| 9481e01581 | |||
| fe91bf9620 | |||
| d812ba379b | |||
| e1cc7f3c83 | |||
| f50d5d2cd1 | |||
| b3987b0ad5 | |||
| 243946a189 | |||
| 63fbbc5603 | |||
| fcb857df69 | |||
| dabbc31c09 | |||
| 997bfa3dd5 | |||
| 50eca8aab5 | |||
| 6e4a2e4d5e | |||
| 40e9938885 | |||
| 5bc8ca2266 | |||
| 4cdbfbfb86 | |||
| 01575d1f2e | |||
| 8f75c75b80 | |||
| ebc084f9ea | |||
| 626efaf5cf | |||
| 096ce99588 | |||
| 9ce103b31b | |||
| 13cbd84b58 | |||
| a8912dff41 | |||
| 8b773614ba | |||
| d04f04f382 | |||
| 9a44dae4f9 | |||
| 07665dd93e | |||
| eeca17e78b | |||
| f91264bc94 | |||
| 0983e67f21 | |||
| 8e7db87554 | |||
| 9d000b2284 | |||
| e29fd0ca09 | |||
| dcfde5b5e7 | |||
| 871750d6ae | |||
| d6b5dc0830 | |||
| e99a3e8978 | |||
| 13db6d3155 | |||
| f8845d4fc5 | |||
| efcfb00239 | |||
| 1727313e54 | |||
| f6d55d07c1 | |||
| 331bbadaad | |||
| f012c604b8 | |||
| a8f6b0c599 | |||
| e46d73869c | |||
| 6bf413a008 | |||
| 10b7f63f9f | |||
| f2dff2d173 | |||
| 963c3bf212 | |||
| f7071569d9 | |||
| 4e5eadbf82 | |||
| 113e2e7821 | |||
| 21c8c699dd | |||
| db36b7dc40 | |||
| 38522989d3 | |||
| aa49605ac4 | |||
| 3af65bf276 | |||
| 0a51c3bdea | |||
| ba6c3a8f38 | |||
| 66e196b5ec | |||
| d4a49d7137 | |||
| 1cca8af54f | |||
| 7e925fd931 | |||
| 45448171d4 | |||
| 64ae3d9029 | |||
| edccac17f6 | |||
| 1d36cad17e | |||
| 08e6d5ba53 | |||
| 6505ee1b35 | |||
| 709612090a | |||
| 1af5c373e4 | |||
| 8181d187dd | |||
| 4b448e3305 | |||
| fd579511ae | |||
| e166237e5d | |||
| b934429e41 | |||
| d61c1da666 | |||
| 8f633771d9 | |||
| 684f478c54 | |||
| cb0099d746 | |||
| a72ead91dc | |||
| fd7c3c6cf3 | |||
| 5ef8abacfa | |||
| a01221ffc6 | |||
| cbd4ab2e15 | |||
| e70152b85b | |||
| 8a9c9a587e | |||
| 29c15d9acd | |||
| bf6773f666 | |||
| bba0ff87da | |||
| c9de5278ea | |||
| 6f5a791a1f | |||
| 1e7a720e74 | |||
| 10a7080928 | |||
| 7349c64253 | |||
| 88649507c7 | |||
| cc503473f9 | |||
| 10fcec1dd8 | |||
| 0679c0f6d7 | |||
| a1a69bdbab | |||
| da02b358e2 | |||
| 60dcea6ff4 | |||
| b5796d5e6c | |||
| 6338fd58c3 | |||
| 39b6841557 | |||
| f3a20e8e59 | |||
| 830df65573 | |||
| 2aeb9e1315 | |||
| 696c48c251 | |||
| dcf02e7e93 | |||
| 581091591b | |||
| 0e6f1a064d | |||
| 70975098e2 | |||
| 5b27f9843f | |||
| 71e29a6599 | |||
| 6fd43a4b18 | |||
| 11f207fbe2 | |||
| 6559d589dd | |||
| b7e9acc242 | |||
| bfd1cd0555 | |||
| 590dffdb78 | |||
| 0205dd6853 | |||
| cd03a42c1b | |||
| c1a8d181d2 | |||
| a17d5e04bb | |||
| e719ae24f0 | |||
| b4922e6899 | |||
| d8c2505fb8 | |||
| 02426e413f | |||
| bd05971ebb | |||
| 4e12692325 | |||
| f7ea69acc9 | |||
| 3640cea64d | |||
| f5ac8ac39d | |||
| f801e77208 | |||
| 2a8c5ea9eb | |||
| 181a42e344 | |||
| a0511ca3ac | |||
| 0b48f3a67f | |||
| 15727e83cc | |||
| 7d06432a76 | |||
| 761c47e26d | |||
| edc6a38dfb | |||
| 2b79e127c4 | |||
| 449875d5b8 | |||
| bbdb9e0107 | |||
| 0a613734f7 | |||
| ff2aa0434a | |||
| 2fa22ca2b3 | |||
| 25473c1b5c | |||
| 7c087e20b7 | |||
| 7250b59f19 | |||
| e54e7ad6ec | |||
| 4a2fd9fb72 | |||
| afd5f2b3be | |||
| d468b93e9f | |||
| 188ad0057f | |||
| bde91e3dbf | |||
| fa15db70c9 | |||
| 3d884963a7 | |||
| 8eb2d689fd | |||
| a2639afd7a |
13
.github/workflows/check.yml
vendored
13
.github/workflows/check.yml
vendored
@ -1,25 +1,24 @@
|
||||
name: Markdown check
|
||||
name: Markdown Lint
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- '*'
|
||||
- '**'
|
||||
paths:
|
||||
- '*.md'
|
||||
- 'develop/*.md'
|
||||
- 'scripts/**.md'
|
||||
- '.markdownlint.jsonc'
|
||||
|
||||
jobs:
|
||||
markdownlint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: DavidAnson/markdownlint-cli2-action@v9
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DavidAnson/markdownlint-cli2-action@v16
|
||||
with:
|
||||
command: config
|
||||
globs: |
|
||||
.markdownlint.jsonc
|
||||
*.md
|
||||
develop/*.md
|
||||
scripts/**.md
|
||||
scripts/**.md
|
||||
|
||||
9
.github/workflows/update-version.yml
vendored
9
.github/workflows/update-version.yml
vendored
@ -3,16 +3,13 @@ name: Update hard-coded version
|
||||
on: workflow_dispatch
|
||||
|
||||
jobs:
|
||||
|
||||
update-version:
|
||||
if: >-
|
||||
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
|
||||
contains(github.ref, 'refs/heads/release/v') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ secrets.PUSH_TOKEN }}
|
||||
|
||||
@ -25,9 +22,9 @@ jobs:
|
||||
run: echo "${{ env.version }}" > version
|
||||
|
||||
- name: Commit & push changes
|
||||
uses: EndBug/add-and-commit@v8
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
author_name: github-actions[bot]
|
||||
author_email: github-actions[bot]@users.noreply.github.com
|
||||
message: Update hard-coded version to v${{ env.version }}
|
||||
message: Update hard-coded version to ${{ env.version }}
|
||||
add: version
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@ -12,3 +12,5 @@ tests/puppeteer/node_modules
|
||||
tests/puppeteer/work_directory
|
||||
tests/puppeteer/package.json
|
||||
tests/puppeteer/package-lock.json
|
||||
scripts/sdkjs_common/jsdoc/node_modules
|
||||
scripts/sdkjs_common/jsdoc/package-lock.json
|
||||
|
||||
@ -11,5 +11,4 @@ RUN rm /usr/bin/python && ln -s /usr/bin/python2 /usr/bin/python
|
||||
ADD . /build_tools
|
||||
WORKDIR /build_tools
|
||||
|
||||
CMD cd tools/linux && \
|
||||
python3 ./automate.py
|
||||
CMD ["sh", "-c", "cd tools/linux && python3 ./automate.py"]
|
||||
|
||||
@ -196,9 +196,8 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
|
||||
**Note**: The created database must have **onlyoffice** both for user and password.
|
||||
|
||||
```bash
|
||||
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
|
||||
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
|
||||
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
|
||||
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
|
||||
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
|
||||
```
|
||||
|
||||
3. Configure the database:
|
||||
|
||||
149
build.pro
149
build.pro
@ -1,149 +0,0 @@
|
||||
TEMPLATE = subdirs
|
||||
|
||||
ROOT_DIR=$$PWD/..
|
||||
DEPLOY_DIR=$$PWD/deploy
|
||||
CORE_ROOT_DIR=$$ROOT_DIR/core
|
||||
|
||||
include($$PWD/common.pri)
|
||||
|
||||
CONFIG += ordered
|
||||
|
||||
core:CONFIG += core_libraries
|
||||
builder:CONFIG += core_libraries
|
||||
desktop:CONFIG += core_libraries
|
||||
server:CONFIG += core_libraries
|
||||
mobile:CONFIG += core_libraries
|
||||
|
||||
!core_libraries:CONFIG += no_x2t
|
||||
!core_libraries:CONFIG += no_use_common_binary
|
||||
!core_libraries:CONFIG += no_tests
|
||||
|
||||
core_windows {
|
||||
desktop:CONFIG += core_and_multimedia
|
||||
}
|
||||
core_linux {
|
||||
desktop:CONFIG += core_and_multimedia
|
||||
}
|
||||
core_mac {
|
||||
CONFIG += no_desktop_apps
|
||||
}
|
||||
core_ios {
|
||||
CONFIG += no_use_common_binary
|
||||
CONFIG += no_desktop_apps
|
||||
CONFIG += no_tests
|
||||
}
|
||||
core_android {
|
||||
CONFIG += no_use_common_binary
|
||||
CONFIG += no_desktop_apps
|
||||
CONFIG += no_tests
|
||||
}
|
||||
|
||||
core_libraries {
|
||||
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
|
||||
addSubProject(cfcpp, $$CORE_ROOT_DIR/Common/cfcpp/cfcpp.pro)
|
||||
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
|
||||
cryptopp)
|
||||
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
|
||||
unicodeconverter)
|
||||
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
|
||||
kernel unicodeconverter)
|
||||
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
|
||||
kernel unicodeconverter)
|
||||
addSubProject(pdffile, $$CORE_ROOT_DIR/PdfFile/PdfFile.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
|
||||
kernel unicodeconverter graphics pdffile)
|
||||
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
|
||||
kernel unicodeconverter graphics pdffile)
|
||||
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
|
||||
kernel unicodeconverter graphics network)
|
||||
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
|
||||
kernel unicodeconverter graphics htmlfile2)
|
||||
}
|
||||
!no_x2t {
|
||||
addSubProject(docxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro)
|
||||
addSubProject(pptxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro)
|
||||
addSubProject(xlsbformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro)
|
||||
|
||||
addSubProject(docformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro)
|
||||
addSubProject(pptformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro)
|
||||
addSubProject(xlsformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro)
|
||||
addSubProject(vbaformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro)
|
||||
|
||||
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/TxtFile/Projects/Linux/TxtXmlFormatLib.pro)
|
||||
addSubProject(rtfformat, $$CORE_ROOT_DIR/RtfFile/Projects/Linux/RtfFormatLib.pro)
|
||||
addSubProject(odffile, $$CORE_ROOT_DIR/OdfFile/Projects/Linux/OdfFormatLib.pro)
|
||||
|
||||
addSubProject(bindocument, $$CORE_ROOT_DIR/OOXML/Projects/Linux/BinDocument/BinDocument.pro)
|
||||
|
||||
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
|
||||
docxformat pptxformat xlsbformat docformat pptformat xlsformat vbaformat txtxmlformat rtfformat odffile cfcpp bindocument fb2file epubfile docxrenderer)
|
||||
}
|
||||
|
||||
!no_use_common_binary {
|
||||
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
|
||||
kernel unicodeconverter graphics doctrenderer)
|
||||
addSubProject(pluginsmanager, $$CORE_ROOT_DIR/DesktopEditor/pluginsmanager/pluginsmanager.pro,\
|
||||
kernel)
|
||||
addSubProject(vboxtester, $$CORE_ROOT_DIR/DesktopEditor/vboxtester/vboxtester.pro,\
|
||||
kernel)
|
||||
}
|
||||
|
||||
!no_tests {
|
||||
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
|
||||
addSubProject(x2ttester, $$CORE_ROOT_DIR/Test/Applications/x2tTester/x2ttester.pro)
|
||||
addSubProject(metafiletester, $$CORE_ROOT_DIR/Test/Applications/MetafileTester/MetafileTester.pro)
|
||||
|
||||
#TODO:
|
||||
!linux_arm64:addSubProject(ooxml_crypt, $$CORE_ROOT_DIR/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro)
|
||||
}
|
||||
|
||||
core_and_multimedia {
|
||||
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
}
|
||||
desktop {
|
||||
message(desktop)
|
||||
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
|
||||
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
|
||||
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdffile djvufile xpsfile)
|
||||
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
|
||||
documentscore)
|
||||
!core_mac {
|
||||
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
|
||||
documentscore)
|
||||
}
|
||||
|
||||
!no_desktop_apps {
|
||||
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
|
||||
documentscore videoplayer)
|
||||
core_windows:!build_xp:addSubProject(updatedaemon, $$ROOT_DIR/desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro)
|
||||
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
|
||||
documentscore videoplayer)
|
||||
}
|
||||
}
|
||||
|
||||
mobile {
|
||||
message(mobile)
|
||||
!desktop {
|
||||
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
|
||||
}
|
||||
}
|
||||
|
||||
osign {
|
||||
addSubProject(osign, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/osign/lib/osign.pro)
|
||||
}
|
||||
@ -21,12 +21,13 @@ parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp"
|
||||
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
|
||||
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
|
||||
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
|
||||
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
|
||||
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
|
||||
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
|
||||
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
|
||||
parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
|
||||
parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
|
||||
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="https", help="can be used only if update is set to true - 'https', 'ssh'")
|
||||
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="auto", help="can be used only if update is set to true - 'https', 'ssh'")
|
||||
parser.add_option("--branding", action="store", type="string", dest="branding", default="", help="provides branding path")
|
||||
parser.add_option("--branding-name", action="store", type="string", dest="branding-name", default="", help="provides branding name")
|
||||
parser.add_option("--branding-url", action="store", type="string", dest="branding-url", default="", help="provides branding url")
|
||||
|
||||
4
defaults
4
defaults
@ -1,3 +1,3 @@
|
||||
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, drawio, zotero"
|
||||
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition"
|
||||
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, zotero"
|
||||
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition, drawio"
|
||||
sdkjs-addons="sdkjs-forms"
|
||||
|
||||
@ -45,7 +45,7 @@ instruction show how to use docker without sudo.
|
||||
```bash
|
||||
cd build_tools/develop
|
||||
docker pull onlyoffice/documentserver
|
||||
docker build -t documentserver-develop .
|
||||
docker build --no-cache -t documentserver-develop .
|
||||
```
|
||||
|
||||
**Note**: The dot at the end is required.
|
||||
@ -61,7 +61,6 @@ Clone development modules to the work dir
|
||||
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
|
||||
|
||||
```bash
|
||||
cd ../..
|
||||
git clone https://github.com/ONLYOFFICE/sdkjs.git
|
||||
git clone https://github.com/ONLYOFFICE/web-apps.git
|
||||
git clone https://github.com/ONLYOFFICE/server.git
|
||||
@ -75,14 +74,15 @@ along with the relative paths to the required folders.
|
||||
The folders `sdkjs` and `web-apps` are required for proper development workflow.
|
||||
The folders `server` is optional
|
||||
|
||||
**Note**: Run command with the current working directory
|
||||
containing `sdkjs`, `web-apps`...
|
||||
|
||||
**Note**: ONLYOFFICE server uses port 80.
|
||||
Look for another application using port 80 and stop it
|
||||
|
||||
**Note**: Server start with `sdkjs` and `web-apps` takes 15 minutes
|
||||
and takes 20 minutes with `server`
|
||||
|
||||
**Note**: Run command from work dir with development modules
|
||||
|
||||
### docker run on Windows (PowerShell)
|
||||
|
||||
**Note**: Run PowerShell as administrator to fix EACCES error when installing
|
||||
|
||||
33
make.py
33
make.py
@ -1,19 +1,32 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
sys.path.append('scripts/develop')
|
||||
sys.path.append('scripts/develop/vendor')
|
||||
sys.path.append('scripts/core_common')
|
||||
sys.path.append('scripts/core_common/modules')
|
||||
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
|
||||
sys.path.append(__dir__name__ + '/scripts')
|
||||
sys.path.append(__dir__name__ + '/scripts/develop')
|
||||
sys.path.append(__dir__name__ + '/scripts/develop/vendor')
|
||||
sys.path.append(__dir__name__ + '/scripts/core_common')
|
||||
sys.path.append(__dir__name__ + '/scripts/core_common/modules')
|
||||
sys.path.append(__dir__name__ + '/scripts/core_common/modules/android')
|
||||
import config
|
||||
import base
|
||||
import build
|
||||
import build_sln
|
||||
import build_js
|
||||
import build_server
|
||||
import deploy
|
||||
import make_common
|
||||
import develop
|
||||
import argparse
|
||||
|
||||
base.check_python()
|
||||
|
||||
parser = argparse.ArgumentParser(description="options")
|
||||
parser.add_argument("--build-only-branding", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
if (args.build_only_branding):
|
||||
base.set_env("OO_BUILD_ONLY_BRANDING", "1")
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
@ -59,13 +72,15 @@ if ("1" == config.option("update")):
|
||||
base.configure_common_apps()
|
||||
|
||||
# developing...
|
||||
develop.make();
|
||||
develop.make()
|
||||
|
||||
# check only js builds
|
||||
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
|
||||
build_js.make()
|
||||
exit(0)
|
||||
|
||||
#base.check_tools()
|
||||
|
||||
# core 3rdParty
|
||||
make_common.make()
|
||||
|
||||
@ -77,9 +92,11 @@ if config.check_option("module", "desktop"):
|
||||
config.extend_option("config", "updmodule")
|
||||
base.set_env("DESKTOP_URL_UPDATES_MAIN_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.json")
|
||||
base.set_env("DESKTOP_URL_UPDATES_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcastdev.json")
|
||||
base.set_env("DESKTOP_URL_INSTALL_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/distrib/onlyoffice/<file>")
|
||||
base.set_env("DESKTOP_URL_INSTALL_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/onlineinstallerdev/<file>")
|
||||
|
||||
# build
|
||||
build.make()
|
||||
build_sln.make()
|
||||
|
||||
# js
|
||||
build_js.make()
|
||||
|
||||
@ -10,15 +10,17 @@ import package_utils as utils
|
||||
# parse
|
||||
parser = argparse.ArgumentParser(description="Build packages.")
|
||||
parser.add_argument("-P", "--platform", dest="platform", type=str,
|
||||
action="store", help="Defines platform", required=True)
|
||||
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
|
||||
action="store", help="Defines targets", required=True)
|
||||
parser.add_argument("-R", "--branding", dest="branding", type=str,
|
||||
action="store", help="Provides branding path")
|
||||
action="store", help="Defines platform", required=True)
|
||||
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
|
||||
action="store", help="Defines targets", required=True)
|
||||
parser.add_argument("-V", "--version", dest="version", type=str,
|
||||
action="store", help="Defines version")
|
||||
action="store", help="Defines version")
|
||||
parser.add_argument("-B", "--build", dest="build", type=str,
|
||||
action="store", help="Defines build")
|
||||
action="store", help="Defines build")
|
||||
parser.add_argument("-H", "--branch", dest="branch", type=str,
|
||||
action="store", help="Defines branch")
|
||||
parser.add_argument("-R", "--branding", dest="branding", type=str,
|
||||
action="store", help="Provides branding path")
|
||||
args = parser.parse_args()
|
||||
|
||||
# vars
|
||||
@ -29,13 +31,20 @@ common.targets = args.targets
|
||||
common.clean = "clean" in args.targets
|
||||
common.sign = "sign" in args.targets
|
||||
common.deploy = "deploy" in args.targets
|
||||
common.version = args.version if args.version else utils.get_env("BUILD_VERSION", "0.0.0")
|
||||
common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0")
|
||||
if args.version: common.version = args.version
|
||||
else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0")
|
||||
utils.set_env("PRODUCT_VERSION", common.version)
|
||||
utils.set_env("BUILD_VERSION", common.version)
|
||||
if args.build: common.build = args.build
|
||||
else: common.build = utils.get_env("BUILD_NUMBER", "0")
|
||||
utils.set_env("BUILD_NUMBER", common.build)
|
||||
if args.branch: common.branch = args.branch
|
||||
else: common.branch = utils.get_env("BRANCH_NAME", "null")
|
||||
utils.set_env("BRANCH_NAME", common.branch)
|
||||
common.branding = args.branding
|
||||
common.timestamp = utils.get_timestamp()
|
||||
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
|
||||
common.branding_dir = utils.get_abspath(common.workspace_dir + "/" + args.branding) if args.branding else common.workspace_dir
|
||||
common.deploy_data = utils.get_path(common.workspace_dir + "/deploy.txt")
|
||||
common.summary = []
|
||||
utils.log("os_family: " + common.os_family)
|
||||
utils.log("platform: " + str(common.platform))
|
||||
@ -64,13 +73,14 @@ import package_mobile
|
||||
|
||||
# build
|
||||
utils.set_cwd(common.workspace_dir, verbose=True)
|
||||
utils.delete_file(common.deploy_data)
|
||||
if "core" in common.targets:
|
||||
package_core.make()
|
||||
if "closuremaps_opensource" in common.targets:
|
||||
package_core.deploy_closuremaps("opensource")
|
||||
if "closuremaps_commercial" in common.targets:
|
||||
package_core.deploy_closuremaps("commercial")
|
||||
if "closuremaps_sdkjs_opensource" in common.targets:
|
||||
package_core.deploy_closuremaps_sdkjs("opensource")
|
||||
if "closuremaps_sdkjs_commercial" in common.targets:
|
||||
package_core.deploy_closuremaps_sdkjs("commercial")
|
||||
if "closuremaps_webapps" in common.targets:
|
||||
package_core.deploy_closuremaps_webapps("opensource")
|
||||
if "desktop" in common.targets:
|
||||
package_desktop.make()
|
||||
if "builder" in common.targets:
|
||||
@ -81,6 +91,8 @@ if "server_enterprise" in common.targets:
|
||||
package_server.make("enterprise")
|
||||
if "server_developer" in common.targets:
|
||||
package_server.make("developer")
|
||||
if "server_prerequisites" in common.targets:
|
||||
package_server.make("prerequisites")
|
||||
if "mobile" in common.targets:
|
||||
package_mobile.make()
|
||||
|
||||
|
||||
276
scripts/base.py
276
scripts/base.py
@ -39,6 +39,9 @@ def is_os_arm():
|
||||
return False
|
||||
return True
|
||||
|
||||
def get_platform():
|
||||
return platform.machine().lower()
|
||||
|
||||
def is_python_64bit():
|
||||
return (struct.calcsize("P") == 8)
|
||||
|
||||
@ -172,6 +175,13 @@ def find_file(path, pattern):
|
||||
for filename in fnmatch.filter(filenames, pattern):
|
||||
return os.path.join(root, filename)
|
||||
|
||||
def find_files(path, pattern):
|
||||
result = []
|
||||
for root, dirnames, filenames in os.walk(path):
|
||||
for filename in fnmatch.filter(filenames, pattern):
|
||||
result.append(os.path.join(root, filename))
|
||||
return result
|
||||
|
||||
def create_dir(path):
|
||||
path2 = get_path(path)
|
||||
if not os.path.exists(path2):
|
||||
@ -250,6 +260,9 @@ def copy_lib(src, dst, name):
|
||||
create_dir(dst + "/simulator")
|
||||
copy_dir(src + "/simulator/" + name + ".framework", dst + "/simulator/" + name + ".framework")
|
||||
|
||||
if is_dir(dst + "/" + name + ".xcframework"):
|
||||
delete_dir(dst + "/" + name + ".xcframework")
|
||||
|
||||
cmd("xcodebuild", ["-create-xcframework",
|
||||
"-framework", dst + "/" + name + ".framework",
|
||||
"-framework", dst + "/simulator/" + name + ".framework",
|
||||
@ -292,17 +305,34 @@ def copy_exe(src, dst, name):
|
||||
copy_file(src + "/" + name + exe_ext, dst + "/" + name + exe_ext)
|
||||
return
|
||||
|
||||
def readFileCommon(path):
|
||||
file_data = ""
|
||||
try:
|
||||
with open(get_path(path), "r") as file:
|
||||
file_data = file.read()
|
||||
except Exception as e:
|
||||
with open(get_path(path), "r", encoding="utf-8") as file:
|
||||
file_data = file.read()
|
||||
return file_data
|
||||
|
||||
def writeFileCommon(path, data):
|
||||
file_data = ""
|
||||
try:
|
||||
with open(get_path(path), "w") as file:
|
||||
file.write(data)
|
||||
except Exception as e:
|
||||
with open(get_path(path), "w", encoding="utf-8") as file:
|
||||
file.write(data)
|
||||
return
|
||||
|
||||
def replaceInFile(path, text, textReplace):
|
||||
if not is_file(path):
|
||||
print("[replaceInFile] file not exist: " + path)
|
||||
return
|
||||
filedata = ""
|
||||
with open(get_path(path), "r") as file:
|
||||
filedata = file.read()
|
||||
filedata = readFileCommon(path)
|
||||
filedata = filedata.replace(text, textReplace)
|
||||
delete_file(path)
|
||||
with open(get_path(path), "w") as file:
|
||||
file.write(filedata)
|
||||
writeFileCommon(path, filedata)
|
||||
return
|
||||
def replaceInFileUtf8(path, text, textReplace):
|
||||
if not is_file(path):
|
||||
@ -320,28 +350,21 @@ def replaceInFileRE(path, pattern, textReplace):
|
||||
if not is_file(path):
|
||||
print("[replaceInFile] file not exist: " + path)
|
||||
return
|
||||
filedata = ""
|
||||
with open(get_path(path), "r") as file:
|
||||
filedata = file.read()
|
||||
filedata = readFileCommon(path)
|
||||
filedata = re.sub(pattern, textReplace, filedata)
|
||||
delete_file(path)
|
||||
with open(get_path(path), "w") as file:
|
||||
file.write(filedata)
|
||||
writeFileCommon(path, filedata)
|
||||
return
|
||||
|
||||
def readFile(path):
|
||||
if not is_file(path):
|
||||
return ""
|
||||
filedata = ""
|
||||
with open(get_path(path), "r") as file:
|
||||
filedata = file.read()
|
||||
return filedata
|
||||
return readFileCommon(path)
|
||||
|
||||
def writeFile(path, data):
|
||||
if is_file(path):
|
||||
delete_file(path)
|
||||
with open(get_path(path), "w") as file:
|
||||
file.write(data)
|
||||
writeFileCommon(path, data)
|
||||
return
|
||||
|
||||
# system cmd methods ------------------------------------
|
||||
@ -354,7 +377,7 @@ def cmd(prog, args=[], is_no_errors=False):
|
||||
else:
|
||||
command = prog
|
||||
for arg in args:
|
||||
command += (" \"" + arg + "\"")
|
||||
command += (" \"" + arg.replace('\"', '\\\"') + "\"")
|
||||
ret = subprocess.call(command, stderr=subprocess.STDOUT, shell=True)
|
||||
if ret != 0 and True != is_no_errors:
|
||||
sys.exit("Error (" + prog + "): " + str(ret))
|
||||
@ -371,7 +394,7 @@ def cmd2(prog, args=[], is_no_errors=False):
|
||||
sys.exit("Error (" + prog + "): " + str(ret))
|
||||
return ret
|
||||
|
||||
def cmd_exe(prog, args):
|
||||
def cmd_exe(prog, args, is_no_errors=False):
|
||||
prog_dir = os.path.dirname(prog)
|
||||
env_dir = os.environ
|
||||
if ("linux" == host_platform()):
|
||||
@ -390,10 +413,10 @@ def cmd_exe(prog, args):
|
||||
else:
|
||||
command = prog
|
||||
for arg in args:
|
||||
command += (" \"" + arg + "\"")
|
||||
command += (" \"" + arg.replace('\"', '\\\"') + "\"")
|
||||
process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir)
|
||||
ret = process.wait()
|
||||
if ret != 0:
|
||||
if ret != 0 and True != is_no_errors:
|
||||
sys.exit("Error (" + prog + "): " + str(ret))
|
||||
return ret
|
||||
|
||||
@ -405,6 +428,13 @@ def cmd_in_dir(directory, prog, args=[], is_no_errors=False):
|
||||
os.chdir(cur_dir)
|
||||
return ret
|
||||
|
||||
def cmd_in_dir_qemu(platform, directory, prog, args=[], is_no_errors=False):
|
||||
if (platform == "linux_arm64"):
|
||||
return cmd_in_dir(directory, "qemu-aarch64", ["-L", "/usr/aarch64-linux-gnu", prog] + args, is_no_errors)
|
||||
if (platform == "linux_arm32"):
|
||||
return cmd_in_dir(directory, "qemu-arm", ["-L", "/usr/arm-linux-gnueabi", prog] + args, is_no_errors)
|
||||
return 0
|
||||
|
||||
def cmd_and_return_cwd(prog, args=[], is_no_errors=False):
|
||||
cur_dir = os.getcwd()
|
||||
ret = cmd(prog, args, is_no_errors)
|
||||
@ -413,12 +443,13 @@ def cmd_and_return_cwd(prog, args=[], is_no_errors=False):
|
||||
|
||||
def run_command(sCommand):
|
||||
popen = subprocess.Popen(sCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
result = {'stdout' : '', 'stderr' : ''}
|
||||
result = {'stdout' : '', 'stderr' : '', 'returncode' : 0}
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
result['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
|
||||
result['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
|
||||
result['returncode'] = popen.returncode
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
@ -483,12 +514,37 @@ def set_cwd(dir):
|
||||
return
|
||||
|
||||
# git ---------------------------------------------------
|
||||
def git_get_origin():
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(get_script_dir() + "/../")
|
||||
ret = run_command("git config --get remote.origin.url")["stdout"]
|
||||
os.chdir(cur_dir)
|
||||
return ret
|
||||
|
||||
def git_is_ssh():
|
||||
git_protocol = config.option("git-protocol")
|
||||
if (git_protocol == "https"):
|
||||
return False
|
||||
if (git_protocol == "ssh"):
|
||||
return True
|
||||
origin = git_get_origin()
|
||||
if (git_protocol == "auto") and (origin.find(":ONLYOFFICE/") != -1):
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_ssh_base_url():
|
||||
cur_origin = git_get_origin()
|
||||
ind = cur_origin.find(":ONLYOFFICE/")
|
||||
if (ind == -1):
|
||||
return "git@github.com:ONLYOFFICE/"
|
||||
return cur_origin[:ind+12]
|
||||
|
||||
def git_update(repo, is_no_errors=False, is_current_dir=False, git_owner=""):
|
||||
print("[git] update: " + repo)
|
||||
owner = git_owner if git_owner else "ONLYOFFICE"
|
||||
url = "https://github.com/" + owner + "/" + repo + ".git"
|
||||
if config.option("git-protocol") == "ssh":
|
||||
url = "git@github.com:ONLYOFFICE/" + repo + ".git"
|
||||
if git_is_ssh():
|
||||
url = get_ssh_base_url() + repo + ".git"
|
||||
folder = get_script_dir() + "/../../" + repo
|
||||
if is_current_dir:
|
||||
folder = repo
|
||||
@ -521,10 +577,12 @@ def get_repositories():
|
||||
result.update(get_sdkjs_addons())
|
||||
result["onlyoffice.github.io"] = [False, False]
|
||||
result["web-apps"] = [False, False]
|
||||
result.update(get_web_apps_addons())
|
||||
result["dictionaries"] = [False, False]
|
||||
result["core-fonts"] = [False, False]
|
||||
|
||||
if config.check_option("module", "server"):
|
||||
result.update(get_web_apps_addons())
|
||||
|
||||
if config.check_option("module", "builder"):
|
||||
result["document-templates"] = [False, False]
|
||||
|
||||
@ -557,8 +615,8 @@ def get_branding_repositories(checker):
|
||||
def create_pull_request(branches_to, repo, is_no_errors=False, is_current_dir=False):
|
||||
print("[git] create pull request: " + repo)
|
||||
url = "https://github.com/ONLYOFFICE/" + repo + ".git"
|
||||
if config.option("git-protocol") == "ssh":
|
||||
url = "git@github.com:ONLYOFFICE/" + repo + ".git"
|
||||
if git_is_ssh():
|
||||
url = get_ssh_base_url() + repo + ".git"
|
||||
folder = get_script_dir() + "/../../" + repo
|
||||
if is_current_dir:
|
||||
folder = repo
|
||||
@ -687,6 +745,22 @@ def check_congig_option_with_platfom(platform, option_name):
|
||||
return True
|
||||
return False
|
||||
|
||||
def correct_makefile_after_qmake(platform, file):
|
||||
if (0 == platform.find("android")):
|
||||
if ("android_arm64_v8a" == platform):
|
||||
replaceInFile(file, "_arm64-v8a.a", ".a")
|
||||
replaceInFile(file, "_arm64-v8a.so", ".so")
|
||||
if ("android_armv7" == platform):
|
||||
replaceInFile(file, "_armeabi-v7a.a", ".a")
|
||||
replaceInFile(file, "_armeabi-v7a.so", ".so")
|
||||
if ("android_x86_64" == platform):
|
||||
replaceInFile(file, "_x86_64.a", ".a")
|
||||
replaceInFile(file, "_x86_64.so", ".so")
|
||||
if ("android_x86" == platform):
|
||||
replaceInFile(file, "_x86.a", ".a")
|
||||
replaceInFile(file, "_x86.so", ".so")
|
||||
return
|
||||
|
||||
def qt_config_platform_addon(platform):
|
||||
config_addon = ""
|
||||
if (0 == platform.find("win")):
|
||||
@ -709,8 +783,9 @@ def qt_config(platform):
|
||||
if (-1 != platform.find("xp")):
|
||||
config_param += " build_xp"
|
||||
if ("ios" == platform):
|
||||
set_env("BITCODE_GENERATION_MODE", "bitcode")
|
||||
set_env("ENABLE_BITCODE", "YES")
|
||||
if (config.check_option("bitcode", "yes")):
|
||||
set_env("BITCODE_GENERATION_MODE", "bitcode")
|
||||
set_env("ENABLE_BITCODE", "YES")
|
||||
config_param = config_param.replace("desktop", "")
|
||||
config_param += " iphoneos device"
|
||||
if (-1 == config_param_lower.find("debug")):
|
||||
@ -741,6 +816,21 @@ def qt_major_version():
|
||||
qt_dir = qt_version()
|
||||
return qt_dir.split(".")[0]
|
||||
|
||||
def qt_version_decimal():
|
||||
qt_dir = qt_version()
|
||||
return 10 * int(qt_dir.split(".")[0]) + int(qt_dir.split(".")[1])
|
||||
|
||||
def qt_config_as_param(value):
|
||||
qt_version = qt_version_decimal()
|
||||
ret_params = []
|
||||
if (66 > qt_version):
|
||||
ret_params.append("CONFIG+=" + value)
|
||||
else:
|
||||
params = value.split()
|
||||
for name in params:
|
||||
ret_params.append("CONFIG+=" + name)
|
||||
return ret_params
|
||||
|
||||
def qt_copy_lib(lib, dir):
|
||||
qt_dir = get_env("QT_DEPLOY")
|
||||
if ("windows" == host_platform()):
|
||||
@ -1002,15 +1092,15 @@ def web_apps_addons_param():
|
||||
def download(url, dst):
|
||||
return cmd_exe("curl", ["-L", "-o", dst, url])
|
||||
|
||||
def extract(src, dst):
|
||||
def extract(src, dst, is_no_errors=False):
|
||||
app = "7za" if ("mac" == host_platform()) else "7z"
|
||||
return cmd_exe(app, ["x", "-y", src, "-o" + dst])
|
||||
return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors)
|
||||
|
||||
def extract_unicode(src, dst):
|
||||
def extract_unicode(src, dst, is_no_errors=False):
|
||||
if "windows" == host_platform():
|
||||
run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"])
|
||||
return
|
||||
return extract(src, dst)
|
||||
return extract(src, dst, is_no_errors)
|
||||
|
||||
def archive_folder(src, dst):
|
||||
app = "7za" if ("mac" == host_platform()) else "7z"
|
||||
@ -1143,18 +1233,19 @@ def mac_correct_rpath_x2t(dir):
|
||||
mac_correct_rpath_library("kernel", ["UnicodeConverter"])
|
||||
mac_correct_rpath_library("kernel_network", ["UnicodeConverter", "kernel"])
|
||||
mac_correct_rpath_library("graphics", ["UnicodeConverter", "kernel"])
|
||||
mac_correct_rpath_library("doctrenderer", ["UnicodeConverter", "kernel", "kernel_network", "graphics"])
|
||||
mac_correct_rpath_library("doctrenderer", ["UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "DjVuFile", "DocxRenderer"])
|
||||
mac_correct_rpath_library("HtmlFile2", ["UnicodeConverter", "kernel", "kernel_network", "graphics"])
|
||||
mac_correct_rpath_library("EpubFile", ["UnicodeConverter", "kernel", "HtmlFile2", "graphics"])
|
||||
mac_correct_rpath_library("Fb2File", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("HtmlRenderer", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("PdfFile", ["UnicodeConverter", "kernel", "graphics", "kernel_network"])
|
||||
mac_correct_rpath_library("DjVuFile", ["UnicodeConverter", "kernel", "graphics", "PdfFile"])
|
||||
mac_correct_rpath_library("XpsFile", ["UnicodeConverter", "kernel", "graphics", "PdfFile"])
|
||||
mac_correct_rpath_library("DocxRenderer", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("IWorkFile", ["UnicodeConverter", "kernel"])
|
||||
mac_correct_rpath_library("HWPFile", ["UnicodeConverter", "kernel", "graphics"])
|
||||
cmd("chmod", ["-v", "+x", "./x2t"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./x2t"], True)
|
||||
mac_correct_rpath_binary("./x2t", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
|
||||
mac_correct_rpath_binary("./x2t", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer", "IWorkFile", "HWPFile"])
|
||||
if is_file("./allfontsgen"):
|
||||
cmd("chmod", ["-v", "+x", "./allfontsgen"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./allfontsgen"], True)
|
||||
@ -1162,7 +1253,7 @@ def mac_correct_rpath_x2t(dir):
|
||||
if is_file("./allthemesgen"):
|
||||
cmd("chmod", ["-v", "+x", "./allthemesgen"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./allthemesgen"], True)
|
||||
mac_correct_rpath_binary("./allthemesgen", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics", "kernel_network", "doctrenderer"])
|
||||
mac_correct_rpath_binary("./allthemesgen", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics", "kernel_network", "doctrenderer", "PdfFile", "XpsFile", "DjVuFile", "DocxRenderer"])
|
||||
if is_file("./pluginsmanager"):
|
||||
cmd("chmod", ["-v", "+x", "./pluginsmanager"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./pluginsmanager"], True)
|
||||
@ -1179,7 +1270,14 @@ def mac_correct_rpath_docbuilder(dir):
|
||||
os.chdir(dir)
|
||||
cmd("chmod", ["-v", "+x", "./docbuilder"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./docbuilder"], True)
|
||||
mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
|
||||
mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "IWorkFile", "HWPFile", "doctrenderer", "DocxRenderer"])
|
||||
mac_correct_rpath_library("docbuilder.c", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "PdfFile", "XpsFile", "DjVuFile", "DocxRenderer"])
|
||||
|
||||
def add_loader_path_to_rpath(libs):
|
||||
for lib in libs:
|
||||
cmd("install_name_tool", ["-add_rpath", "@loader_path", "lib" + lib + ".dylib"], True)
|
||||
|
||||
add_loader_path_to_rpath(["icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "PdfFile", "XpsFile", "DjVuFile", "DocxRenderer", "docbuilder.c"])
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
@ -1189,9 +1287,9 @@ def mac_correct_rpath_desktop(dir):
|
||||
os.chdir(dir)
|
||||
mac_correct_rpath_library("hunspell", [])
|
||||
mac_correct_rpath_library("ooxmlsignature", ["kernel"])
|
||||
mac_correct_rpath_library("ascdocumentscore", ["UnicodeConverter", "kernel", "graphics", "kernel_network", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
mac_correct_rpath_library("ascdocumentscore", ["UnicodeConverter", "kernel", "graphics", "kernel_network", "PdfFile", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
cmd("install_name_tool", ["-change", "@executable_path/../Frameworks/Chromium Embedded Framework.framework/Chromium Embedded Framework", "@rpath/Chromium Embedded Framework.framework/Chromium Embedded Framework", "libascdocumentscore.dylib"])
|
||||
mac_correct_rpath_binary("./editors_helper.app/Contents/MacOS/editors_helper", ["ascdocumentscore", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
mac_correct_rpath_binary("./editors_helper.app/Contents/MacOS/editors_helper", ["ascdocumentscore", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path/../../../../Frameworks", "./editors_helper.app/Contents/MacOS/editors_helper"], True)
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path/../../../../Resources/converter", "./editors_helper.app/Contents/MacOS/editors_helper"], True)
|
||||
cmd("chmod", ["-v", "+x", "./editors_helper.app/Contents/MacOS/editors_helper"])
|
||||
@ -1215,6 +1313,19 @@ def mac_correct_rpath_desktop(dir):
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
def linux_set_origin_rpath_libraries(dir, libs):
|
||||
tools_dir = get_script_dir() + "/../tools/linux/elf/"
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(dir)
|
||||
for lib in libs:
|
||||
cmd(tools_dir + "patchelf", ["--set-rpath", "\\$ORIGIN", "lib" + lib], True)
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
def linux_correct_rpath_docbuilder(dir):
|
||||
linux_set_origin_rpath_libraries(dir, ["docbuilder.jni.so", "docbuilder.c.so", "icuuc.so.58", "doctrenderer.so", "graphics.so", "kernel.so", "kernel_network.so", "UnicodeConverter.so", "PdfFile.so", "XpsFile.so", "DjVuFile.so", "DocxRenderer.so"])
|
||||
return
|
||||
|
||||
def common_check_version(name, good_version, clean_func):
|
||||
version_good = name + "_version_" + good_version
|
||||
version_path = "./" + name + ".data"
|
||||
@ -1268,7 +1379,7 @@ def copy_marketplace_plugin(dst_dir, is_name_as_guid=False, is_desktop_local=Fal
|
||||
git_dir = __file__script__path__ + "/../.."
|
||||
if False:
|
||||
# old version
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
|
||||
copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
|
||||
return
|
||||
src_dir_path = git_dir + "/onlyoffice.github.io/store/plugin"
|
||||
name = "marketplace"
|
||||
@ -1291,9 +1402,11 @@ def copy_marketplace_plugin(dst_dir, is_name_as_guid=False, is_desktop_local=Fal
|
||||
delete_dir(dst_dir_path + "/store/plugin-dev")
|
||||
return
|
||||
|
||||
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False):
|
||||
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False, isXp=False):
|
||||
plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content"
|
||||
plugins_list_config = config.option("sdkjs-plugin")
|
||||
if isXp:
|
||||
plugins_list_config="photoeditor, macros, highlightcode, doc2md"
|
||||
if ("" == plugins_list_config):
|
||||
return
|
||||
plugins_list = plugins_list_config.rsplit(", ")
|
||||
@ -1332,6 +1445,7 @@ def support_old_versions_plugins(out_dir):
|
||||
def generate_sdkjs_plugin_list(dst):
|
||||
plugins_list = config.option("sdkjs-plugin").rsplit(", ") \
|
||||
+ config.option("sdkjs-plugin-server").rsplit(", ")
|
||||
plugins_list = list(filter(None, plugins_list))
|
||||
with open(get_path(dst), 'w') as file:
|
||||
dump = json.dumps(sorted(plugins_list), indent=4)
|
||||
file.write(re.sub(r"^(\s{4})", '\t', dump, 0, re.MULTILINE))
|
||||
@ -1660,3 +1774,83 @@ def check_module_version(actual_version, clear_func):
|
||||
writeFile(module_file, actual_version)
|
||||
clear_func()
|
||||
return
|
||||
|
||||
def check_python():
|
||||
if ("linux" != host_platform()):
|
||||
return
|
||||
directory = __file__script__path__ + "/../tools/linux"
|
||||
directory_bin = __file__script__path__ + "/../tools/linux/python3/bin"
|
||||
|
||||
if not is_dir(directory + "/python3"):
|
||||
cmd("tar", ["xfz", directory + "/python3.tar.gz", "-C", directory])
|
||||
cmd("ln", ["-s", directory_bin + "/python3", directory_bin + "/python"])
|
||||
directory_bin = directory_bin.replace(" ", "\\ ")
|
||||
os.environ["PATH"] = directory_bin + os.pathsep + os.environ["PATH"]
|
||||
return
|
||||
|
||||
def check_tools():
|
||||
if ("linux" == host_platform()):
|
||||
directory = __file__script__path__ + "/../tools/linux"
|
||||
if not is_os_arm() and config.check_option("platform", "linux_arm64"):
|
||||
if not is_dir(directory + "/qt"):
|
||||
create_dir(directory + "/qt")
|
||||
cmd("python", [directory + "/arm/build_qt.py", "--arch", "arm64", directory + "/qt/arm64"])
|
||||
return
|
||||
|
||||
def apply_patch(file, patch):
|
||||
patch_content = readFile(patch)
|
||||
index1 = patch_content.find("<<<<<<<")
|
||||
index2 = patch_content.find("=======")
|
||||
index3 = patch_content.find(">>>>>>>")
|
||||
file_content_old = patch_content[index1 + 7:index2].strip()
|
||||
file_content_new = patch_content[index2 + 7:index3].strip()
|
||||
#file_content_new = "\n#if 0" + file_content_old + "#else" + file_content_new + "#endif\n"
|
||||
replaceInFile(file, file_content_old, file_content_new)
|
||||
return
|
||||
|
||||
def get_autobuild_version(product, platform="", branch="", build=""):
|
||||
download_platform = platform
|
||||
if ("" == download_platform):
|
||||
osType = get_platform()
|
||||
isArm = True if (-1 != osType.find("arm")) else False
|
||||
is64 = True if (osType.endswith("64")) else False
|
||||
|
||||
if ("windows" == host_platform()):
|
||||
download_platform = "win-"
|
||||
elif ("linux" == host_platform()):
|
||||
download_platform = "linux-"
|
||||
else:
|
||||
download_platform = "mac-"
|
||||
|
||||
download_platform += ("arm" if isArm else "")
|
||||
download_platform += ("64" if is64 else "32")
|
||||
else:
|
||||
download_platform = download_platform.replace("_", "-")
|
||||
|
||||
download_build = build
|
||||
if ("" == download_build):
|
||||
download_build = "latest"
|
||||
|
||||
download_branch = branch
|
||||
if ("" == download_branch):
|
||||
download_branch = "develop"
|
||||
|
||||
download_addon = download_branch + "/" + download_build + "/" + product + "-" + download_platform + ".7z"
|
||||
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/" + download_addon
|
||||
|
||||
def create_x2t_js_cache(dir, product, platform):
|
||||
if is_file(dir + "/libdoctrenderer.dylib") and (os.path.getsize(dir + "/libdoctrenderer.dylib") < 5*1024*1024):
|
||||
return
|
||||
|
||||
if ((platform == "linux_arm64") and not is_os_arm()):
|
||||
cmd_in_dir_qemu(platform, dir, "./x2t", ["-create-js-snapshots"], True)
|
||||
return
|
||||
|
||||
cmd_in_dir(dir, "./x2t", ["-create-js-snapshots"], True)
|
||||
return
|
||||
|
||||
def setup_local_qmake(dir_qmake):
|
||||
dir_base = os.path.dirname(dir_qmake)
|
||||
writeFile(dir_base + "/onlyoffice_qt.conf", "Prefix = " + dir_base)
|
||||
return
|
||||
|
||||
118
scripts/build.py
118
scripts/build.py
@ -1,118 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import multiprocessing
|
||||
|
||||
def make_pro_file(makefiles_dir, pro_file, qmake_config_addon=""):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
old_env = dict(os.environ)
|
||||
|
||||
# if you need change output libraries path - set the env variable
|
||||
# base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
|
||||
|
||||
isAndroid = False if (-1 == platform.find("android")) else True
|
||||
if isAndroid:
|
||||
toolchain_platform = "linux-x86_64"
|
||||
if ("mac" == base.host_platform()):
|
||||
toolchain_platform = "darwin-x86_64"
|
||||
base.set_env("ANDROID_NDK_HOST", toolchain_platform)
|
||||
old_path = base.get_env("PATH")
|
||||
new_path = base.qt_setup(platform) + "/bin:"
|
||||
new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
|
||||
new_path += old_path
|
||||
base.set_env("PATH", new_path)
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
|
||||
|
||||
if (-1 != platform.find("ios")):
|
||||
base.hack_xcode_ios()
|
||||
|
||||
# makefile suffix
|
||||
file_suff = platform
|
||||
if (config.check_option("config", "debug")):
|
||||
file_suff += "_debug_"
|
||||
file_suff += config.option("branding")
|
||||
|
||||
# setup qt
|
||||
qt_dir = base.qt_setup(platform)
|
||||
base.set_env("OS_DEPLOY", platform)
|
||||
|
||||
# qmake CONFIG+=...
|
||||
config_param = base.qt_config(platform)
|
||||
if ("" != qmake_config_addon):
|
||||
config_param += (" " + qmake_config_addon)
|
||||
|
||||
# qmake ADDON
|
||||
qmake_addon = []
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon = config.option("qmake_addon").split()
|
||||
|
||||
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
|
||||
print("THIS PLATFORM IS NOT SUPPORTED")
|
||||
continue
|
||||
|
||||
# non windows platform
|
||||
if not base.is_windows():
|
||||
if base.is_file(makefiles_dir + "/build.makefile_" + file_suff):
|
||||
base.delete_file(makefiles_dir + "/build.makefile_" + file_suff)
|
||||
print("make file: " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if ("1" == config.option("clean")):
|
||||
base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if not base.is_file(pro_file):
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff, "-j" + str(multiprocessing.cpu_count())])
|
||||
else:
|
||||
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
|
||||
else:
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
|
||||
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_addon_string = ""
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
if ("1" == config.option("clean")):
|
||||
qmake_bat.append("call nmake clean -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_bat.append("call nmake distclean -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
qmake_bat.append("set CL=/MP")
|
||||
qmake_bat.append("call nmake -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.run_as_bat(qmake_bat)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
|
||||
base.delete_file(".qmake.stash")
|
||||
|
||||
# make build.pro
|
||||
def make():
|
||||
make_pro_file("makefiles", "build.pro")
|
||||
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
|
||||
make_pro_file("makefiles", "build.pro", "xcframework_platform_ios_simulator")
|
||||
|
||||
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
|
||||
# check replace
|
||||
new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder.h")
|
||||
if ("2019" == config.option("vs-version")):
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
|
||||
if (True):
|
||||
new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
|
||||
base.restorePathForBuilder(new_path_net)
|
||||
else:
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
|
||||
base.restorePathForBuilder(new_replace_path)
|
||||
return
|
||||
@ -41,6 +41,7 @@ def make():
|
||||
base.create_dir(out_dir)
|
||||
|
||||
# builder
|
||||
base.cmd_in_dir(base_dir + "/../web-apps/translation", "python", ["merge_and_check.py"])
|
||||
build_interface(base_dir + "/../web-apps/build")
|
||||
build_sdk_builder(base_dir + "/../sdkjs/build")
|
||||
base.create_dir(out_dir + "/builder")
|
||||
@ -55,17 +56,20 @@ def make():
|
||||
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/desktop/sdkjs")
|
||||
correct_sdkjs_licence(out_dir + "/desktop/sdkjs")
|
||||
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/desktop/web-apps")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/embed")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/mobile")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/embed")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/mobile")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/embed")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/mobile")
|
||||
|
||||
deldirs = ['ie', 'mobile', 'embed']
|
||||
[base.delete_dir(root + "/" + d) for root, dirs, f in os.walk(out_dir + "/desktop/web-apps/apps") for d in dirs if d in deldirs]
|
||||
|
||||
# for bug 62528. remove empty folders
|
||||
walklist = list(os.walk(out_dir + "/desktop/sdkjs"))
|
||||
[os.remove(p) for p, _, _ in walklist[::-1] if len(os.listdir(p)) == 0]
|
||||
|
||||
base.copy_file(base_dir + "/../web-apps/apps/api/documents/index.html.desktop", out_dir + "/desktop/web-apps/apps/api/documents/index.html")
|
||||
|
||||
build_interface(base_dir + "/../desktop-apps/common/loginpage/build")
|
||||
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/index.html", out_dir + "/desktop/index.html")
|
||||
|
||||
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/noconnect.html", out_dir + "/desktop/noconnect.html")
|
||||
|
||||
# mobile
|
||||
if config.check_option("module", "mobile"):
|
||||
build_sdk_native(base_dir + "/../sdkjs/build", False)
|
||||
@ -73,20 +77,27 @@ def make():
|
||||
base.create_dir(out_dir + "/mobile/sdkjs")
|
||||
vendor_dir_src = base_dir + "/../web-apps/vendor/"
|
||||
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
|
||||
|
||||
prefix_js = [
|
||||
vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs-native/common/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"
|
||||
]
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners.js")
|
||||
postfix_js = [
|
||||
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
|
||||
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
|
||||
]
|
||||
|
||||
base.join_scripts(prefix_js, out_dir + "/mobile/sdkjs/banners.js")
|
||||
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/word")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/word/script.bin")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/cell")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/cell/script.bin")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/slide")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/slide/script.bin")
|
||||
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
|
||||
return
|
||||
@ -132,21 +143,35 @@ def build_sdk_builder(directory):
|
||||
def build_sdk_native(directory, minimize=True):
|
||||
#_run_npm_cli(directory)
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param())
|
||||
addons = base.sdkjs_addons_param()
|
||||
if not config.check_option("sdkjs-addons", "sdkjs-native"):
|
||||
addons.append("--addon=sdkjs-native")
|
||||
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
|
||||
return
|
||||
|
||||
|
||||
def build_sdkjs_develop(root_dir):
|
||||
external_folder = config.option("--external-folder")
|
||||
if (external_folder != ""):
|
||||
external_folder = "/" + external_folder
|
||||
|
||||
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
|
||||
|
||||
|
||||
def build_js_develop(root_dir):
|
||||
#_run_npm_cli(root_dir + "/sdkjs/build")
|
||||
external_folder = config.option("--external-folder")
|
||||
if (external_folder != ""):
|
||||
external_folder = "/" + external_folder
|
||||
|
||||
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
|
||||
build_sdkjs_develop(root_dir)
|
||||
|
||||
_run_npm(root_dir + external_folder + "/web-apps/build")
|
||||
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
|
||||
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
|
||||
base.cmd_in_dir(root_dir + external_folder + "/web-apps/translation", "python", ["merge_and_check.py"])
|
||||
|
||||
old_cur = os.getcwd()
|
||||
old_product_version = base.get_env("PRODUCT_VERSION")
|
||||
|
||||
@ -14,6 +14,9 @@ parser.add_option("--output",
|
||||
parser.add_option("--write-version",
|
||||
action="store_true", dest="write_version", default=False,
|
||||
help="Create version file of build")
|
||||
parser.add_option("--minimize",
|
||||
action="store", type="string", dest="minimize", default="0",
|
||||
help="Is minimized version")
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
|
||||
def write_version_files(output_dir):
|
||||
@ -32,7 +35,11 @@ def write_version_files(output_dir):
|
||||
# parse configuration
|
||||
config.parse()
|
||||
config.parse_defaults()
|
||||
config.extend_option("jsminimize", "0")
|
||||
|
||||
isMinimize = False
|
||||
if ("1" == options.minimize or "true" == options.minimize):
|
||||
isMinimize = True
|
||||
config.set_option("jsminimize", "disable")
|
||||
|
||||
branding = config.option("branding-name")
|
||||
if ("" == branding):
|
||||
@ -46,23 +53,30 @@ if (options.output):
|
||||
|
||||
base.create_dir(out_dir)
|
||||
|
||||
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
|
||||
build_js.build_sdk_native(base_dir + "/../sdkjs/build", isMinimize)
|
||||
vendor_dir_src = base_dir + "/../web-apps/vendor/"
|
||||
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners.js")
|
||||
prefix_js = [
|
||||
vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs-native/common/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"
|
||||
]
|
||||
|
||||
postfix_js = [
|
||||
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
|
||||
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
|
||||
]
|
||||
|
||||
base.join_scripts(prefix_js, out_dir + "/banners.js")
|
||||
|
||||
base.create_dir(out_dir + "/word")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/word/script.bin")
|
||||
base.create_dir(out_dir + "/cell")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/cell/script.bin")
|
||||
base.create_dir(out_dir + "/slide")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/slide/script.bin")
|
||||
|
||||
base.delete_file(out_dir + "/banners.js")
|
||||
|
||||
|
||||
@ -41,12 +41,13 @@ def make():
|
||||
pkg_target = "node16"
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
pkg_target += "-linux"
|
||||
#node22 packaging has issue https://github.com/yao-pkg/pkg/issues/87
|
||||
pkg_target = "node20-linux"
|
||||
if (-1 != config.option("platform").find("linux_arm64")):
|
||||
pkg_target += "-arm64"
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
pkg_target += "-win"
|
||||
pkg_target = "node16-win"
|
||||
|
||||
base.cmd_in_dir(server_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
|
||||
base.cmd_in_dir(server_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
|
||||
@ -64,10 +65,9 @@ def build_server_with_addons():
|
||||
for addon in addons:
|
||||
if (addon):
|
||||
addon_dir = base.get_script_dir() + "/../../" + addon
|
||||
base.cmd_in_dir(addon_dir, "npm", ["ci"])
|
||||
base.cmd_in_dir(addon_dir, "npm", ["run", "build"])
|
||||
if (base.is_exist(addon_dir)):
|
||||
base.cmd_in_dir(addon_dir, "npm", ["ci"])
|
||||
base.cmd_in_dir(addon_dir, "npm", ["run", "build"])
|
||||
|
||||
def build_server_develop():
|
||||
server_dir = base.get_script_dir() + "/../../server"
|
||||
base.cmd_in_dir(server_dir, "npm", ["ci"])
|
||||
base.cmd_in_dir(server_dir, "grunt", ["develop", "-v"] + base.server_addons_param())
|
||||
build_server_with_addons()
|
||||
|
||||
75
scripts/build_sln.py
Normal file
75
scripts/build_sln.py
Normal file
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(__file__) + "/..")
|
||||
import sln
|
||||
import qmake
|
||||
|
||||
# make solution
|
||||
def make(solution=""):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
|
||||
if ("" == solution):
|
||||
solution = "./sln.json"
|
||||
projects = sln.get_projects(solution, platform)
|
||||
|
||||
for pro in projects:
|
||||
qmake_main_addon = ""
|
||||
if (0 == platform.find("android")) and (-1 != pro.find("X2tConverter.pro")):
|
||||
if config.check_option("config", "debug") and not config.check_option("config", "disable_x2t_debug_strip"):
|
||||
print("[WARNING:] temporary enable strip for x2t library in debug")
|
||||
qmake_main_addon += "build_strip_debug"
|
||||
|
||||
qmake.make(platform, pro, qmake_main_addon)
|
||||
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
|
||||
qmake.make(platform, pro, "xcframework_platform_ios_simulator")
|
||||
|
||||
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
|
||||
# check branding libs
|
||||
if (config.option("branding-name") == "onlyoffice"):
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
core_lib_unbranding_dir = os.getcwd() + "/../core/build/lib/" + platform + base.qt_dst_postfix()
|
||||
if not base.is_dir(core_lib_unbranding_dir):
|
||||
base.create_dir(core_lib_unbranding_dir)
|
||||
core_lib_branding_dir = os.getcwd() + "/../core/build/onlyoffice/lib/" + platform + base.qt_dst_postfix()
|
||||
base.copy_file(core_lib_branding_dir + "/doctrenderer.dll", core_lib_unbranding_dir + "/doctrenderer.dll")
|
||||
base.copy_file(core_lib_branding_dir + "/doctrenderer.lib", core_lib_unbranding_dir + "/doctrenderer.lib")
|
||||
|
||||
# check replace
|
||||
directory_builder_branding = os.getcwd() + "/../core/DesktopEditor/doctrenderer"
|
||||
if base.is_dir(directory_builder_branding):
|
||||
new_replace_path = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.com/src/docbuilder.h")
|
||||
if ("2019" == config.option("vs-version")):
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
|
||||
if (True):
|
||||
new_path_net = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.net/src/docbuilder.net.cpp")
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
|
||||
base.restorePathForBuilder(new_path_net)
|
||||
else:
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
|
||||
base.restorePathForBuilder(new_replace_path)
|
||||
|
||||
# build Java docbuilder wrapper
|
||||
if config.check_option("module", "builder") and "onlyoffice" == config.branding():
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
# build JNI library
|
||||
qmake.make(platform, base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java/src/jni/docbuilder_jni.pro", "", True)
|
||||
# build Java code to JAR
|
||||
base.cmd_in_dir(base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java", "python", ["make.py"])
|
||||
|
||||
return
|
||||
@ -182,6 +182,9 @@ def extend_option(name, value):
|
||||
else:
|
||||
options[name] = value
|
||||
|
||||
def set_option(name, value):
|
||||
options[name] = value
|
||||
|
||||
def branding():
|
||||
branding = option("branding-name")
|
||||
if ("" == branding):
|
||||
|
||||
@ -13,9 +13,10 @@ import cef
|
||||
import icu
|
||||
import openssl
|
||||
import curl
|
||||
import websocket
|
||||
import websocket_all
|
||||
import v8
|
||||
import html2
|
||||
import iwork
|
||||
import hunspell
|
||||
import glew
|
||||
import harfbuzz
|
||||
@ -42,6 +43,7 @@ def make():
|
||||
openssl.make()
|
||||
v8.make()
|
||||
html2.make()
|
||||
iwork.make(False)
|
||||
hunspell.make(False)
|
||||
harfbuzz.make()
|
||||
glew.make()
|
||||
@ -54,5 +56,5 @@ def make():
|
||||
if config.check_option("module", "mobile"):
|
||||
if (config.check_option("platform", "android")):
|
||||
curl.make()
|
||||
websocket.make()
|
||||
websocket_all.make()
|
||||
return
|
||||
|
||||
170
scripts/core_common/modules/android/android_ndk.py
Executable file
170
scripts/core_common/modules/android/android_ndk.py
Executable file
@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import re
|
||||
|
||||
def get_android_ndk_version():
|
||||
env_val = base.get_env("ANDROID_NDK_ROOT")
|
||||
if (env_val == ""):
|
||||
env_val = "21.1.6352462"
|
||||
return env_val.strip("/").split("/")[-1]
|
||||
|
||||
def get_android_ndk_version_major():
|
||||
val = get_android_ndk_version().split(".")[0]
|
||||
val = re.sub("[^0-9]", "", val)
|
||||
return int(val)
|
||||
|
||||
def get_sdk_api():
|
||||
if (23 > get_android_ndk_version_major()):
|
||||
return "21"
|
||||
return "23"
|
||||
|
||||
global archs
|
||||
archs = ["arm64", "arm", "x86_64", "x86"]
|
||||
|
||||
global platforms
|
||||
platforms = {
|
||||
"arm64" : {
|
||||
"abi" : "arm64-v8a",
|
||||
"target" : "aarch64-linux-android",
|
||||
"dst" : "arm64_v8a",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "aarch64-linux-android"
|
||||
},
|
||||
"arm" : {
|
||||
"abi" : "armeabi-v7a",
|
||||
"target" : "armv7a-linux-androideabi",
|
||||
"dst" : "armv7",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "arm-linux-android"
|
||||
},
|
||||
"x86_64" : {
|
||||
"arch" : "x86_64",
|
||||
"target" : "x86_64-linux-android",
|
||||
"dst" : "x86_64",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "x86_64-linux-android"
|
||||
},
|
||||
"x86" : {
|
||||
"arch" : "x86",
|
||||
"target" : "i686-linux-android",
|
||||
"dst" : "x86",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "i686-linux-android"
|
||||
}
|
||||
}
|
||||
|
||||
# todo: check arm host!
|
||||
global host
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
host = {
|
||||
"name" : "linux",
|
||||
"arch" : "linux-x86_64"
|
||||
}
|
||||
else:
|
||||
host = {
|
||||
"name" : "darwin",
|
||||
"arch" : "darwin-x86_64"
|
||||
}
|
||||
|
||||
def get_android_ndk_version():
|
||||
#return "26.2.11394342"
|
||||
return "21.1.6352462"
|
||||
|
||||
def get_android_ndk_version_major():
|
||||
return int(get_android_ndk_version().split(".")[0])
|
||||
|
||||
def get_options_dict_as_array(opts):
|
||||
value = []
|
||||
for key in opts:
|
||||
value.append(key + "=" + opts[key])
|
||||
return value
|
||||
|
||||
def get_options_array_as_string(opts):
|
||||
return " ".join(opts)
|
||||
|
||||
def ndk_dir():
|
||||
return base.get_env("ANDROID_NDK_ROOT")
|
||||
|
||||
def sdk_dir():
|
||||
ndk_path = ndk_dir()
|
||||
if (-1 != ndk_path.find("/ndk/")):
|
||||
return ndk_path + "/../.."
|
||||
return ndk_path + "/.."
|
||||
|
||||
def toolchain_dir():
|
||||
return ndk_dir() + "/toolchains/llvm/prebuilt/" + host["arch"]
|
||||
|
||||
def prepare_platform(arch, cpp_standard=11):
|
||||
target = platforms[arch]["target"]
|
||||
api = platforms[arch]["api"]
|
||||
|
||||
ndk_directory = ndk_dir()
|
||||
toolchain = toolchain_dir()
|
||||
|
||||
base.set_env("TARGET", target)
|
||||
base.set_env("TOOLCHAIN", toolchain)
|
||||
base.set_env("NDK_STANDARD_ROOT", toolchain)
|
||||
base.set_env("ANDROIDVER", api)
|
||||
base.set_env("ANDROID_API", api)
|
||||
|
||||
base.set_env("AR", toolchain + "/bin/llvm-ar")
|
||||
base.set_env("AS", toolchain + "/bin/llvm-as")
|
||||
base.set_env("LD", toolchain + "/bin/ld")
|
||||
base.set_env("RANLIB", toolchain + "/bin/llvm-ranlib")
|
||||
base.set_env("STRIP", toolchain + "/bin/llvm-strip")
|
||||
|
||||
base.set_env("CC", target + api + "-clang")
|
||||
base.set_env("CXX", target + api + "-clang++")
|
||||
|
||||
ld_flags = "-Wl,--gc-sections,-rpath-link=" + toolchain + "/sysroot/usr/lib/"
|
||||
if (23 > get_android_ndk_version_major()):
|
||||
ld_flags += (" -L" + toolchain + "/" + platforms[arch]["old"] + "/lib")
|
||||
ld_flags += (" -L" + toolchain + "/sysroot/usr/lib/" + platforms[arch]["old"] + "/" + api)
|
||||
|
||||
base.set_env("LDFLAGS", ld_flags)
|
||||
base.set_env("PATH", toolchain + "/bin" + os.pathsep + base.get_env("PATH"))
|
||||
|
||||
cflags = [
|
||||
"-Os",
|
||||
"-ffunction-sections",
|
||||
"-fdata-sections",
|
||||
"-fvisibility=hidden",
|
||||
|
||||
"-Wno-unused-function",
|
||||
|
||||
"-fPIC",
|
||||
|
||||
"-I" + toolchain + "/sysroot/usr/include",
|
||||
|
||||
"-D__ANDROID_API__=" + api,
|
||||
"-DANDROID"
|
||||
]
|
||||
|
||||
cflags_string = " ".join(cflags)
|
||||
cppflags_string = cflags_string
|
||||
|
||||
if (cpp_standard >= 11):
|
||||
cppflags_string += " -std=c++11"
|
||||
|
||||
base.set_env("CFLAGS", cflags_string)
|
||||
base.set_env("CXXFLAGS", cppflags_string)
|
||||
base.set_env("CPPPLAGS", cflags_string)
|
||||
return
|
||||
|
||||
def extend_cflags(params):
|
||||
base.set_env("CFLAGS", base.get_env("CFLAGS") + " " + params)
|
||||
base.set_env("CPPFLAGS", base.get_env("CFLAGS"))
|
||||
return
|
||||
|
||||
def extend_cxxflags(params):
|
||||
base.set_env("CXXFLAGS", base.get_env("CXXFLAGS") + " " + params)
|
||||
return
|
||||
|
||||
def extend_ldflags(params):
|
||||
base.set_env("LDFLAGS", base.get_env("LDFLAGS") + " " + params)
|
||||
return
|
||||
94
scripts/core_common/modules/android/curl_android.py
Executable file
94
scripts/core_common/modules/android/curl_android.py
Executable file
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import android_ndk
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/curl"
|
||||
current_dir = os.path.abspath(current_dir)
|
||||
if not current_dir.endswith("/"):
|
||||
current_dir += "/"
|
||||
|
||||
lib_version = "curl-7_68_0"
|
||||
lib_name = "curl-7.68.0"
|
||||
|
||||
def fetch():
|
||||
if not base.is_dir(current_dir + lib_name):
|
||||
base.cmd("curl", ["-L", "-s", "-o", current_dir + lib_name + ".tar.gz",
|
||||
"https://github.com/curl/curl/releases/download/" + lib_version + "/" + lib_name + ".tar.gz"])
|
||||
base.cmd("tar", ["xfz", current_dir + lib_name + ".tar.gz", "-C", current_dir])
|
||||
return
|
||||
|
||||
def build_host():
|
||||
return
|
||||
|
||||
def build_arch(arch):
|
||||
dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
|
||||
if base.is_dir(dst_dir):
|
||||
return
|
||||
|
||||
android_ndk.prepare_platform(arch)
|
||||
|
||||
ndk_dir = android_ndk.ndk_dir()
|
||||
toolchain = android_ndk.toolchain_dir()
|
||||
|
||||
base.set_env("ANDROID_NDK_HOME", ndk_dir)
|
||||
base.set_env("ANDROID_NDK", ndk_dir)
|
||||
|
||||
arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
|
||||
base.create_dir(arch_build_dir)
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(current_dir + lib_name)
|
||||
|
||||
params = []
|
||||
if ("arm64" == arch):
|
||||
params.append("--host=aarch64-linux-android")
|
||||
elif ("arm" == arch):
|
||||
params.append("--host=arm-linux-androideabi")
|
||||
elif ("x86_64" == arch):
|
||||
params.append("--host=x86_64-linux-android")
|
||||
elif ("x86" == arch):
|
||||
params.append("--host=i686-linux-android")
|
||||
|
||||
openssl_dir = os.path.abspath(current_dir + "../openssl/build/android/" + android_ndk.platforms[arch]["dst"])
|
||||
|
||||
params.append("--enable-ipv6")
|
||||
params.append("--enable-static")
|
||||
params.append("--disable-shared")
|
||||
params.append("--prefix=" + arch_build_dir)
|
||||
params.append("--with-ssl=" + openssl_dir)
|
||||
|
||||
base.cmd("./configure", params)
|
||||
|
||||
base.cmd("make", ["clean"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
base.create_dir(dst_dir)
|
||||
base.copy_file(arch_build_dir + "/lib/libcurl.a", dst_dir)
|
||||
base.copy_dir(arch_build_dir + "/include", current_dir + "build/android/include")
|
||||
|
||||
base.delete_dir(arch_build_dir)
|
||||
return
|
||||
|
||||
def make():
|
||||
old_env = dict(os.environ)
|
||||
|
||||
fetch()
|
||||
|
||||
build_host()
|
||||
|
||||
for arch in android_ndk.archs:
|
||||
build_arch(arch)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
make()
|
||||
147
scripts/core_common/modules/android/icu_android.py
Executable file
147
scripts/core_common/modules/android/icu_android.py
Executable file
@ -0,0 +1,147 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import android_ndk
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
|
||||
current_dir = os.path.abspath(current_dir)
|
||||
if not current_dir.endswith("/"):
|
||||
current_dir += "/"
|
||||
|
||||
icu_major = "58"
|
||||
icu_minor = "3"
|
||||
|
||||
options = {
|
||||
"--enable-strict" : "no",
|
||||
"--enable-extras" : "no",
|
||||
"--enable-draft" : "yes",
|
||||
"--enable-samples" : "no",
|
||||
"--enable-tests" : "no",
|
||||
"--enable-renaming" : "yes",
|
||||
"--enable-icuio" : "no",
|
||||
"--enable-layoutex" : "no",
|
||||
"--with-library-bits" : "nochange",
|
||||
"--with-library-suffix" : "",
|
||||
"--enable-static" : "yes",
|
||||
"--enable-shared" : "no",
|
||||
"--with-data-packaging" : "archive"
|
||||
}
|
||||
|
||||
cpp_flags_base = [
|
||||
"-Os",
|
||||
"-ffunction-sections",
|
||||
"-fdata-sections",
|
||||
"-fvisibility=hidden",
|
||||
"-fPIC"
|
||||
]
|
||||
|
||||
cpp_flags = [
|
||||
"-fno-short-wchar",
|
||||
"-fno-short-enums",
|
||||
|
||||
"-DU_USING_ICU_NAMESPACE=0",
|
||||
"-DU_HAVE_NL_LANGINFO_CODESET=0",
|
||||
"-DU_TIMEZONE=0",
|
||||
"-DU_DISABLE_RENAMING=0",
|
||||
|
||||
"-DUCONFIG_NO_COLLATION=0",
|
||||
"-DUCONFIG_NO_FORMATTING=0",
|
||||
"-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
|
||||
"-DUCONFIG_NO_TRANSLITERATION=0",
|
||||
|
||||
"-DU_STATIC_IMPLEMENTATION"
|
||||
]
|
||||
|
||||
def fetch_icu():
|
||||
if not base.is_dir(current_dir + "icu"):
|
||||
base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + icu_major, "https://github.com/unicode-org/icu.git", current_dir + "icu2"])
|
||||
base.copy_dir(current_dir + "icu2/icu4c", current_dir + "icu")
|
||||
base.delete_dir_with_access_error(current_dir + "icu2")
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
|
||||
if False and ("mac" == base.host_platform()):
|
||||
base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
|
||||
return
|
||||
|
||||
def build_host():
|
||||
cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
|
||||
if not base.is_dir(cross_build_dir):
|
||||
base.create_dir(cross_build_dir)
|
||||
os.chdir(cross_build_dir)
|
||||
|
||||
ld_flags = "-pthread"
|
||||
if ("linux" == base.host_platform()):
|
||||
ld_flags += " -Wl,--gc-sections"
|
||||
else:
|
||||
# gcc on OSX does not support --gc-sections
|
||||
ld_flags += " -Wl,-dead_strip"
|
||||
|
||||
base.set_env("LDFLAGS", ld_flags)
|
||||
base.set_env("CPPFLAGS", android_ndk.get_options_array_as_string(cpp_flags_base + cpp_flags))
|
||||
|
||||
host_type = "Linux"
|
||||
if ("mac" == base.host_platform()):
|
||||
host_type = "MacOSX/GCC"
|
||||
|
||||
base.cmd("../source/runConfigureICU", [host_type, "--prefix=" + cross_build_dir] + android_ndk.get_options_dict_as_array(options))
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
|
||||
base.create_dir(current_dir + "build")
|
||||
base.copy_dir(cross_build_dir + "/include", current_dir + "build/include")
|
||||
|
||||
os.chdir(current_dir)
|
||||
return
|
||||
|
||||
def build_arch(arch):
|
||||
dst_dir = current_dir + "build/" + android_ndk.platforms[arch]["dst"]
|
||||
if base.is_dir(dst_dir):
|
||||
return
|
||||
|
||||
android_ndk.prepare_platform(arch)
|
||||
android_ndk.extend_cflags(" ".join(cpp_flags))
|
||||
|
||||
ndk_dir = android_ndk.ndk_dir()
|
||||
toolchain = android_ndk.toolchain_dir()
|
||||
|
||||
cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
|
||||
arch_build_dir = os.path.abspath(current_dir + "build/tmp")
|
||||
base.create_dir(arch_build_dir)
|
||||
|
||||
os.chdir(arch_build_dir)
|
||||
base.cmd("./../../icu/source/configure", ["--with-cross-build=" + cross_build_dir] +
|
||||
android_ndk.get_options_dict_as_array(options) + ["--host=" + android_ndk.platforms[arch]["target"], "--prefix=" + arch_build_dir])
|
||||
base.cmd("make", ["-j4"])
|
||||
os.chdir(current_dir)
|
||||
|
||||
base.create_dir(dst_dir)
|
||||
base.copy_file(arch_build_dir + "/lib/libicuuc.a", dst_dir)
|
||||
base.copy_file(arch_build_dir + "/stubdata/libicudata.a", dst_dir)
|
||||
base.copy_file(arch_build_dir + "/data/out/icudt" + icu_major + "l.dat", dst_dir)
|
||||
|
||||
base.delete_dir(arch_build_dir)
|
||||
return
|
||||
|
||||
def make():
|
||||
if not base.is_dir(current_dir):
|
||||
base.create_dir(current_dir)
|
||||
|
||||
old_env = dict(os.environ)
|
||||
|
||||
fetch_icu()
|
||||
|
||||
build_host()
|
||||
|
||||
for arch in android_ndk.archs:
|
||||
build_arch(arch)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
make()
|
||||
94
scripts/core_common/modules/android/openssl_android.py
Executable file
94
scripts/core_common/modules/android/openssl_android.py
Executable file
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import android_ndk
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
|
||||
current_dir = os.path.abspath(current_dir)
|
||||
if not current_dir.endswith("/"):
|
||||
current_dir += "/"
|
||||
|
||||
lib_name="openssl-1.1.1t"
|
||||
|
||||
options = [
|
||||
"no-shared",
|
||||
"no-tests",
|
||||
"enable-ssl3",
|
||||
"enable-ssl3-method",
|
||||
"enable-md2",
|
||||
"no-asm"
|
||||
]
|
||||
|
||||
def fetch():
|
||||
if not base.is_dir(current_dir + lib_name):
|
||||
base.cmd("curl", ["-L", "-s", "-o", current_dir + lib_name + ".tar.gz",
|
||||
"https://www.openssl.org/source/" + lib_name + ".tar.gz"])
|
||||
base.cmd("tar", ["xfz", current_dir + lib_name + ".tar.gz", "-C", current_dir])
|
||||
return
|
||||
|
||||
def build_host():
|
||||
# not needed, just create directories
|
||||
if not base.is_dir(current_dir + "/build"):
|
||||
base.create_dir(current_dir + "/build")
|
||||
if not base.is_dir(current_dir + "/build/android"):
|
||||
base.create_dir(current_dir + "/build/android")
|
||||
return
|
||||
|
||||
def build_arch(arch):
|
||||
dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
|
||||
if base.is_dir(dst_dir):
|
||||
return
|
||||
|
||||
android_ndk.prepare_platform(arch)
|
||||
|
||||
ndk_dir = android_ndk.ndk_dir()
|
||||
toolchain = android_ndk.toolchain_dir()
|
||||
|
||||
base.set_env("ANDROID_NDK_HOME", ndk_dir)
|
||||
base.set_env("ANDROID_NDK", ndk_dir)
|
||||
|
||||
arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
|
||||
base.create_dir(arch_build_dir)
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(current_dir + lib_name)
|
||||
|
||||
base.cmd("./Configure", ["android-" + arch, "--prefix=" + arch_build_dir, "-D__ANDROID_API__=" + android_ndk.platforms[arch]["api"]] + options)
|
||||
|
||||
base.replaceInFile("./Makefile", "LIB_CFLAGS=", "LIB_CFLAGS=-fvisibility=hidden ")
|
||||
base.replaceInFile("./Makefile", "LIB_CXXFLAGS=", "LIB_CXXFLAGS=-fvisibility=hidden ")
|
||||
|
||||
base.cmd("make", ["clean"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
base.create_dir(dst_dir)
|
||||
base.create_dir(dst_dir + "/lib")
|
||||
base.copy_file(arch_build_dir + "/lib/libcrypto.a", dst_dir + "/lib")
|
||||
base.copy_file(arch_build_dir + "/lib/libssl.a", dst_dir + "/lib")
|
||||
base.copy_dir(arch_build_dir + "/include", dst_dir + "/include")
|
||||
|
||||
base.delete_dir(arch_build_dir)
|
||||
return
|
||||
|
||||
def make():
|
||||
old_env = dict(os.environ)
|
||||
|
||||
fetch()
|
||||
|
||||
build_host()
|
||||
|
||||
for arch in android_ndk.archs:
|
||||
build_arch(arch)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
make()
|
||||
@ -1,108 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
platforms = {
|
||||
"arm64_v8a" : {
|
||||
"name" : "arm64-v8a",
|
||||
"toolset" : "arm64v8a",
|
||||
"clang_triple" : "aarch64-linux-android21",
|
||||
"tool_triple" : "aarch64-linux-android",
|
||||
"abi" : "aapcs",
|
||||
"arch" : "arm",
|
||||
"address_model" : "64",
|
||||
"compiler_flags" : "",
|
||||
"linker_flags" : ""
|
||||
},
|
||||
"armv7" : {
|
||||
"name" : "armeabi-v7a",
|
||||
"toolset" : "armeabiv7a",
|
||||
"clang_triple" : "armv7a-linux-androideabi16",
|
||||
"tool_triple" : "arm-linux-androideabi",
|
||||
"abi" : "aapcs",
|
||||
"arch" : "arm",
|
||||
"address_model" : "32",
|
||||
"compiler_flags" : "-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp",
|
||||
"linker_flags" : "-Wl,--fix-cortex-a8"
|
||||
},
|
||||
"x86" : {
|
||||
"name" : "x86",
|
||||
"toolset" : "x86",
|
||||
"clang_triple" : "i686-linux-android16",
|
||||
"tool_triple" : "i686-linux-android",
|
||||
"abi" : "sysv",
|
||||
"arch" : "x86",
|
||||
"address_model" : "32",
|
||||
"compiler_flags" : "",
|
||||
"linker_flags" : ""
|
||||
},
|
||||
"x86_64" : {
|
||||
"name" : "x86_64",
|
||||
"toolset" : "x8664",
|
||||
"clang_triple" : "x86_64-linux-android21",
|
||||
"tool_triple" : "x86_64-linux-android",
|
||||
"abi" : "sysv",
|
||||
"arch" : "x86",
|
||||
"address_model" : "64",
|
||||
"compiler_flags" : "",
|
||||
"linker_flags" : ""
|
||||
}
|
||||
}
|
||||
|
||||
base_dir = base.get_script_dir()
|
||||
|
||||
def make(platform):
|
||||
tmp_build_dir = base_dir + "/core_common/modules/boost"
|
||||
if (base.is_dir(tmp_build_dir)):
|
||||
base.delete_dir(tmp_build_dir)
|
||||
base.copy_dir(base_dir + "/../tools/android/boost", tmp_build_dir)
|
||||
|
||||
current_platform = platforms[platform]
|
||||
|
||||
if (base.host_platform() == "mac"):
|
||||
source = "prebuilt/linux-x86_64"
|
||||
dest = "prebuilt/darwin-x86_64"
|
||||
base.replaceInFile(tmp_build_dir + "/user-config.jam", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/hide/as", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/hide/strip", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/ar", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/clang++", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/ranlib", source, dest)
|
||||
|
||||
build_dir_tmp = tmp_build_dir + "/tmp"
|
||||
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex", "--prefix=../build/android_" + platform])
|
||||
base.cmd("./b2", ["headers"])
|
||||
base.cmd("./b2", ["--clean"])
|
||||
|
||||
old_path = base.get_env("PATH")
|
||||
base.set_env("PATH", tmp_build_dir + "/bin:" + old_path)
|
||||
base.set_env("NDK_DIR", base.get_env("ANDROID_NDK_ROOT"))
|
||||
|
||||
base.set_env("BFA_CLANG_TRIPLE_FOR_ABI", current_platform["clang_triple"])
|
||||
base.set_env("BFA_TOOL_TRIPLE_FOR_ABI", current_platform["tool_triple"])
|
||||
base.set_env("BFA_COMPILER_FLAGS_FOR_ABI", current_platform["compiler_flags"])
|
||||
base.set_env("BFA_LINKER_FLAGS_FOR_ABI", current_platform["linker_flags"])
|
||||
|
||||
print(current_platform)
|
||||
base.cmd("./b2", ["-q", "-j4",
|
||||
"toolset=clang-" + current_platform["toolset"],
|
||||
"binary-format=elf",
|
||||
"address-model=" + current_platform["address_model"],
|
||||
"architecture=" + current_platform["arch"],
|
||||
"abi=" + current_platform["abi"],
|
||||
"link=static",
|
||||
"threading=multi",
|
||||
"target-os=android",
|
||||
"--user-config=" + tmp_build_dir + "/user-config.jam",
|
||||
"--ignore-site-config",
|
||||
"--layout=system",
|
||||
"install"], True)
|
||||
|
||||
base.set_env("PATH", old_path)
|
||||
base.delete_dir(tmp_build_dir)
|
||||
return
|
||||
@ -5,7 +5,7 @@ sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import build
|
||||
import qmake
|
||||
|
||||
def make(src_dir, modules, build_platform="android", qmake_addon=""):
|
||||
old_cur = os.getcwd()
|
||||
@ -23,17 +23,13 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
|
||||
pro_file_content.append("TARGET = boost_" + module)
|
||||
pro_file_content.append("TEMPLATE = lib")
|
||||
pro_file_content.append("CONFIG += staticlib")
|
||||
if (build_platform == "android"):
|
||||
pro_file_content.append("DEFINES += \"_HAS_AUTO_PTR_ETC=0\"")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("CORE_ROOT_DIR = $$PWD/../../../../../..")
|
||||
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
|
||||
pro_file_content.append("include($$PWD/../../../../../base.pri)")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
|
||||
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
|
||||
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
|
||||
pro_file_content.append("!isEmpty(OO_BRANDING_SUFFIX):MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"$$OO_BRANDING_SUFFIX\")")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("BOOST_SOURCES=$$PWD/../..")
|
||||
pro_file_content.append("INCLUDEPATH += $$BOOST_SOURCES")
|
||||
pro_file_content.append("INCLUDEPATH += $$PWD/include")
|
||||
@ -43,7 +39,7 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
|
||||
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
|
||||
os.chdir(module_dir)
|
||||
build.make_pro_file("./", module + ".pro", qmake_addon)
|
||||
qmake.make_all_platforms(module_dir + "/" + module + ".pro", qmake_addon)
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
@ -2,21 +2,19 @@
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
sys.path.append('android')
|
||||
import config
|
||||
import subprocess
|
||||
import os
|
||||
import base
|
||||
import curl_android
|
||||
|
||||
def make():
|
||||
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(path)
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
if base.is_dir(path + "/build/android"):
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
subprocess.call(["./build-android-curl.sh"])
|
||||
|
||||
curl_android.make()
|
||||
elif (-1 != config.option("platform").find("ios")):
|
||||
if base.is_dir(path + "/build/ios"):
|
||||
os.chdir(old_cur)
|
||||
|
||||
@ -7,7 +7,27 @@ import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def clear_module():
|
||||
directories = ["gumbo-parser", "katana-parser"]
|
||||
|
||||
for dir in directories:
|
||||
if base.is_dir(dir):
|
||||
base.delete_dir_with_access_error(dir)
|
||||
|
||||
def make():
|
||||
old_cur_dir = os.getcwd()
|
||||
|
||||
print("[fetch]: html")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/html"
|
||||
|
||||
os.chdir(base_dir)
|
||||
base.check_module_version("2", clear_module)
|
||||
os.chdir(old_cur_dir)
|
||||
|
||||
base.cmd_in_dir(base_dir, "python", ["fetch.py"])
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make()
|
||||
|
||||
@ -3,6 +3,11 @@ sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
|
||||
def clean():
|
||||
if base.is_dir("hunspell"):
|
||||
base.delete_dir_with_access_error("hunspell")
|
||||
return
|
||||
|
||||
def make(build_js = True):
|
||||
|
||||
old_cur_dir = os.getcwd()
|
||||
@ -11,6 +16,8 @@ def make(build_js = True):
|
||||
core_common_dir = base.get_script_dir() + "/../../core/Common"
|
||||
|
||||
os.chdir(core_common_dir + "/3dParty/hunspell")
|
||||
|
||||
base.common_check_version("hunspell", "1", clean)
|
||||
base.cmd("python", ["./before.py"])
|
||||
|
||||
if (build_js):
|
||||
|
||||
@ -2,26 +2,52 @@
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
sys.path.append('android')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import icu_android
|
||||
|
||||
def fetch_icu(major, minor):
|
||||
if (base.is_dir("./icu2")):
|
||||
base.delete_dir_with_access_error("icu2")
|
||||
base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + major, "https://github.com/unicode-org/icu.git", "./icu2"])
|
||||
base.copy_dir("./icu2/icu4c", "./icu")
|
||||
base.delete_dir_with_access_error("icu2")
|
||||
#base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
|
||||
return
|
||||
|
||||
def clear_module():
|
||||
if base.is_dir("icu"):
|
||||
base.delete_dir_with_access_error("icu")
|
||||
|
||||
# remove build
|
||||
for child in glob.glob("./*"):
|
||||
if base.is_dir(child):
|
||||
base.delete_dir(child)
|
||||
|
||||
return
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: icu")
|
||||
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
icu_android.make()
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
icu_major = "58"
|
||||
icu_minor = "2"
|
||||
base.check_module_version("3", clear_module)
|
||||
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
icu_android.make()
|
||||
|
||||
os.chdir(base_dir)
|
||||
|
||||
icu_major = "58"
|
||||
icu_minor = "3"
|
||||
|
||||
if not base.is_dir("icu"):
|
||||
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
|
||||
fetch_icu(icu_major, icu_minor)
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
platformToolset = "v140"
|
||||
|
||||
@ -1,172 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
|
||||
|
||||
toolshains_dir = current_dir + "/toolchains"
|
||||
icu_major = "58"
|
||||
icu_minor = "2"
|
||||
icu_is_shared = False
|
||||
|
||||
current_path = base.get_env("PATH")
|
||||
|
||||
platforms = {
|
||||
"arm64" : {
|
||||
"arch" : "aarch64-linux-android",
|
||||
"bin" : "aarch64-linux-android"
|
||||
},
|
||||
"arm" : {
|
||||
"arch" : "arm-linux-androideabi",
|
||||
"bin" : "arm-linux-androideabi"
|
||||
},
|
||||
"x86_64" : {
|
||||
"arch" : "x86_64-linux-android",
|
||||
"bin" : "x86_64-linux-android"
|
||||
},
|
||||
"x86" : {
|
||||
"arch" : "x86-linux-android",
|
||||
"bin" : "i686-linux-android"
|
||||
}
|
||||
}
|
||||
|
||||
def build_arch(arch, api_version):
|
||||
print("icu build: " + arch + " ----------------------------------------")
|
||||
|
||||
if base.is_dir(current_dir + "/icu/" + arch):
|
||||
base.delete_dir(current_dir + "/icu/" + arch)
|
||||
base.create_dir(current_dir + "/icu/" + arch)
|
||||
os.chdir(current_dir + "/icu/" + arch)
|
||||
|
||||
base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
|
||||
"--platform=android-" + api_version,
|
||||
"--install-dir=" + current_dir + "/toolchain/" + arch,
|
||||
"--toolchain=" + platforms[arch]["arch"],
|
||||
"--force"
|
||||
])
|
||||
|
||||
base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)
|
||||
|
||||
command_args = "--prefix=" + current_dir + "/build_tmp/" + arch + " --host=!!!MASK!!! --with-cross-build=" + current_dir + "/icu/cross_build CFLAGS=-Os CXXFLAGS=--std=c++11 CC=!!!MASK!!!-clang CXX=!!!MASK!!!-clang++ AR=!!!MASK!!!-ar RANLIB=!!!MASK!!!-ranlib"
|
||||
if not icu_is_shared:
|
||||
command_args += " --enable-static --enable-shared=no --with-data-packaging=archive CFLAGS=-fPIC CXXFLAGS=-fPIC"
|
||||
command_args = command_args.replace("!!!MASK!!!", platforms[arch]["bin"])
|
||||
|
||||
base.cmd("../source/configure", command_args.split())
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
base.set_env("PATH", current_path)
|
||||
os.chdir(current_dir)
|
||||
|
||||
return
|
||||
|
||||
def make():
|
||||
if not base.is_dir(current_dir):
|
||||
base.create_dir(current_dir)
|
||||
|
||||
if base.is_dir(current_dir + "/build"):
|
||||
return
|
||||
|
||||
current_dir_old = os.getcwd()
|
||||
|
||||
print("[fetch & build]: icu_android")
|
||||
os.chdir(current_dir)
|
||||
|
||||
if not base.is_dir("icu"):
|
||||
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
|
||||
if ("linux" == base.host_platform()):
|
||||
base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
|
||||
if ("mac" == base.host_platform()):
|
||||
base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
|
||||
|
||||
if not base.is_dir(current_dir + "/icu/cross_build"):
|
||||
base.create_dir(current_dir + "/icu/cross_build")
|
||||
os.chdir(current_dir + "/icu/cross_build")
|
||||
base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
|
||||
"--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
|
||||
os.chdir(current_dir)
|
||||
|
||||
build_arch("arm64", "21")
|
||||
build_arch("arm", "16")
|
||||
build_arch("x86_64","21")
|
||||
build_arch("x86", "16")
|
||||
|
||||
os.chdir(current_dir)
|
||||
|
||||
base.create_dir(current_dir + "/build")
|
||||
base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")
|
||||
|
||||
if icu_is_shared:
|
||||
base.create_dir(current_dir + "/build/arm64_v8a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")
|
||||
|
||||
base.create_dir(current_dir + "/build/armv7")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86_64")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")
|
||||
|
||||
# patch elf information
|
||||
os.chdir(current_dir + "/build")
|
||||
base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
|
||||
os.chdir("./patchelf")
|
||||
base.cmd("./bootstrap.sh")
|
||||
base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
|
||||
base.cmd("make")
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../arm64_v8a/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../arm64_v8a/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../arm64_v8a/libicuuc.so"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../armv7/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../armv7/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../armv7/libicuuc.so"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86_64/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86_64/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86_64/libicuuc.so"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86/libicuuc.so"])
|
||||
|
||||
base.delete_dir(current_dir + "/build/patchelf")
|
||||
|
||||
if not icu_is_shared:
|
||||
base.create_dir(current_dir + "/build/arm64_v8a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.a", current_dir + "/build/arm64_v8a/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.a", current_dir + "/build/arm64_v8a/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/arm64/data/out/icudt58l.dat", current_dir + "/build/arm64_v8a/icudt58l.dat")
|
||||
|
||||
base.create_dir(current_dir + "/build/armv7")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.a", current_dir + "/build/armv7/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.a", current_dir + "/build/armv7/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/arm/data/out/icudt58l.dat", current_dir + "/build/armv7/icudt58l.dat")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86_64")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.a", current_dir + "/build/x86_64/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.a", current_dir + "/build/x86_64/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/x86_64/data/out/icudt58l.dat", current_dir + "/build/x86_64/icudt58l.dat")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.a", current_dir + "/build/x86/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.a", current_dir + "/build/x86/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/x86/data/out/icudt58l.dat", current_dir + "/build/x86/icudt58l.dat")
|
||||
|
||||
os.chdir(current_dir_old)
|
||||
return
|
||||
@ -35,7 +35,7 @@ def restore_icu_defs(current_dir):
|
||||
return
|
||||
|
||||
icu_major = "58"
|
||||
icu_minor = "2"
|
||||
icu_minor = "3"
|
||||
|
||||
current_dir_old = os.getcwd()
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
|
||||
|
||||
38
scripts/core_common/modules/iwork.py
Normal file
38
scripts/core_common/modules/iwork.py
Normal file
@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def clear_module():
|
||||
directories = ["glm", "libetonyek", "libodfgen", "librevenge", "mdds"]
|
||||
|
||||
for dir in directories:
|
||||
if base.is_dir(dir):
|
||||
base.delete_dir_with_access_error(dir)
|
||||
|
||||
def make(use_gperf = True):
|
||||
old_cur_dir = os.getcwd()
|
||||
|
||||
print("[fetch & build]: iwork")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/apple"
|
||||
|
||||
os.chdir(base_dir)
|
||||
base.check_module_version("3", clear_module)
|
||||
os.chdir(old_cur_dir)
|
||||
|
||||
cmd_args = ["fetch.py"]
|
||||
|
||||
if use_gperf:
|
||||
cmd_args.append("--gperf")
|
||||
|
||||
base.cmd_in_dir(base_dir, "python", cmd_args)
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make(False)
|
||||
@ -62,6 +62,7 @@ def make():
|
||||
vlc_dir = base_dir + "/vlc"
|
||||
vlc_version = "3.0.18"
|
||||
|
||||
tools_dir = base.get_script_dir() + "/../tools"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
@ -73,7 +74,7 @@ def make():
|
||||
base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
|
||||
if "windows" == base.host_platform():
|
||||
base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
|
||||
|
||||
|
||||
base.create_dir("build")
|
||||
base.copy_file("tools/ignore-cache-time.patch", "vlc")
|
||||
|
||||
@ -83,7 +84,7 @@ def make():
|
||||
base.copy_file("tools/win_64/build.patch", "vlc")
|
||||
docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
|
||||
form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
|
||||
|
||||
|
||||
if config.check_option("platform", "win_32"):
|
||||
base.copy_file("tools/win_32/build.patch", "vlc")
|
||||
docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
|
||||
@ -91,9 +92,11 @@ def make():
|
||||
|
||||
# linux
|
||||
if config.check_option("platform", "linux_64"):
|
||||
base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
|
||||
base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
|
||||
docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
|
||||
form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
|
||||
|
||||
|
||||
# mac
|
||||
if "mac" == base.host_platform():
|
||||
os.chdir(vlc_dir)
|
||||
|
||||
@ -1,18 +1,19 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
sys.path.append('android')
|
||||
import base
|
||||
import config
|
||||
import os
|
||||
import subprocess
|
||||
import openssl_android
|
||||
|
||||
def make():
|
||||
path = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(path)
|
||||
base.set_env("ANDROID_HOME", base.get_android_sdk_home())
|
||||
|
||||
if (-1 != config.option("platform").find("android") and not base.is_dir("./build/android")):
|
||||
subprocess.call(["./build-android-openssl.sh"])
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
openssl_android.make()
|
||||
|
||||
if (-1 != config.option("platform").find("ios") and not base.is_dir("./build/ios")):
|
||||
subprocess.call(["./build-ios-openssl.sh"])
|
||||
|
||||
@ -27,8 +27,16 @@ def make():
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
|
||||
if not base.is_dir(base_dir + "/socket.io-client-cpp"):
|
||||
base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
|
||||
# patches
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
|
||||
|
||||
# no tls realization (remove if socket.io fix this)
|
||||
dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"
|
||||
|
||||
@ -93,6 +93,7 @@ def make():
|
||||
|
||||
if not base.is_dir("depot_tools"):
|
||||
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
|
||||
v8_89.change_bootstrap()
|
||||
if ("windows" == base.host_platform()):
|
||||
# hack for 32 bit system!!!
|
||||
if base.is_file("depot_tools/cipd.ps1"):
|
||||
@ -118,7 +119,7 @@ def make():
|
||||
# windows hack (delete later) ----------------------
|
||||
if ("windows" == base.host_platform()):
|
||||
base.delete_dir_with_access_error("v8/buildtools/win")
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
|
||||
base.cmd("gclient", ["sync", "--force"], True)
|
||||
else:
|
||||
base.cmd("gclient", ["sync"], True)
|
||||
@ -225,6 +226,7 @@ def make_xp():
|
||||
|
||||
if not base.is_dir("depot_tools"):
|
||||
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
|
||||
v8_89.change_bootstrap()
|
||||
if ("windows" == base.host_platform()):
|
||||
# hack for 32 bit system!!!
|
||||
if base.is_file("depot_tools/cipd.ps1"):
|
||||
@ -232,7 +234,7 @@ def make_xp():
|
||||
|
||||
# old variant
|
||||
#path_to_python2 = "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin"
|
||||
path_to_python2 = "/depot_tools/bootstrap-2@3_8_10_chromium_26_bin/python/bin"
|
||||
path_to_python2 = "/depot_tools/bootstrap-2@3_8_10_chromium_23_bin/python/bin"
|
||||
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
|
||||
base_dir + path_to_python2,
|
||||
config.option("vs-path") + "/../Common7/IDE",
|
||||
@ -244,7 +246,7 @@ def make_xp():
|
||||
base.cmd("./depot_tools/fetch", ["v8"], True)
|
||||
base.cmd("./depot_tools/gclient", ["sync", "-r", "4.10.253"], True)
|
||||
base.delete_dir_with_access_error("v8/buildtools/win")
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
|
||||
base.cmd("gclient", ["sync", "--force"], True)
|
||||
|
||||
# save common py script
|
||||
@ -269,6 +271,13 @@ def make_xp():
|
||||
" replaceInFile(file, '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>')",
|
||||
]);
|
||||
|
||||
programFilesDir = base.get_env("ProgramFiles")
|
||||
if ("" != base.get_env("ProgramFiles(x86)")):
|
||||
programFilesDir = base.get_env("ProgramFiles(x86)")
|
||||
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
|
||||
if (base.is_dir(dev_path)):
|
||||
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
|
||||
|
||||
# add "SET CL=\"/D_ITERATOR_DEBUG_LEVEL=0\"" before devenv for disable _ITERATOR_DEBUG_LEVEL in debug
|
||||
if config.check_option("platform", "win_64_xp"):
|
||||
if not base.is_dir("win_64/release"):
|
||||
|
||||
@ -7,6 +7,29 @@ import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def change_bootstrap():
|
||||
base.move_file("./depot_tools/bootstrap/manifest.txt", "./depot_tools/bootstrap/manifest.txt.bak")
|
||||
content = "# changed by build_tools\n\n"
|
||||
content += "$VerifiedPlatform windows-amd64 windows-arm64 linux-amd64 mac-amd64 mac-arm64\n\n"
|
||||
|
||||
content += "@Subdir python\n"
|
||||
content += "infra/3pp/tools/cpython/${platform} version:2@2.7.18.chromium.39\n\n"
|
||||
|
||||
content += "@Subdir python3\n"
|
||||
content += "infra/3pp/tools/cpython3/${platform} version:2@3.8.10.chromium.23\n\n"
|
||||
|
||||
content += "@Subdir git\n"
|
||||
content += "infra/3pp/tools/git/${platform} version:2@2.41.0.chromium.11\n"
|
||||
|
||||
base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
|
||||
"raise subprocess.CalledProcessError(proc.returncode, argv, None)", "return")
|
||||
|
||||
base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
|
||||
" _win_git_bootstrap_config()", " #_win_git_bootstrap_config()")
|
||||
|
||||
base.writeFile("./depot_tools/bootstrap/manifest.txt", content)
|
||||
return
|
||||
|
||||
def make_args(args, platform, is_64=True, is_debug=False):
|
||||
args_copy = args[:]
|
||||
if is_64:
|
||||
@ -46,6 +69,12 @@ def ninja_windows_make(args, is_64=True, is_debug=False):
|
||||
base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak")
|
||||
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
|
||||
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")
|
||||
|
||||
win_toolset_wrapper_file = "build/toolchain/win/tool_wrapper.py"
|
||||
win_toolset_wrapper_file_content = base.readFile("build/toolchain/win/tool_wrapper.py")
|
||||
if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")):
|
||||
base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n")
|
||||
|
||||
base.cmd("ninja", ["-C", directory_out, "v8_wrappers"])
|
||||
base.cmd("ninja", ["-C", directory_out])
|
||||
base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja")
|
||||
@ -82,9 +111,13 @@ def make():
|
||||
if not base.is_dir(base_dir):
|
||||
base.create_dir(base_dir)
|
||||
|
||||
if ("mac" == base.host_platform()):
|
||||
base.cmd("git", ["config", "--global", "http.postBuffer", "157286400"], True)
|
||||
|
||||
os.chdir(base_dir)
|
||||
if not base.is_dir("depot_tools"):
|
||||
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
|
||||
change_bootstrap()
|
||||
|
||||
os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]
|
||||
|
||||
@ -97,7 +130,7 @@ def make():
|
||||
base.copy_dir("./v8/third_party", "./v8/third_party_new")
|
||||
if ("windows" == base.host_platform()):
|
||||
os.chdir("v8")
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
|
||||
os.chdir("../")
|
||||
v8_branch_version = "remotes/branch-heads/8.9"
|
||||
if ("mac" == base.host_platform()):
|
||||
@ -113,6 +146,11 @@ def make():
|
||||
else:
|
||||
base.replaceInFile("depot_tools/gclient_paths.py", "@functools.lru_cache", "")
|
||||
|
||||
if ("mac" == base.host_platform()):
|
||||
if not base.is_file("v8/build/config/compiler/BUILD.gn.bak"):
|
||||
base.copy_file("v8/build/config/compiler/BUILD.gn", "v8/build/config/compiler/BUILD.gn.bak")
|
||||
base.replaceInFile("v8/build/config/compiler/BUILD.gn", "\"-Wloop-analysis\",", "\"-Wloop-analysis\", \"-D_Float16=short\",")
|
||||
|
||||
if not base.is_file("v8/third_party/jinja2/tests.py.bak"):
|
||||
base.copy_file("v8/third_party/jinja2/tests.py", "v8/third_party/jinja2/tests.py.bak")
|
||||
base.replaceInFile("v8/third_party/jinja2/tests.py", "from collections import Mapping", "try:\n from collections.abc import Mapping\nexcept ImportError:\n from collections import Mapping")
|
||||
|
||||
6
scripts/core_common/modules/websocket.py → scripts/core_common/modules/websocket_all.py
Executable file → Normal file
6
scripts/core_common/modules/websocket.py → scripts/core_common/modules/websocket_all.py
Executable file → Normal file
@ -4,12 +4,10 @@ import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import ixwebsocket
|
||||
import socketrocket
|
||||
#import ixwebsocket
|
||||
#import socketrocket
|
||||
import socket_io
|
||||
|
||||
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
|
||||
|
||||
def make():
|
||||
#ixwebsocket.make()
|
||||
#socketrocket.make()
|
||||
@ -40,9 +40,10 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
@ -76,6 +77,12 @@ def make():
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
|
||||
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
|
||||
# python wrapper
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
|
||||
# java wrapper
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.jni")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.java/build/libs/docbuilder.jar", root_dir + "/docbuilder.jar")
|
||||
|
||||
# app
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
|
||||
@ -111,9 +118,31 @@ def make():
|
||||
if ("ios" == platform):
|
||||
base.generate_plist(root_dir)
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.linux_correct_rpath_docbuilder(root_dir)
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.mac_correct_rpath_x2t(root_dir)
|
||||
base.mac_correct_rpath_docbuilder(root_dir)
|
||||
|
||||
base.create_x2t_js_cache(root_dir, "builder", platform)
|
||||
|
||||
# delete unnecessary builder files
|
||||
def delete_files(files):
|
||||
for file in files:
|
||||
base.delete_file(file)
|
||||
|
||||
delete_files(base.find_files(root_dir, "*.wasm"))
|
||||
delete_files(base.find_files(root_dir, "*_ie.js"))
|
||||
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/cmap.bin")
|
||||
if 0 != platform.find("mac"):
|
||||
delete_files(base.find_files(root_dir, "sdk-all.js"))
|
||||
delete_files(base.find_files(root_dir, "sdk-all-min.js"))
|
||||
base.delete_dir(root_dir + "/sdkjs/slide/themes")
|
||||
base.delete_dir(root_dir + "/sdkjs/cell/css")
|
||||
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/viewer.js")
|
||||
base.delete_file(root_dir + "/sdkjs/common/spell/spell/spell.js.mem")
|
||||
base.delete_dir(root_dir + "/sdkjs/common/Images")
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -30,7 +30,6 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfFile")
|
||||
@ -38,7 +37,10 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")
|
||||
|
||||
@ -61,13 +63,14 @@ def make():
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester")
|
||||
|
||||
# js cache
|
||||
base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
|
||||
base.create_x2t_js_cache(archive_dir, "core", platform)
|
||||
base.delete_file(archive_dir + "/DoctRenderer.config")
|
||||
|
||||
# dictionaries
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)
|
||||
|
||||
if base.is_file(archive_dir + ".7z"):
|
||||
base.delete_file(archive_dir + ".7z")
|
||||
base.archive_folder(archive_dir + "/*", archive_dir + ".7z")
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -66,9 +66,10 @@ def make():
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "DjVuFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "XpsFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlFile2")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlRenderer")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "Fb2File")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "EpubFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "IWorkFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "HWPFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "DocxRenderer")
|
||||
|
||||
if ("ios" == platform):
|
||||
@ -102,12 +103,18 @@ def make():
|
||||
|
||||
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop", "", "../dictionaries")
|
||||
base.copy_dir(git_dir + "/document-templates/new", root_dir + "/converter/empty")
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/templates", root_dir + "/converter/templates")
|
||||
|
||||
# dictionaries
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries")
|
||||
|
||||
# base.copy_dir(git_dir + "/desktop-apps/common/package/fonts", root_dir + "/fonts") # TODO: remove for ver 7.7 if core-fonts enough
|
||||
base.copy_dir(git_dir + "/core-fonts/opensans", root_dir + "/fonts")
|
||||
base.copy_dir(git_dir + "/core-fonts/opensans", root_dir + "/fonts")
|
||||
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
|
||||
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
|
||||
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
|
||||
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
|
||||
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
|
||||
|
||||
base.copy_file(git_dir + "/desktop-apps/common/package/license/3dparty/3DPARTYLICENSE", root_dir + "/3DPARTYLICENSE")
|
||||
|
||||
# cef
|
||||
@ -177,6 +184,8 @@ def make():
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/projicons/" + apps_postfix + "/projicons.exe", root_dir + "/DesktopEditors.exe")
|
||||
if not isWindowsXP:
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/update-daemon/" + apps_postfix + "/updatesvc.exe", root_dir + "/updatesvc.exe")
|
||||
else:
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/online-installer/" + apps_postfix + "/online-installer.exe", root_dir + "/online-installer.exe")
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors.exe", root_dir + "/editors.exe")
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/res/icons/desktopeditors.ico", root_dir + "/app.ico")
|
||||
elif (0 == platform.find("linux")):
|
||||
@ -209,11 +218,14 @@ def make():
|
||||
base.create_dir(root_dir + "/editors")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
|
||||
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
|
||||
base.delete_file(file)
|
||||
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
|
||||
|
||||
base.create_dir(root_dir + "/editors/sdkjs-plugins")
|
||||
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
|
||||
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
|
||||
if not isWindowsXP:
|
||||
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
|
||||
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True, isWindowsXP)
|
||||
# remove some default plugins
|
||||
if base.is_dir(root_dir + "/editors/sdkjs-plugins/speech"):
|
||||
base.delete_dir(root_dir + "/editors/sdkjs-plugins/speech")
|
||||
@ -231,6 +243,8 @@ def make():
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
|
||||
|
||||
base.copy_file(base_dir + "/js/" + branding + "/desktop/index.html", root_dir + "/index.html")
|
||||
base.create_dir(root_dir + "/editors/webext")
|
||||
base.copy_file(base_dir + "/js/" + branding + "/desktop/noconnect.html", root_dir + "/editors/webext/noconnect.html")
|
||||
|
||||
if isWindowsXP:
|
||||
base.create_dir(root_dir + "/providers")
|
||||
@ -248,6 +262,8 @@ def make():
|
||||
if isUseJSC:
|
||||
base.delete_file(root_dir + "/converter/icudtl.dat")
|
||||
|
||||
base.create_x2t_js_cache(root_dir + "/converter", "desktop", platform)
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
base.delete_file(root_dir + "/cef_sandbox.lib")
|
||||
base.delete_file(root_dir + "/libcef.lib")
|
||||
|
||||
@ -58,10 +58,11 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
|
||||
@ -5,6 +5,7 @@ import base
|
||||
|
||||
import re
|
||||
import shutil
|
||||
import glob
|
||||
from tempfile import mkstemp
|
||||
|
||||
def make():
|
||||
@ -77,10 +78,11 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", converter_dir + "/cmap.bin")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "x2t")
|
||||
@ -113,6 +115,11 @@ def make():
|
||||
js_dir = root_dir
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
|
||||
for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
|
||||
+ glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
|
||||
base.delete_file(file)
|
||||
|
||||
base.create_x2t_js_cache(converter_dir, "server", platform)
|
||||
|
||||
# add embed worker code
|
||||
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
|
||||
|
||||
104
scripts/develop/build_lo_linux.py
Normal file
104
scripts/develop/build_lo_linux.py
Normal file
@ -0,0 +1,104 @@
|
||||
# This script was successfully executed on Ubuntu 22.04.5 LTS
|
||||
|
||||
# Before starting, make sure that:
|
||||
# 1. Python >= 3.9
|
||||
# 2. The current working folder with the script and its path do not contain spaces and use Latin characters.
|
||||
# 3. Antivirus is turned off
|
||||
# 4. There is enough free space on the disk (50GB Libre Office and during the unpacking of packages, it's recommended that you allocate at least 80 gigabytes of free space)
|
||||
# 5. The current working folder with the script and its path do not contain spaces and use Latin characters.
|
||||
|
||||
# If the error "You must put some 'source' URIs in your sources.list" occurs, you need to run the command:
|
||||
# software-properties-gtk
|
||||
# in the terminal, and then under the "Ubuntu Software" tab, click "Source code" if it's not turned on and submit
|
||||
|
||||
# after completion, the file will appear:
|
||||
# current_folder_with_script/libreoffice_build/instdir/soffice
|
||||
# debugging can be done via MVS 2022
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio
|
||||
# or via VS Code with c/c++ tools
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Visual_Studio_Code_(VSCode)
|
||||
# or via Qt Creator
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Qt_Creator
|
||||
# or via attatch to the soffice.bin process
|
||||
# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
|
||||
CONFIGURE_PARAMS = [
|
||||
"--enable-dbgutil",
|
||||
"--without-doxygen",
|
||||
"--enable-pch",
|
||||
"--disable-ccache",
|
||||
# "--with-visual-studio=2022",
|
||||
'--enable-symbols="all"'
|
||||
]
|
||||
|
||||
SUDO_DEPENDENCIES = [
|
||||
"git", "build-essential", "zip", "ccache", "junit4", "libkrb5-dev", "nasm", "graphviz", "python3",
|
||||
"python3-dev", "python3-setuptools", "qtbase5-dev", "libkf5coreaddons-dev", "libkf5i18n-dev",
|
||||
"libkf5config-dev", "libkf5windowsystem-dev", "libkf5kio-dev", "libqt5x11extras5-dev", "autoconf",
|
||||
"libcups2-dev", "libfontconfig1-dev", "gperf", "openjdk-17-jdk", "doxygen", "libxslt1-dev",
|
||||
"xsltproc", "libxml2-utils", "libxrandr-dev", "libx11-dev", "bison", "flex", "libgtk-3-dev",
|
||||
"libgstreamer-plugins-base1.0-dev", "libgstreamer1.0-dev", "ant", "ant-optional", "libnss3-dev",
|
||||
"libavahi-client-dev", "libxt-dev"
|
||||
]
|
||||
|
||||
DIR_NAME = "libreoffice"
|
||||
OFFICE_PATH = "instdir/program/soffice"
|
||||
|
||||
class bcolors:
|
||||
OKBLUE = '\033[94m'
|
||||
OKCYAN = '\033[96m'
|
||||
OKGREEN = '\033[92m'
|
||||
FAIL = '\033[91m'
|
||||
RESET = '\033[0m'
|
||||
|
||||
def run_command(command, exit_on_error=True):
|
||||
try:
|
||||
subprocess.run(command, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"{bcolors.FAIL}Error executing command: {command}{bcolors.RESET}")
|
||||
if exit_on_error:
|
||||
sys.exit(1)
|
||||
|
||||
def install_dependencies():
|
||||
print("Updating package list...")
|
||||
run_command("sudo apt update")
|
||||
|
||||
print("Adding PPA for GCC/G++ update...")
|
||||
run_command("sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test")
|
||||
run_command("sudo apt update")
|
||||
|
||||
print("Installing dependencies for LibreOffice...")
|
||||
run_command("sudo apt-get build-dep -y libreoffice")
|
||||
run_command(f"sudo apt-get install {' '.join(map(str, SUDO_DEPENDENCIES))}")
|
||||
|
||||
print("Updating GCC/G++ to v12...")
|
||||
run_command("sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 60 --slave /usr/bin/g++ g++ /usr/bin/g++-12", exit_on_error=False)
|
||||
|
||||
print(bcolors.OKGREEN + "All dependencies successfully installed!" + bcolors.RESET)
|
||||
|
||||
def build_libreoffice():
|
||||
print("Cloning LibreOffice repository...")
|
||||
run_command(f"git clone https://git.libreoffice.org/core {DIR_NAME}", exit_on_error=False)
|
||||
|
||||
print("Changing to build directory...")
|
||||
os.chdir(f"./{DIR_NAME}")
|
||||
|
||||
print("Start configurator autogen.sh...")
|
||||
run_command(f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}")
|
||||
|
||||
print(bcolors.OKCYAN + "Starting libreoffice build, this may take up to 24 hours and takes up about 20 GB of drive space. You will also most likely need at least 8 GBs of RAM, otherwise the machine might fall into swap and appear to freeze up..." + bcolors.RESET)
|
||||
run_command("make")
|
||||
|
||||
print(bcolors.OKGREEN + "LibreOffice build completed!" + bcolors.RESET)
|
||||
|
||||
# print(bcolors.OKCYAN + "Running LibreOffice..." + bcolors.RESET)
|
||||
# run_command(OFFICE_PATH, exit_on_error=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
install_dependencies()
|
||||
build_libreoffice()
|
||||
202
scripts/develop/build_lo_windows.py
Normal file
202
scripts/develop/build_lo_windows.py
Normal file
@ -0,0 +1,202 @@
|
||||
# Before starting, make sure that:
|
||||
# 1. MVS 2022 is installed and the necessary individual components are in its installer
|
||||
# <20> Windows Universal C Runtime
|
||||
# <20> .NET Framework 4.x SDK (.NET Framework 5.x SDK and later are currently not supported. These don't register their information to registry, don't have csc.exe and they use dotnet command with csc.dll instead for compiling.)
|
||||
# <20> C++ 20xx Redistributable MSMs (only required to build MSI installer)
|
||||
# <20> C++ Clang Compiler for Windows (x.x.x)
|
||||
# 2. Java JDK >= 17
|
||||
# 3. Antivirus is turned off
|
||||
# 4. There is enough free space on the disk (50GB Libre Office, 50Gb cygwin64)
|
||||
|
||||
# after completion, the files will appear:
|
||||
# {LO_BUILD_PATH}/sources/libo-core/instdir/program/soffice.exe
|
||||
# {LO_BUILD_PATH}/sources/libo-core/LibreOffice.sln
|
||||
# debugging can be done via MVS 2022
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio
|
||||
# or via attatch to the soffice.bin process
|
||||
# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb
|
||||
|
||||
import sys
|
||||
|
||||
sys.path.append('../../scripts')
|
||||
import threading
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import shutil
|
||||
import argparse
|
||||
import base
|
||||
|
||||
CYGWIN_DOWNLOAD_URL = 'https://cygwin.com/setup-x86_64.exe'
|
||||
CYGWIN_TEMP_PATH = './tmp'
|
||||
CYGWIN_SETUP_FILENAME = 'setup-x86_64.exe'
|
||||
CYGWIN_SETUP_PARAMS = [
|
||||
"-P", "autoconf",
|
||||
"-P", "automake",
|
||||
"-P", "bison",
|
||||
"-P", "cabextract",
|
||||
"-P", "doxygen",
|
||||
"-P", "flex",
|
||||
"-P", "gawk=5.2.2-1",
|
||||
"-P", "gcc-g++",
|
||||
"-P", "gettext-devel",
|
||||
"-P", "git",
|
||||
"-P", "gnupg",
|
||||
"-P", "gperf",
|
||||
"-P", "make",
|
||||
"-P", "mintty",
|
||||
"-P", "nasm",
|
||||
"-P", "openssh",
|
||||
"-P", "openssl",
|
||||
"-P", "patch",
|
||||
"-P", "perl",
|
||||
"-P", "python",
|
||||
"-P", "python3",
|
||||
"-P", "pkg-config",
|
||||
"-P", "rsync",
|
||||
"-P", "unzip",
|
||||
"-P", "vim",
|
||||
"-P", "wget",
|
||||
"-P", "zip",
|
||||
"-P", "perl-Archive-Zip",
|
||||
"-P", "perl-Font-TTF",
|
||||
"-P", "perl-IO-String",
|
||||
"--no-admin",
|
||||
"--quiet-mode"
|
||||
]
|
||||
CYGWIN_BAT_PATH = 'C:/cygwin64/Cygwin.bat'
|
||||
LO_BUILD_PATH = os.path.normpath(os.path.join(os.getcwd(), '../../../LO'))
|
||||
|
||||
CONFIGURE_PARAMS = [f'--with-external-tar="{LO_BUILD_PATH}/sources/lo-externalsrc"',
|
||||
f'--with-junit="{LO_BUILD_PATH}/sources/junit-4.10.jar"',
|
||||
f'--with-ant-home="{LO_BUILD_PATH}/sources/apache-ant-1.9.5"',
|
||||
"--enable-pch",
|
||||
"--disable-ccache",
|
||||
"--with-visual-studio=2022",
|
||||
"--enable-dbgutil",
|
||||
'--enable-symbols="all"']
|
||||
|
||||
|
||||
def create_folder_safe(folder_path):
|
||||
if not os.path.exists(folder_path):
|
||||
try:
|
||||
os.mkdir(folder_path)
|
||||
print(f"Folder '{folder_path}' created successfully.")
|
||||
except Exception as e:
|
||||
print(f"Error creating folder: {e}")
|
||||
else:
|
||||
print(f"Folder '{folder_path}' already exists.")
|
||||
|
||||
|
||||
class CygwinRunner:
|
||||
@staticmethod
|
||||
def process_commands(commands: list[str]):
|
||||
proc = subprocess.Popen(
|
||||
[CYGWIN_BAT_PATH], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
|
||||
shell=True, creationflags=subprocess.CREATE_NEW_CONSOLE
|
||||
)
|
||||
|
||||
def read_stdout():
|
||||
for line in iter(proc.stdout.readline, ''):
|
||||
sys.stdout.write(line)
|
||||
proc.stdout.close()
|
||||
|
||||
def read_stderr():
|
||||
for line in iter(proc.stderr.readline, ''):
|
||||
sys.stderr.write(line)
|
||||
proc.stderr.close()
|
||||
|
||||
stdout_thread = threading.Thread(target=read_stdout)
|
||||
stderr_thread = threading.Thread(target=read_stderr)
|
||||
|
||||
stdout_thread.start()
|
||||
stderr_thread.start()
|
||||
|
||||
for command in commands:
|
||||
proc.stdin.write(command + '\n')
|
||||
proc.stdin.flush()
|
||||
|
||||
stdout_thread.join()
|
||||
stderr_thread.join()
|
||||
|
||||
proc.stdin.close()
|
||||
|
||||
proc.wait()
|
||||
|
||||
@staticmethod
|
||||
def install_gnu_make():
|
||||
base.print_info("install_gnu_make")
|
||||
commands = ['mkdir -p /opt/lo/bin',
|
||||
'cd /opt/lo/bin',
|
||||
'wget https://dev-www.libreoffice.org/bin/cygwin/make-4.2.1-msvc.exe',
|
||||
'cp make-4.2.1-msvc.exe make',
|
||||
'chmod +x make',
|
||||
'exit']
|
||||
CygwinRunner.process_commands(commands)
|
||||
|
||||
@staticmethod
|
||||
def install_ant_and_junit():
|
||||
base.print_info("install_ant_and_junit")
|
||||
commands = [f'mkdir -p {LO_BUILD_PATH}/sources',
|
||||
f'cd {LO_BUILD_PATH}/sources',
|
||||
'wget https://archive.apache.org/dist/ant/binaries/apache-ant-1.9.5-bin.tar.bz2',
|
||||
'tar -xjvf apache-ant-1.9.5-bin.tar.bz2',
|
||||
'wget http://downloads.sourceforge.net/project/junit/junit/4.10/junit-4.10.jar',
|
||||
'exit']
|
||||
CygwinRunner.process_commands(commands)
|
||||
|
||||
@staticmethod
|
||||
def clone_lo():
|
||||
base.print_info("clone_lo")
|
||||
commands = [f'cd {LO_BUILD_PATH}/sources',
|
||||
'git clone https://gerrit.libreoffice.org/core libo-core',
|
||||
'exit']
|
||||
CygwinRunner.process_commands(commands)
|
||||
|
||||
@staticmethod
|
||||
def build_autogen():
|
||||
base.print_info("build_autogen")
|
||||
commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
|
||||
f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}",
|
||||
'exit']
|
||||
CygwinRunner.process_commands(commands)
|
||||
|
||||
@staticmethod
|
||||
def run_make_build():
|
||||
base.print_info("run_make")
|
||||
commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
|
||||
f'/opt/lo/bin/make gb_COLOR=1',
|
||||
"exit"]
|
||||
CygwinRunner.process_commands(commands)
|
||||
|
||||
@staticmethod
|
||||
def build_vs_integration():
|
||||
base.print_info("run_make")
|
||||
commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
|
||||
f'/opt/lo/bin/make gb_COLOR=1 vs-ide-integration',
|
||||
"exit"]
|
||||
CygwinRunner.process_commands(commands)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description="options")
|
||||
parser.add_argument("--lo_build_path", dest="build_path", default=f'../../../LO')
|
||||
parser.add_argument("--disable_sln", dest="disable_sln", action=argparse.BooleanOptionalAction)
|
||||
args = parser.parse_args()
|
||||
|
||||
LO_BUILD_PATH = args.build_path
|
||||
DISABLE_SLN = args.disable_sln
|
||||
create_folder_safe(f'{LO_BUILD_PATH}/sources/lo-externalsrc')
|
||||
create_folder_safe(CYGWIN_TEMP_PATH)
|
||||
os.chdir(CYGWIN_TEMP_PATH)
|
||||
base.download(CYGWIN_DOWNLOAD_URL, CYGWIN_SETUP_FILENAME)
|
||||
subprocess.run([CYGWIN_SETUP_FILENAME] + CYGWIN_SETUP_PARAMS)
|
||||
os.chdir('..')
|
||||
shutil.rmtree(CYGWIN_TEMP_PATH)
|
||||
CygwinRunner.install_gnu_make()
|
||||
CygwinRunner.install_ant_and_junit()
|
||||
CygwinRunner.clone_lo()
|
||||
CygwinRunner.build_autogen()
|
||||
CygwinRunner.run_make_build()
|
||||
if not DISABLE_SLN:
|
||||
CygwinRunner.build_vs_integration()
|
||||
@ -5,9 +5,6 @@ import base
|
||||
import os
|
||||
import json
|
||||
|
||||
def get_core_url(arch, branch):
|
||||
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
|
||||
|
||||
def make():
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
old_cur = os.getcwd()
|
||||
@ -18,16 +15,10 @@ def make():
|
||||
|
||||
os.chdir(work_dir)
|
||||
|
||||
arch = "x64"
|
||||
arch2 = "_64"
|
||||
if ("windows" == base.host_platform()) and not base.host_platform_is64():
|
||||
arch = "x86"
|
||||
arch2 = "_32"
|
||||
|
||||
url = get_core_url(arch, config.option("branch"))
|
||||
url = base.get_autobuild_version("core", "", config.option("branch"))
|
||||
data_url = base.get_file_last_modified_url(url)
|
||||
if (data_url == "" and config.option("branch") != "develop"):
|
||||
url = get_core_url(arch, "develop")
|
||||
url = base.get_autobuild_version("core", "", "develop")
|
||||
data_url = base.get_file_last_modified_url(url)
|
||||
|
||||
old_data_url = base.readFile("./core.7z.data")
|
||||
@ -49,12 +40,6 @@ def make():
|
||||
base.extract("./core.7z", "./")
|
||||
base.writeFile("./core.7z.data", data_url)
|
||||
|
||||
platform = ""
|
||||
if ("windows" == base.host_platform()):
|
||||
platform = "win" + arch2
|
||||
else:
|
||||
platform = base.host_platform() + arch2
|
||||
|
||||
base.copy_files("./core/*", "./")
|
||||
else:
|
||||
print("-----------------------------------------------------------")
|
||||
@ -66,6 +51,12 @@ def make():
|
||||
if not base.is_dir(git_dir + "/sdkjs-plugins"):
|
||||
base.create_dir(git_dir + "/sdkjs-plugins")
|
||||
|
||||
if not base.is_dir(git_dir + "/sdkjs-plugins/v1"):
|
||||
base.create_dir(git_dir + "/sdkjs-plugins/v1")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", git_dir + "/sdkjs-plugins/v1/plugins.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", git_dir + "/sdkjs-plugins/v1/plugins-ui.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", git_dir + "/sdkjs-plugins/v1/plugins.css")
|
||||
|
||||
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
|
||||
base.copy_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False)
|
||||
|
||||
@ -101,7 +92,8 @@ def make():
|
||||
server_addons = []
|
||||
if (config.option("server-addons") != ""):
|
||||
server_addons = config.option("server-addons").rsplit(", ")
|
||||
if ("server-lockstorage" in server_addons):
|
||||
#server-lockstorage is private
|
||||
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
|
||||
server_config["editorDataStorage"] = "editorDataRedis"
|
||||
|
||||
sdkjs_addons = []
|
||||
@ -125,6 +117,8 @@ def make():
|
||||
sql["type"] = config.option("sql-type")
|
||||
if (config.option("db-port") != ""):
|
||||
sql["dbPort"] = config.option("db-port")
|
||||
if (config.option("db-name") != ""):
|
||||
sql["dbName"] = config.option("db-name")
|
||||
if (config.option("db-user") != ""):
|
||||
sql["dbUser"] = config.option("db-user")
|
||||
if (config.option("db-pass") != ""):
|
||||
|
||||
@ -483,8 +483,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
|
||||
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
|
||||
mysqlPath += 'bin'
|
||||
return mysqlPath
|
||||
def get_mysqlLoginSrting():
|
||||
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
|
||||
def get_mysqlLoginString():
|
||||
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
|
||||
def get_mysqlServersInfo():
|
||||
arrInfo = []
|
||||
|
||||
@ -511,14 +511,14 @@ def get_mysqlServersInfo():
|
||||
def check_mysqlServer():
|
||||
base.print_info('Check MySQL Server')
|
||||
dependence = CDependencies()
|
||||
mysqlLoginSrt = get_mysqlLoginSrting()
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
|
||||
|
||||
if (host_platform != 'windows'):
|
||||
result = os.system(mysqlLoginSrt + ' -e "exit"')
|
||||
if (result == 0):
|
||||
connectionResult = base.run_command(connectionString)['stdout']
|
||||
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
|
||||
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
|
||||
print('MySQL configuration is valid')
|
||||
dependence.sqlPath = 'mysql'
|
||||
return dependence
|
||||
@ -535,11 +535,13 @@ def check_mysqlServer():
|
||||
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
|
||||
|
||||
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
|
||||
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
|
||||
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
|
||||
print(mysql_full_name + 'configuration is valid')
|
||||
dependence.sqlPath = info['Location']
|
||||
return dependence
|
||||
print(mysql_full_name + 'configuration is not valid')
|
||||
print(mysql_full_name + 'configuration is not valid:' + connectionResult)
|
||||
# if path exists, then further removal and installation fails(according to startup statistics). it is better to fix issue manually.
|
||||
return dependence
|
||||
|
||||
print('Valid MySQL Server not found')
|
||||
dependence.append_uninstall('MySQL Server')
|
||||
@ -559,23 +561,43 @@ def check_mysqlServer():
|
||||
return dependence
|
||||
def check_MySQLConfig(mysqlPath = ''):
|
||||
result = True
|
||||
mysqlLoginSrt = get_mysqlLoginSrting()
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
|
||||
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
|
||||
print('Database onlyoffice not found')
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
|
||||
print('Database "' + config.option("db-name") + '" not found')
|
||||
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
|
||||
if (not result):
|
||||
return False
|
||||
print('Creating ' + config.option("db-name") + ' tables ...')
|
||||
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
|
||||
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
|
||||
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
|
||||
print('Password encryption is not valid')
|
||||
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
|
||||
|
||||
return result
|
||||
def execMySQLScript(mysql_path_to_bin, scriptPath):
|
||||
print('Execution ' + scriptPath)
|
||||
mysqlLoginSrt = get_mysqlLoginSrting()
|
||||
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
print('CREATE DATABASE ' + dbName + ';')
|
||||
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
|
||||
print('failed CREATE DATABASE ' + dbName + ';')
|
||||
return False
|
||||
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
|
||||
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
|
||||
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
|
||||
# return False
|
||||
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
|
||||
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
|
||||
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
|
||||
# return False
|
||||
return True
|
||||
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
|
||||
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
|
||||
print('Execution ' + scriptPath)
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
|
||||
if (code != 0):
|
||||
print('Execution failed!')
|
||||
return False
|
||||
@ -584,7 +606,7 @@ def execMySQLScript(mysql_path_to_bin, scriptPath):
|
||||
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
|
||||
print('Setting MySQL password encrypting...')
|
||||
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
|
||||
if (code != 0):
|
||||
print('Setting password encryption failed!')
|
||||
return False
|
||||
@ -611,7 +633,7 @@ def get_postrgre_path_to_bin(postgrePath = ''):
|
||||
def get_postgreLoginSrting(userName):
|
||||
if (host_platform == 'windows'):
|
||||
return 'psql -U' + userName + ' '
|
||||
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
|
||||
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U' + userName + ' -hlocalhost '
|
||||
def get_postgreSQLInfoByFlag(flag):
|
||||
arrInfo = []
|
||||
|
||||
@ -647,7 +669,7 @@ def check_postgreSQL():
|
||||
result = os.system(postgreLoginSrt + ' -c "\q"')
|
||||
connectionResult = base.run_command(connectionString)['stdout']
|
||||
|
||||
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
|
||||
if (result != 0 or connectionResult.find(config.option("db-port")) == -1):
|
||||
print('Valid PostgreSQL not found!')
|
||||
dependence.append_install('PostgreSQL')
|
||||
dependence.append_uninstall('PostgreSQL')
|
||||
@ -657,7 +679,7 @@ def check_postgreSQL():
|
||||
return dependence
|
||||
|
||||
arrInfo = get_postgreSQLInfo()
|
||||
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
|
||||
base.set_env('PGPASSWORD', config.option("db-pass"))
|
||||
for info in arrInfo:
|
||||
if (base.is_dir(info['Location']) == False):
|
||||
continue
|
||||
@ -665,7 +687,7 @@ def check_postgreSQL():
|
||||
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
|
||||
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
|
||||
|
||||
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
|
||||
if (connectionResult.find(config.option("db-port")) != -1):
|
||||
print(postgre_full_name + 'configuration is valid')
|
||||
dependence.sqlPath = info['Location']
|
||||
return dependence
|
||||
@ -683,12 +705,12 @@ def check_postgreSQL():
|
||||
def check_postgreConfig(postgrePath = ''):
|
||||
result = True
|
||||
if (host_platform == 'windows'):
|
||||
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
|
||||
base.set_env('PGPASSWORD', config.option("db-pass"))
|
||||
|
||||
rootUser = install_params['PostgreSQL']['root']
|
||||
dbUser = install_params['PostgreSQL']['dbUser']
|
||||
dbName = install_params['PostgreSQL']['dbName']
|
||||
dbPass = install_params['PostgreSQL']['dbPass']
|
||||
dbUser = config.option("db-user")
|
||||
dbName = config.option("db-name")
|
||||
dbPass = config.option("db-pass")
|
||||
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
|
||||
postgreLoginRoot = get_postgreLoginSrting(rootUser)
|
||||
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
|
||||
@ -705,7 +727,7 @@ def check_postgreConfig(postgrePath = ''):
|
||||
base.print_info('Creating ' + dbName + ' user...')
|
||||
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
|
||||
|
||||
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
|
||||
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find(config.option("db-name")) == -1):
|
||||
print('Database ' + dbName + ' not found')
|
||||
base.print_info('Creating ' + dbName + ' database...')
|
||||
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
|
||||
@ -884,13 +906,13 @@ def install_gruntcli():
|
||||
|
||||
def install_mysqlserver():
|
||||
if (host_platform == 'windows'):
|
||||
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
|
||||
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
|
||||
elif (host_platform == 'linux'):
|
||||
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
|
||||
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
|
||||
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
|
||||
code = os.system('sudo apt-get -y install zsh htop') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
|
||||
return os.system('yes | sudo apt install mysql-server') and code
|
||||
return 1
|
||||
|
||||
@ -912,7 +934,7 @@ def install_postgresql():
|
||||
file_name = "install.exe"
|
||||
base.download(download_url, file_name)
|
||||
base.print_info("Install PostgreSQL...")
|
||||
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
|
||||
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
|
||||
else:
|
||||
base.print_info("Install PostgreSQL...")
|
||||
install_command = 'sudo apt install postgresql -y'
|
||||
@ -923,7 +945,7 @@ def install_postgresql():
|
||||
if (host_platform == 'windows'):
|
||||
base.delete_file(file_name)
|
||||
else:
|
||||
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
|
||||
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
|
||||
|
||||
return code
|
||||
|
||||
@ -974,18 +996,11 @@ install_params = {
|
||||
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
|
||||
'Git': '/VERYSILENT /NORESTART',
|
||||
'MySQLServer': {
|
||||
'port': '3306',
|
||||
'user': 'root',
|
||||
'pass': 'onlyoffice',
|
||||
'version': '8.0.21'
|
||||
},
|
||||
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
|
||||
'PostgreSQL': {
|
||||
'root': 'postgres',
|
||||
'dbPort': '5432',
|
||||
'dbName': 'onlyoffice',
|
||||
'dbUser': 'onlyoffice',
|
||||
'dbPass': 'onlyoffice'
|
||||
'root': 'postgres'
|
||||
}
|
||||
}
|
||||
uninstall_params = {
|
||||
|
||||
@ -6,6 +6,7 @@ import base
|
||||
import shutil
|
||||
import optparse
|
||||
import dependence
|
||||
import config
|
||||
|
||||
arguments = sys.argv[1:]
|
||||
|
||||
@ -17,6 +18,10 @@ parser.add_option("--remove-path", action="append", type="string", dest="remove-
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
configOptions = vars(options)
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
config.parse_defaults()
|
||||
|
||||
for item in configOptions["uninstall"]:
|
||||
dependence.uninstallProgram(item)
|
||||
for item in configOptions["remove-path"]:
|
||||
|
||||
@ -8,6 +8,13 @@ import dependence
|
||||
import traceback
|
||||
import develop
|
||||
|
||||
# if (sys.version_info[0] >= 3):
|
||||
# unicode = str
|
||||
|
||||
# host_platform = base.host_platform()
|
||||
# if (host_platform == 'windows'):
|
||||
# import libwindows
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
def install_module(path):
|
||||
@ -21,11 +28,18 @@ def find_rabbitmqctl(base_path):
|
||||
return base.find_file(os.path.join(base_path, 'RabbitMQ Server'), 'rabbitmqctl.bat')
|
||||
|
||||
def restart_win_rabbit():
|
||||
# todo maybe restarting is not relevant after many years and versions?
|
||||
base.print_info('restart RabbitMQ node to prevent "Erl.exe high CPU usage every Monday morning on Windows" https://groups.google.com/forum/#!topic/rabbitmq-users/myl74gsYyYg')
|
||||
rabbitmqctl = find_rabbitmqctl(os.environ['PROGRAMW6432']) or find_rabbitmqctl(os.environ['ProgramFiles(x86)'])
|
||||
if rabbitmqctl is not None:
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
|
||||
try:
|
||||
# code = libwindows.sudo(unicode(sys.executable), ['net', 'stop', 'rabbitmq'])
|
||||
# code = libwindows.sudo(unicode(sys.executable), ['net', 'start', 'rabbitmq'])
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
|
||||
except SystemExit:
|
||||
base.print_error('Perhaps Erlang cookies are different: Replace %userprofile%/.erlang.cookie with %WINDIR%/System32/config/systemprofile/.erlang.cookie')
|
||||
raise
|
||||
else:
|
||||
base.print_info('Missing rabbitmqctl.bat')
|
||||
|
||||
@ -49,7 +63,18 @@ def run_integration_example():
|
||||
|
||||
def start_linux_services():
|
||||
base.print_info('Restart MySQL Server')
|
||||
|
||||
|
||||
|
||||
def update_config(args):
|
||||
platform = base.host_platform()
|
||||
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
|
||||
|
||||
if ("linux" == platform):
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
|
||||
else:
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
|
||||
|
||||
|
||||
def make_start():
|
||||
base.configure_common_apps()
|
||||
|
||||
@ -64,15 +89,8 @@ def make_start():
|
||||
start_linux_services()
|
||||
|
||||
def make_configure(args):
|
||||
platform = base.host_platform()
|
||||
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
|
||||
|
||||
base.print_info('Build modules')
|
||||
if ("linux" == platform):
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
|
||||
else:
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
|
||||
|
||||
update_config(args)
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
|
||||
def make_install():
|
||||
platform = base.host_platform()
|
||||
|
||||
@ -56,13 +56,6 @@ save text files with reports.
|
||||
"reportFolder": "build_tools/scripts/license_checker/reports"
|
||||
```
|
||||
|
||||
* `licensePath` specifies the path to the license template.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"licensePath": "build_tools/scripts/license_checker/license_template.txt"
|
||||
```
|
||||
|
||||
* `printChecking` specifies whether to output
|
||||
information about which file is
|
||||
being checked to the console.
|
||||
@ -110,36 +103,14 @@ Possible array values:
|
||||
```json
|
||||
"fileExtensions": [".js"]
|
||||
```
|
||||
|
||||
* `licensePath` specifies the path to the license template.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"licensePath": "header.license"
|
||||
```
|
||||
|
||||
* `startMultiComm` the line that starts the multiline comment.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"startMultiComm": "/*"
|
||||
```
|
||||
|
||||
* `endMultiComm` the line that ends the multiline comment.
|
||||
You should carefully consider the formatting
|
||||
of the string, all spaces are taken into account.
|
||||
This affects how the license check works.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"endMultiComm": " */"
|
||||
```
|
||||
|
||||
Space at the beginning for a prettier comment.
|
||||
|
||||
* `prefix` the line on which each comment
|
||||
line will begin, except for the
|
||||
beginning and end.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"prefix": " *"
|
||||
```
|
||||
|
||||
Space at the beginning for a prettier comment.
|
||||
* `ignoreListDir` folder paths to ignore.
|
||||
**For example:**
|
||||
|
||||
@ -172,6 +143,18 @@ Possible array values:
|
||||
]
|
||||
```
|
||||
|
||||
* `allowListFile` file paths to allow. It is needed if you ignore the directory, but there is a file in it that needs to be checked.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"ignoreListDir": [
|
||||
"sdkjs/develop"
|
||||
],
|
||||
"allowListFile": [
|
||||
"sdkjs/develop/awesomeFileToAllow.js",
|
||||
]
|
||||
```
|
||||
|
||||
Any number of configurations can be
|
||||
specified, they can overlap
|
||||
if we need to check
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
{
|
||||
"basePath": "../../../",
|
||||
"reportFolder": "build_tools/scripts/license_checker/reports",
|
||||
"licensePath": "build_tools/scripts/license_checker/header.license",
|
||||
"printChecking": false,
|
||||
"printReports": false,
|
||||
"fix": ["OUTDATED"],
|
||||
@ -9,9 +8,7 @@
|
||||
{
|
||||
"dir": "core",
|
||||
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"core/build",
|
||||
"core/Common/cfcpp/test",
|
||||
@ -24,14 +21,15 @@
|
||||
"core/DesktopEditor/raster/JBig2",
|
||||
"core/DesktopEditor/raster/Jp2",
|
||||
"core/DesktopEditor/xml/libxml2",
|
||||
"core/DesktopEditor/xmlsec",
|
||||
"core/DesktopEditor/xmlsec",
|
||||
"core/DjVuFile/libdjvu",
|
||||
"core/DjVuFile/wasm",
|
||||
"core/EpubFile",
|
||||
"core/OOXML/PPTXFormat/Limit/pri",
|
||||
"core/Fb2File",
|
||||
"core/HtmlFile2",
|
||||
"core/HtmlFile2",
|
||||
"core/Apple",
|
||||
"core/HwpFile",
|
||||
"core/OdfFile/Common/utf8cpp",
|
||||
"core/OfficeUtils/js/emsdk",
|
||||
"core/OfficeUtils/src/zlib-1.2.11",
|
||||
@ -59,9 +57,7 @@
|
||||
{
|
||||
"dir": "core-ext",
|
||||
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"core-ext/AutoTester",
|
||||
"core-ext/cell_android",
|
||||
@ -83,9 +79,7 @@
|
||||
{
|
||||
"dir": "sdkjs",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"sdkjs/deploy",
|
||||
"sdkjs/develop",
|
||||
@ -106,9 +100,7 @@
|
||||
{
|
||||
"dir": "sdkjs-forms",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
@ -117,9 +109,7 @@
|
||||
{
|
||||
"dir": "sdkjs-ooxml",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
@ -128,9 +118,7 @@
|
||||
{
|
||||
"dir": "web-apps",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor",
|
||||
@ -138,6 +126,7 @@
|
||||
],
|
||||
"ignoreListDir": [
|
||||
"web-apps/apps/common/mobile",
|
||||
"web-apps/apps/common/main/lib/mods",
|
||||
"web-apps/apps/documenteditor/mobile",
|
||||
"web-apps/apps/spreadsheeteditor/mobile",
|
||||
"web-apps/apps/presentationeditor/mobile",
|
||||
@ -145,7 +134,6 @@
|
||||
],
|
||||
"ignoreListFile": [
|
||||
"web-apps/apps/api/documents/api.js",
|
||||
"web-apps/apps/common/main/lib/mods/perfect-scrollbar.js",
|
||||
"web-apps/apps/common/main/lib/core/application.js",
|
||||
"web-apps/apps/common/main/lib/core/keymaster.js",
|
||||
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
|
||||
@ -154,9 +142,7 @@
|
||||
{
|
||||
"dir": "web-apps-mobile",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
@ -165,9 +151,7 @@
|
||||
{
|
||||
"dir": "server",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"server/FileConverter/bin"
|
||||
],
|
||||
@ -178,9 +162,7 @@
|
||||
{
|
||||
"dir": "server-lockstorage",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
@ -188,9 +170,7 @@
|
||||
{
|
||||
"dir": "server-license",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
@ -198,9 +178,7 @@
|
||||
{
|
||||
"dir": "server-license-key",
|
||||
"fileExtensions": [".js"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
@ -208,9 +186,7 @@
|
||||
{
|
||||
"dir": "editors-ios",
|
||||
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
|
||||
"startMultiComm": "/*",
|
||||
"endMultiComm": " */",
|
||||
"prefix": " *",
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"vendor",
|
||||
"Vendor",
|
||||
@ -222,6 +198,11 @@
|
||||
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.h",
|
||||
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.m"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "editors-webview-ios",
|
||||
"fileExtensions": [".swift", ".xcconfig"],
|
||||
"licensePath": "header.license",
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -1,28 +1,31 @@
|
||||
(c) Copyright Ascensio System SIA 2010-2023
|
||||
|
||||
This program is a free software product. You can redistribute it and/or
|
||||
modify it under the terms of the GNU Affero General Public License (AGPL)
|
||||
version 3 as published by the Free Software Foundation. In accordance with
|
||||
Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
|
||||
that Ascensio System SIA expressly excludes the warranty of non-infringement
|
||||
of any third-party rights.
|
||||
|
||||
This program is distributed WITHOUT ANY WARRANTY; without even the implied
|
||||
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
|
||||
details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
|
||||
|
||||
You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
|
||||
street, Riga, Latvia, EU, LV-1050.
|
||||
|
||||
The interactive user interfaces in modified source and object code versions
|
||||
of the Program must display Appropriate Legal Notices, as required under
|
||||
Section 5 of the GNU AGPL version 3.
|
||||
|
||||
Pursuant to Section 7(b) of the License you must retain the original Product
|
||||
logo when distributing the program. Pursuant to Section 7(e) we decline to
|
||||
grant you any rights under trademark law for use of our trademarks.
|
||||
|
||||
All the Product's GUI elements, including illustrations and icon sets, as
|
||||
well as technical writing content are licensed under the terms of the
|
||||
Creative Commons Attribution-ShareAlike 4.0 International. See the License
|
||||
terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
|
||||
/*
|
||||
* (c) Copyright Ascensio System SIA 2010-2025
|
||||
*
|
||||
* This program is a free software product. You can redistribute it and/or
|
||||
* modify it under the terms of the GNU Affero General Public License (AGPL)
|
||||
* version 3 as published by the Free Software Foundation. In accordance with
|
||||
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
|
||||
* that Ascensio System SIA expressly excludes the warranty of non-infringement
|
||||
* of any third-party rights.
|
||||
*
|
||||
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
|
||||
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
|
||||
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
|
||||
*
|
||||
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
|
||||
* street, Riga, Latvia, EU, LV-1050.
|
||||
*
|
||||
* The interactive user interfaces in modified source and object code versions
|
||||
* of the Program must display Appropriate Legal Notices, as required under
|
||||
* Section 5 of the GNU AGPL version 3.
|
||||
*
|
||||
* Pursuant to Section 7(b) of the License you must retain the original Product
|
||||
* logo when distributing the program. Pursuant to Section 7(e) we decline to
|
||||
* grant you any rights under trademark law for use of our trademarks.
|
||||
*
|
||||
* All the Product's GUI elements, including illustrations and icon sets, as
|
||||
* well as technical writing content are licensed under the terms of the
|
||||
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
|
||||
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
|
||||
*
|
||||
*/
|
||||
|
||||
@ -25,9 +25,6 @@ class Config(object):
|
||||
Attributes:
|
||||
dir: Directory to check.
|
||||
fileExtensions: file extensions to check.
|
||||
startMultiComm: characters to start a multi-line comment.
|
||||
endMultiComm: characters to end a multi-line comment.
|
||||
prefix: prefix for multiline comments
|
||||
ignoreListDir: Ignored folder paths.
|
||||
ignoreListDirName: Ignored folder names.
|
||||
ignoreListFile: Ignored file paths.
|
||||
@ -36,9 +33,7 @@ class Config(object):
|
||||
def __init__(self,
|
||||
dir: str,
|
||||
fileExtensions: list[str],
|
||||
startMultiComm: str,
|
||||
endMultiComm: str,
|
||||
prefix: str = '',
|
||||
licensePath: str = 'header.license',
|
||||
allowListFile: list[str] = [],
|
||||
ignoreListDir: list[str] = [],
|
||||
ignoreListDirName: list[str] = [],
|
||||
@ -46,13 +41,19 @@ class Config(object):
|
||||
|
||||
self._dir = dir
|
||||
self._fileExtensions = fileExtensions
|
||||
self._startMultiComm = startMultiComm
|
||||
self._endMultiComm = endMultiComm
|
||||
self._prefix = prefix
|
||||
self._allowListFile = allowListFile
|
||||
self._ignoreListDir = ignoreListDir
|
||||
self._ignoreListDirName = ignoreListDirName
|
||||
self._ignoreListFile = ignoreListFile
|
||||
"""Read license template."""
|
||||
with open(licensePath, 'r', encoding="utf8") as file:
|
||||
lines = file.readlines()
|
||||
if not lines:
|
||||
raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is not it empty?')
|
||||
non_empty_lines = [s for s in lines if not s.isspace()]
|
||||
self._startMultiComm = non_empty_lines[0]
|
||||
self._endMultiComm = non_empty_lines[-1]
|
||||
self._license_lines = lines
|
||||
|
||||
def getDir(self) -> str:
|
||||
return self._dir
|
||||
@ -62,8 +63,8 @@ class Config(object):
|
||||
return self._startMultiComm
|
||||
def getEndMultiComm(self) -> str:
|
||||
return self._endMultiComm
|
||||
def getPrefix(self) -> str:
|
||||
return self._prefix
|
||||
def getLicense(self) -> list[str]:
|
||||
return self._license_lines
|
||||
def getAllowListFile(self) -> list[str]:
|
||||
return self._allowListFile
|
||||
def getIgnoreListDir(self) -> list[str]:
|
||||
@ -77,7 +78,6 @@ with open(CONFIG_PATH, 'r') as j:
|
||||
_json: dict = json.load(j)
|
||||
BASE_PATH: str = _json.get('basePath') or '../../../'
|
||||
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'
|
||||
LICENSE_TEMPLATE_PATH: str = _json.get('licensePath') or 'build_tools/scripts/license_checker/header.license'
|
||||
if (_json.get('fix')):
|
||||
try:
|
||||
FIX: list[ErrorType] = list(map(lambda x: FIX_TYPES[x], _json.get('fix')))
|
||||
@ -93,23 +93,6 @@ with open(CONFIG_PATH, 'r') as j:
|
||||
|
||||
os.chdir(BASE_PATH)
|
||||
|
||||
with open(LICENSE_TEMPLATE_PATH, 'r') as f:
|
||||
LICENSE: list[str] = f.readlines()
|
||||
if not LICENSE:
|
||||
raise Exception(f'Error getting license template. Cannot read {LICENSE_TEMPLATE_PATH} file. Is not it empty?')
|
||||
|
||||
def getLicense(start: str, prefix: str, end: str) -> list[str]:
|
||||
"""Returns a valid license for any kind of comment prefix."""
|
||||
result = [start]
|
||||
for i in LICENSE:
|
||||
if i == '\n':
|
||||
result.append(prefix)
|
||||
else:
|
||||
result.append(f'{" ".join([prefix, i.strip()])}')
|
||||
result.append(prefix)
|
||||
result.append(end)
|
||||
return result
|
||||
|
||||
class Error(object):
|
||||
def __init__(self, errorType: ErrorType) -> None:
|
||||
self._errorType = errorType
|
||||
@ -144,8 +127,6 @@ class Checker(object):
|
||||
self._reports: list[Report] = []
|
||||
def getReports(self):
|
||||
return self._reports
|
||||
def getLicense(self):
|
||||
return getLicense(start=self._config.getStartMultiComm(), prefix=self._config.getPrefix(), end=self._config.getEndMultiComm())
|
||||
def _checkLine(self, line: str, prefix: str) -> bool:
|
||||
"""Checks if a line has a prefix."""
|
||||
"""Trim to catch invalid license without leading spaces"""
|
||||
@ -172,7 +153,7 @@ class Checker(object):
|
||||
break
|
||||
return result
|
||||
def _checkLicense(self, test: list[str], pathToFile: str) -> Report:
|
||||
license = self.getLicense()
|
||||
license = self._config.getLicense()
|
||||
if len(license) != len(test):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.LEN_MISMATCH),
|
||||
@ -180,32 +161,29 @@ class Checker(object):
|
||||
invalidLinesCount = 0
|
||||
lastWrongLine = 0
|
||||
for i in range(len(license)):
|
||||
if (license[i] != test[i].strip('\n')):
|
||||
if (license[i] != test[i]):
|
||||
invalidLinesCount += 1
|
||||
lastWrongLine = i
|
||||
if (invalidLinesCount == 1):
|
||||
r = r'\d\d\d\d\-\d\d\d\d'
|
||||
testDate = re.search(r, test[lastWrongLine])
|
||||
licenseDate = re.search(r, license[lastWrongLine])
|
||||
r = r'\d\d\d\d'
|
||||
testDate = re.findall(r, test[lastWrongLine])
|
||||
licenseDate = re.findall(r, license[lastWrongLine])
|
||||
|
||||
if testDate and licenseDate:
|
||||
testDate = testDate.group()
|
||||
licenseDate = licenseDate.group()
|
||||
else:
|
||||
if not (testDate and licenseDate):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.INVALID_LICENSE),
|
||||
message=f'Something wrong...')
|
||||
|
||||
testLastYear = testDate.split('-')[1]
|
||||
licenseLastYear = licenseDate.split('-')[1]
|
||||
if (int(testLastYear) < int(licenseLastYear)):
|
||||
testLastYear = int(testDate[-1])
|
||||
licenseLastYear = int(licenseDate[-1])
|
||||
if (testLastYear < licenseLastYear):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.OUTDATED),
|
||||
message=f'Found date {testDate}, expected {licenseDate}')
|
||||
message=f'Found date {testLastYear}, expected {licenseLastYear}')
|
||||
else:
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.INVALID_LICENSE),
|
||||
message=f"Found something similar to the date: {testDate}, but it's not correct. Expected: {licenseDate}")
|
||||
message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
|
||||
elif (invalidLinesCount > 0):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.INVALID_LICENSE),
|
||||
@ -286,8 +264,9 @@ class Fixer(object):
|
||||
with open(pathToFile, 'r', encoding="utf8") as file:
|
||||
buffer = file.readlines()
|
||||
with open(pathToFile, 'w', encoding="utf8") as file:
|
||||
license = self._checker.getLicense()
|
||||
file.writelines(map(lambda x: "".join([x, '\n']), license))
|
||||
license = self._config.getLicense()
|
||||
file.writelines(license)
|
||||
file.write('\n')
|
||||
file.writelines(buffer)
|
||||
return
|
||||
def _fixLicense(self, pathToFile: str):
|
||||
@ -301,8 +280,8 @@ class Fixer(object):
|
||||
for i in oldLicense:
|
||||
buffer.remove(i)
|
||||
with open(pathToFile, 'w', encoding=writeEncoding) as file:
|
||||
license = self._checker.getLicense()
|
||||
file.writelines(map(lambda x: "".join([x, '\n']), license))
|
||||
license = self._config.getLicense()
|
||||
file.writelines(license)
|
||||
file.writelines(buffer)
|
||||
return
|
||||
|
||||
@ -328,7 +307,7 @@ def writeReports(reports: list[Report]) -> None:
|
||||
for i in reports:
|
||||
files[i.getError().getErrorType().name].append(i)
|
||||
for i in ErrorType:
|
||||
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w') as f:
|
||||
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w', encoding="utf8") as f:
|
||||
f.writelines(map(lambda x: "".join([x.report(), '\n']), files.get(i.name)))
|
||||
|
||||
for config in CONFIGS:
|
||||
|
||||
@ -17,10 +17,7 @@ if utils.is_windows():
|
||||
desktop_product_name = "Desktop Editors"
|
||||
desktop_product_name_s = desktop_product_name.replace(" ","")
|
||||
desktop_package_name = company_name + "-" + desktop_product_name_s
|
||||
desktop_vcredist_list = ["2022"]
|
||||
desktop_changes_dir = "desktop-apps/win-linux/package/windows/update/changes"
|
||||
desktop_changes_url = "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/changes"
|
||||
desktop_updates_url = "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/updates"
|
||||
|
||||
if utils.is_macos():
|
||||
desktop_package_name = "ONLYOFFICE"
|
||||
@ -33,6 +30,59 @@ if utils.is_macos():
|
||||
builder_product_name = "Document Builder"
|
||||
|
||||
if utils.is_linux():
|
||||
desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"]
|
||||
builder_make_targets = ["deb", "rpm"] # tar
|
||||
server_make_targets = ["deb", "rpm", "tar"]
|
||||
builder_make_targets = [
|
||||
{
|
||||
"make": "tar",
|
||||
"src": "tar/*.tar*",
|
||||
"dst": "builder/linux/generic/"
|
||||
},
|
||||
{
|
||||
"make": "deb",
|
||||
"src": "deb/*.deb",
|
||||
"dst": "builder/linux/debian/"
|
||||
},
|
||||
{
|
||||
"make": "rpm",
|
||||
"src": "rpm/build/RPMS/*/*.rpm",
|
||||
"dst": "builder/linux/rhel/"
|
||||
}
|
||||
]
|
||||
desktop_make_targets = [
|
||||
{
|
||||
"make": "tar",
|
||||
"src": "tar/*.tar*",
|
||||
"dst": "desktop/linux/generic/"
|
||||
},
|
||||
{
|
||||
"make": "deb",
|
||||
"src": "deb/*.deb",
|
||||
"dst": "desktop/linux/debian/"
|
||||
},
|
||||
{
|
||||
"make": "rpm",
|
||||
"src": "rpm/build/RPMS/*/*.rpm",
|
||||
"dst": "desktop/linux/rhel/"
|
||||
},
|
||||
{
|
||||
"make": "rpm-suse",
|
||||
"src": "rpm-suse/build/RPMS/*/*.rpm",
|
||||
"dst": "desktop/linux/suse/"
|
||||
}
|
||||
]
|
||||
server_make_targets = [
|
||||
{
|
||||
"make": "deb",
|
||||
"src": "deb/*.deb",
|
||||
"dst": "server/linux/debian/"
|
||||
},
|
||||
{
|
||||
"make": "rpm",
|
||||
"src": "rpm/builddir/RPMS/*/*.rpm",
|
||||
"dst": "server/linux/rhel/"
|
||||
},
|
||||
{
|
||||
"make": "tar",
|
||||
"src": "*.tar*",
|
||||
"dst": "server/linux/snap/"
|
||||
}
|
||||
]
|
||||
|
||||
@ -7,14 +7,17 @@ import package_branding as branding
|
||||
|
||||
def make():
|
||||
utils.log_h1("BUILDER")
|
||||
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
if common.deploy:
|
||||
make_archive()
|
||||
if utils.is_windows():
|
||||
make_windows()
|
||||
elif utils.is_macos():
|
||||
make_macos()
|
||||
elif utils.is_linux():
|
||||
make_linux()
|
||||
else:
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
|
||||
def s3_upload(files, dst):
|
||||
@ -22,117 +25,135 @@ def s3_upload(files, dst):
|
||||
ret = True
|
||||
for f in files:
|
||||
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
|
||||
aws_kwargs = { "acl": "public-read" }
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
aws_kwargs["endpoint_url"] = branding.s3_endpoint_url
|
||||
upload = utils.s3_upload(
|
||||
f, "s3://" + branding.s3_bucket + "/" + key, **aws_kwargs)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if upload:
|
||||
utils.add_deploy_data(key)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
ret &= upload
|
||||
return ret
|
||||
|
||||
def make_windows():
|
||||
global inno_file, zip_file, suffix, key_prefix
|
||||
utils.set_cwd("document-builder-package")
|
||||
def make_archive():
|
||||
utils.set_cwd(utils.get_path(
|
||||
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
|
||||
|
||||
prefix = common.platformPrefixes[common.platform]
|
||||
company = branding.company_name
|
||||
product = branding.builder_product_name.replace(" ","")
|
||||
source_dir = "..\\build_tools\\out\\%s\\%s\\%s" % (prefix, company, product)
|
||||
package_name = company + "-" + product
|
||||
package_version = common.version + "." + common.build
|
||||
suffix = {
|
||||
"windows_x64": "x64",
|
||||
"windows_x86": "x86"
|
||||
}[common.platform]
|
||||
zip_file = "%s-%s-%s-%s.zip" % (company, product, package_version, suffix)
|
||||
inno_file = "%s-%s-%s-%s.exe" % (company, product, package_version, suffix)
|
||||
utils.log_h2("builder archive build")
|
||||
utils.delete_file("builder.7z")
|
||||
args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"]
|
||||
if utils.is_windows():
|
||||
ret = utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = utils.sh(" ".join(args), verbose=True)
|
||||
utils.set_summary("builder archive build", ret)
|
||||
|
||||
if common.clean:
|
||||
utils.log_h2("builder clean")
|
||||
utils.delete_dir("build")
|
||||
|
||||
utils.log_h2("copy arifacts")
|
||||
utils.create_dir("build\\app")
|
||||
utils.copy_dir_content(source_dir, "build\\app\\")
|
||||
|
||||
make_zip()
|
||||
make_inno()
|
||||
utils.log_h2("builder archive deploy")
|
||||
dest = "builder-" + common.prefix.replace("_","-") + ".7z"
|
||||
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
|
||||
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
|
||||
ret = utils.s3_upload(
|
||||
"builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
|
||||
utils.set_summary("builder archive deploy", ret)
|
||||
if ret:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
|
||||
utils.s3_copy(
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version,
|
||||
"s3://" + branding.s3_bucket + "/" + dest_latest)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_zip():
|
||||
utils.log_h2("builder zip build")
|
||||
utils.log_h3(zip_file)
|
||||
def make_windows():
|
||||
global package_version, arch
|
||||
utils.set_cwd("document-builder-package")
|
||||
|
||||
ret = utils.cmd("7z", "a", "-y", zip_file, ".\\app\\*",
|
||||
chdir="build", creates="build\\" + zip_file, verbose=True)
|
||||
package_version = common.version + "." + common.build
|
||||
arch = {
|
||||
"windows_x64": "x64",
|
||||
"windows_x86": "x86"
|
||||
}[common.platform]
|
||||
|
||||
if common.clean:
|
||||
utils.log_h2("builder clean")
|
||||
utils.delete_dir("build")
|
||||
utils.delete_files("exe\\*.exe")
|
||||
utils.delete_files("zip\\*.msi")
|
||||
|
||||
if make_prepare():
|
||||
make_zip()
|
||||
make_inno()
|
||||
else:
|
||||
utils.set_summary("builder zip build", False)
|
||||
utils.set_summary("builder inno build", False)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_prepare():
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
if common.sign:
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("builder prepare")
|
||||
ret = utils.ps1("make.ps1", args, verbose=True)
|
||||
utils.set_summary("builder prepare", ret)
|
||||
return ret
|
||||
|
||||
def make_zip():
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
# if common.sign:
|
||||
# args += ["-Sign"]
|
||||
|
||||
utils.log_h2("builder zip build")
|
||||
ret = utils.ps1("make_zip.ps1", args, verbose=True)
|
||||
utils.set_summary("builder zip build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("builder zip deploy")
|
||||
ret = s3_upload(["build\\" + zip_file], "builder/win/generic/")
|
||||
ret = s3_upload(utils.glob_path("zip/*.zip"), "builder/win/generic/")
|
||||
utils.set_summary("builder zip deploy", ret)
|
||||
return
|
||||
|
||||
def make_inno():
|
||||
utils.log_h2("builder inno build")
|
||||
utils.log_h3(inno_file)
|
||||
|
||||
args = [
|
||||
"-Arch", suffix,
|
||||
"-Version", common.version,
|
||||
"-Build", common.build
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
if not branding.onlyoffice:
|
||||
args += [
|
||||
"-Branding", "%s\\%s\\document-builder-package\\exe" % (common.workspace_dir, common.branding)
|
||||
]
|
||||
args += ["-Branding", common.branding]
|
||||
if common.sign:
|
||||
args += [
|
||||
"-Sign",
|
||||
"-CertName", branding.cert_name
|
||||
]
|
||||
ret = utils.ps1(
|
||||
"make_inno.ps1", args, creates="build\\" + inno_file, verbose=True
|
||||
)
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("builder inno build")
|
||||
ret = utils.ps1("make_inno.ps1", args, verbose=True)
|
||||
utils.set_summary("builder inno build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("builder inno deploy")
|
||||
ret = s3_upload(["build\\" + inno_file], "builder/win/inno/")
|
||||
ret = s3_upload(utils.glob_path("exe/*.exe"), "builder/win/inno/")
|
||||
utils.set_summary("builder inno deploy", ret)
|
||||
return
|
||||
|
||||
def make_macos():
|
||||
company = branding.company_name.lower()
|
||||
product = branding.builder_product_name.replace(" ","").lower()
|
||||
source_dir = "build_tools/out/%s/%s/%s" % (common.prefix, company, product)
|
||||
arch_list = {
|
||||
"darwin_x86_64": "x86_64",
|
||||
"darwin_arm64": "arm64"
|
||||
}
|
||||
suffix = arch_list[common.platform]
|
||||
builder_tar = "../%s-%s-%s-%s-%s.tar.xz" % \
|
||||
(company, product, common.version, common.build, suffix)
|
||||
utils.set_cwd("document-builder-package")
|
||||
|
||||
utils.set_cwd(source_dir)
|
||||
utils.log_h2("builder tar build")
|
||||
make_args = ["tar"]
|
||||
if common.platform == "darwin_arm64":
|
||||
make_args += ["-e", "UNAME_M=arm64"]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"]
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("builder tar build", ret)
|
||||
|
||||
if common.clean:
|
||||
utils.log_h2("builder clean")
|
||||
utils.delete_files("../*.tar*")
|
||||
|
||||
utils.log_h2("builder build")
|
||||
ret = utils.sh("tar --xz -cvf %s *" % builder_tar, creates=builder_tar, verbose=True)
|
||||
utils.set_summary("builder build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("builder deploy")
|
||||
ret = s3_upload([builder_tar], "builder/mac/")
|
||||
utils.set_summary("builder deploy", ret)
|
||||
if common.deploy:
|
||||
utils.log_h2("builder tar deploy")
|
||||
ret = s3_upload(utils.glob_path("tar/*.tar.xz"), "builder/mac/generic/")
|
||||
utils.set_summary("builder tar deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
@ -141,7 +162,7 @@ def make_linux():
|
||||
utils.set_cwd("document-builder-package")
|
||||
|
||||
utils.log_h2("builder build")
|
||||
make_args = branding.builder_make_targets
|
||||
make_args = [t["make"] for t in branding.builder_make_targets]
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
@ -150,32 +171,10 @@ def make_linux():
|
||||
utils.set_summary("builder build", ret)
|
||||
|
||||
if common.deploy:
|
||||
if ret:
|
||||
if "tar" in branding.builder_make_targets:
|
||||
utils.log_h2("builder tar deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("tar/*.tar.gz"),
|
||||
"builder/linux/generic/")
|
||||
utils.set_summary("builder tar deploy", ret)
|
||||
if "deb" in branding.builder_make_targets:
|
||||
utils.log_h2("builder deb deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("deb/*.deb"),
|
||||
"builder/linux/debian/")
|
||||
utils.set_summary("builder deb deploy", ret)
|
||||
if "rpm" in branding.builder_make_targets:
|
||||
utils.log_h2("builder rpm deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
|
||||
"builder/linux/rhel/")
|
||||
utils.set_summary("builder rpm deploy", ret)
|
||||
else:
|
||||
if "tar" in branding.builder_make_targets:
|
||||
utils.set_summary("builder tar deploy", False)
|
||||
if "deb" in branding.builder_make_targets:
|
||||
utils.set_summary("builder deb deploy", False)
|
||||
if "rpm" in branding.builder_make_targets:
|
||||
utils.set_summary("builder rpm deploy", False)
|
||||
for t in branding.builder_make_targets:
|
||||
utils.log_h2("builder " + t["make"] + " deploy")
|
||||
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
|
||||
utils.set_summary("builder " + t["make"] + " deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
@ -15,16 +15,3 @@ platformPrefixes = {
|
||||
|
||||
out_dir = "build_tools/out"
|
||||
tsa_server = "http://timestamp.digicert.com"
|
||||
vcredist_links = {
|
||||
# Microsoft Visual C++ 2015-2022 Redistributable - 14.38.33130
|
||||
"2022": {
|
||||
"x64": {
|
||||
"url": "https://aka.ms/vs/17/release/vc_redist.x64.exe",
|
||||
"md5": "101b0b9f74cdc6cdbd2570bfe92e302c"
|
||||
},
|
||||
"x86": {
|
||||
"url": "https://aka.ms/vs/17/release/vc_redist.x86.exe",
|
||||
"md5": "0d762264d9765e21c15a58edc43f4706"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -10,78 +10,84 @@ def make():
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
if common.deploy:
|
||||
make_core()
|
||||
make_archive()
|
||||
return
|
||||
|
||||
def make_core():
|
||||
prefix = common.platformPrefixes[common.platform]
|
||||
company = branding.company_name.lower()
|
||||
repos = {
|
||||
"windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." + common.build },
|
||||
"windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." + common.build },
|
||||
"darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build },
|
||||
"darwin_arm64": { "repo": "mac", "arch": "arm", "version": common.version + "-" + common.build },
|
||||
"linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build },
|
||||
}
|
||||
repo = repos[common.platform]
|
||||
branch = utils.get_env("BRANCH_NAME")
|
||||
core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company))
|
||||
dest_version = "%s/core/%s/%s/%s" % (repo["repo"], branch, repo["version"], repo["arch"])
|
||||
dest_latest = "%s/core/%s/%s/%s" % (repo["repo"], branch, "latest", repo["arch"])
|
||||
def make_archive():
|
||||
utils.set_cwd(utils.get_path(
|
||||
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
|
||||
|
||||
if branch is None:
|
||||
utils.log_err("BRANCH_NAME variable is undefined")
|
||||
utils.set_summary("core deploy", False)
|
||||
return
|
||||
if not utils.is_file(core_7z):
|
||||
utils.log_err("file not exist: " + core_7z)
|
||||
utils.set_summary("core deploy", False)
|
||||
return
|
||||
utils.log_h2("core archive build")
|
||||
utils.delete_file("core.7z")
|
||||
args = ["7z", "a", "-y", "core.7z", "./core/*"]
|
||||
if utils.is_windows():
|
||||
ret = utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = utils.sh(" ".join(args), verbose=True)
|
||||
utils.set_summary("core archive build", ret)
|
||||
|
||||
utils.log_h2("core deploy")
|
||||
aws_kwargs = { "acl": "public-read" }
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
aws_kwargs["endpoint_url"]=branding.s3_endpoint_url
|
||||
utils.log_h2("core archive deploy")
|
||||
dest = "core-" + common.prefix.replace("_","-") + ".7z"
|
||||
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
|
||||
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
|
||||
ret = utils.s3_upload(
|
||||
core_7z,
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version + "/core.7z",
|
||||
**aws_kwargs)
|
||||
"core.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
|
||||
utils.set_summary("core archive deploy", ret)
|
||||
if ret:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_version + "/core.7z")
|
||||
utils.add_deploy_data(dest_version + "/core.7z")
|
||||
ret = utils.s3_sync(
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version + "/",
|
||||
"s3://" + branding.s3_bucket + "/" + dest_latest + "/",
|
||||
delete=True, **aws_kwargs)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest + "/core.7z")
|
||||
utils.set_summary("core deploy", ret)
|
||||
return
|
||||
|
||||
def deploy_closuremaps(license):
|
||||
if not common.deploy: return
|
||||
utils.log_h1("CLOSURE MAPS")
|
||||
utils.set_cwd(utils.get_path("sdkjs/build/maps"))
|
||||
|
||||
maps = utils.glob_path("*.js.map")
|
||||
if not maps:
|
||||
utils.log_err("files do not exist")
|
||||
utils.set_summary("closure maps " + license + " deploy", False)
|
||||
return
|
||||
|
||||
utils.log_h2("closure maps " + license + " deploy")
|
||||
aws_kwargs = {}
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
aws_kwargs["endpoint_url"]=branding.s3_endpoint_url
|
||||
ret = True
|
||||
for f in maps:
|
||||
key = "closure-maps/%s/%s/%s/%s" % (license, common.version, common.build, f)
|
||||
upload = utils.s3_upload(
|
||||
f, "s3://" + branding.s3_bucket + "/" + key, **aws_kwargs)
|
||||
ret &= upload
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.add_deploy_data(key)
|
||||
utils.set_summary("closure maps " + license + " deploy", ret)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
|
||||
utils.s3_copy(
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version,
|
||||
"s3://" + branding.s3_bucket + "/" + dest_latest)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def deploy_closuremaps_sdkjs(license):
|
||||
if not common.deploy: return
|
||||
utils.log_h1("SDKJS CLOSURE MAPS")
|
||||
|
||||
maps = utils.glob_path("sdkjs/build/maps/*.js.map")
|
||||
if maps:
|
||||
for m in maps: utils.log("- " + m)
|
||||
else:
|
||||
utils.log_err("files do not exist")
|
||||
utils.set_summary("sdkjs closure maps %s deploy" % license, False)
|
||||
return
|
||||
|
||||
utils.log_h2("sdkjs closure maps %s deploy" % license)
|
||||
ret = True
|
||||
for f in maps:
|
||||
base = utils.get_basename(f)
|
||||
key = "closure-maps/sdkjs/%s/%s/%s/%s" % (license, common.version, common.build, base)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
ret &= upload
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.set_summary("sdkjs closure maps %s deploy" % license, ret)
|
||||
return
|
||||
|
||||
def deploy_closuremaps_webapps(license):
|
||||
if not common.deploy: return
|
||||
utils.log_h1("WEB-APPS CLOSURE MAPS")
|
||||
|
||||
maps = utils.glob_path("web-apps/deploy/web-apps/apps/*/*/*.js.map") \
|
||||
+ utils.glob_path("web-apps/deploy/web-apps/apps/*/mobile/dist/js/*.js.map")
|
||||
if maps:
|
||||
for m in maps: utils.log("- " + m)
|
||||
else:
|
||||
utils.log_err("files do not exist")
|
||||
utils.set_summary("web-apps closure maps %s deploy" % license, False)
|
||||
return
|
||||
|
||||
utils.log_h2("web-apps closure maps %s deploy" % license)
|
||||
ret = True
|
||||
for f in maps:
|
||||
base = utils.get_relpath(f, "web-apps/deploy/web-apps/apps").replace("/", "_")
|
||||
key = "closure-maps/web-apps/%s/%s/%s/%s" % (license, common.version, common.build, base)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
ret &= upload
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.set_summary("web-apps closure maps %s deploy" % license, ret)
|
||||
return
|
||||
|
||||
@ -24,13 +24,8 @@ def s3_upload(files, dst):
|
||||
ret = True
|
||||
for f in files:
|
||||
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
|
||||
aws_kwargs = { "acl": "public-read" }
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
aws_kwargs["endpoint_url"] = branding.s3_endpoint_url
|
||||
upload = utils.s3_upload(
|
||||
f, "s3://" + branding.s3_bucket + "/" + key, **aws_kwargs)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if upload:
|
||||
utils.add_deploy_data(key)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
ret &= upload
|
||||
return ret
|
||||
@ -40,153 +35,105 @@ def s3_upload(files, dst):
|
||||
#
|
||||
|
||||
def make_windows():
|
||||
global package_version, arch_list, source_dir, branding_dir, desktop_dir, viewer_dir, \
|
||||
inno_file, inno_sa_file, inno_update_file, inno_update_file_new, advinst_file
|
||||
global package_name, package_version, arch, xp, suffix
|
||||
utils.set_cwd("desktop-apps\\win-linux\\package\\windows")
|
||||
|
||||
package_name = branding.desktop_package_name
|
||||
package_version = common.version + "." + common.build
|
||||
arch_list = {
|
||||
arch = {
|
||||
"windows_x64": "x64",
|
||||
"windows_x64_xp": "x64",
|
||||
"windows_x86": "x86",
|
||||
"windows_x86_xp": "x86"
|
||||
}
|
||||
suffix = arch_list[common.platform]
|
||||
if common.platform.endswith("_xp"): suffix += "-xp"
|
||||
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
|
||||
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
|
||||
inno_update_file = "update\\editors_update_%s.exe" % suffix.replace("-","_")
|
||||
inno_update_file_new = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix)
|
||||
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
|
||||
if branding.onlyoffice:
|
||||
branding_dir = "."
|
||||
else:
|
||||
branding_dir = common.workspace_dir + "\\" + common.branding + "\\desktop-apps\\win-linux\\package\\windows"
|
||||
}[common.platform]
|
||||
xp = common.platform.endswith("_xp")
|
||||
suffix = arch + ("-xp" if xp else "")
|
||||
|
||||
if common.clean:
|
||||
utils.log_h2("desktop clean")
|
||||
utils.delete_dir("build")
|
||||
# utils.delete_dir("data\\vcredist")
|
||||
utils.delete_dir("DesktopEditors-cache")
|
||||
utils.delete_files("*.exe")
|
||||
utils.delete_files("*.msi")
|
||||
utils.delete_files("*.aic")
|
||||
utils.delete_files("*.tmp")
|
||||
utils.delete_files("*.zip")
|
||||
utils.delete_files("update\\*.exe")
|
||||
utils.delete_files("update\\*.xml")
|
||||
utils.delete_files("update\\*.html")
|
||||
|
||||
utils.log_h2("copy arifacts")
|
||||
source_dir = "%s\\build_tools\\out\\%s\\%s" \
|
||||
% (common.workspace_dir, common.prefix, branding.company_name)
|
||||
utils.create_dir("build")
|
||||
desktop_dir = "build\\" + branding.desktop_product_name_s
|
||||
utils.copy_dir(source_dir + "\\" + branding.desktop_product_name_s, desktop_dir)
|
||||
if not branding.onlyoffice:
|
||||
viewer_dir = "build\\" + branding.viewer_product_name_s
|
||||
utils.copy_dir(source_dir + "\\" + branding.viewer_product_name_s, viewer_dir)
|
||||
utils.delete_files("data\\*.exe")
|
||||
|
||||
make_prepare()
|
||||
make_zip()
|
||||
|
||||
if not download_vcredist("2022"):
|
||||
utils.set_summary("desktop inno build", False)
|
||||
utils.set_summary("desktop inno standalone build", False)
|
||||
utils.set_summary("desktop inno update build", False)
|
||||
utils.set_summary("desktop advinst build", False)
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
make_inno()
|
||||
|
||||
if common.platform == "windows_x64":
|
||||
make_update_files()
|
||||
|
||||
if common.platform in ["windows_x64", "windows_x86"]:
|
||||
make_advinst()
|
||||
make_advinst()
|
||||
make_online()
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_zip():
|
||||
utils.log_h2("desktop zip build")
|
||||
|
||||
def make_prepare():
|
||||
args = [
|
||||
"-Target", common.platform,
|
||||
"-BuildDir", "build",
|
||||
"-DesktopDir", branding.desktop_product_name_s
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
if not branding.onlyoffice:
|
||||
args += ["-MultimediaDir", branding.viewer_product_name_s]
|
||||
args += ["-BrandingDir", branding_dir]
|
||||
if branding.onlyoffice and not common.platform.endswith("_xp"):
|
||||
args += ["-ExcludeHelp"]
|
||||
if xp:
|
||||
args += ["-Target", "xp"]
|
||||
if common.sign:
|
||||
args += ["-Sign", "-CertName", branding.cert_name]
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("desktop prepare")
|
||||
ret = utils.ps1("make.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop prepare", ret)
|
||||
return
|
||||
|
||||
def make_zip():
|
||||
zip_file = "%s-%s-%s.zip" % (package_name, package_version, suffix)
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
if xp:
|
||||
args += ["-Target", "xp"]
|
||||
# if common.sign:
|
||||
# args += ["-Sign"]
|
||||
|
||||
utils.log_h2("desktop zip build")
|
||||
ret = utils.ps1("make_zip.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop zip build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop zip deploy")
|
||||
ret = s3_upload(utils.glob_path("*.zip"), "desktop/win/generic/")
|
||||
ret = s3_upload([zip_file], "desktop/win/generic/")
|
||||
utils.set_summary("desktop zip deploy", ret)
|
||||
return
|
||||
|
||||
def download_vcredist(year):
|
||||
utils.log_h2("vcredist " + year + " download")
|
||||
|
||||
arch = arch_list[common.platform]
|
||||
link = common.vcredist_links[year][arch]["url"]
|
||||
md5 = common.vcredist_links[year][arch]["md5"]
|
||||
vcredist_file = "data\\vcredist\\vcredist_%s_%s.exe" % (year, arch)
|
||||
|
||||
utils.log_h2(vcredist_file)
|
||||
utils.create_dir(utils.get_dirname(vcredist_file))
|
||||
ret = utils.download_file(link, vcredist_file, md5, verbose=True)
|
||||
utils.set_summary("vcredist " + year + " download", ret)
|
||||
return ret
|
||||
|
||||
def make_inno():
|
||||
utils.log_h2("desktop inno build")
|
||||
|
||||
inno_arch_list = {
|
||||
"windows_x64": "64",
|
||||
"windows_x86": "32",
|
||||
"windows_x64_xp": "64",
|
||||
"windows_x86_xp": "32"
|
||||
}
|
||||
iscc_args = [
|
||||
"/Qp",
|
||||
"/DVERSION=" + package_version,
|
||||
"/DsAppVersion=" + package_version,
|
||||
"/DDEPLOY_PATH=" + desktop_dir,
|
||||
"/DARCH=" + arch_list[common.platform],
|
||||
"/D_ARCH=" + inno_arch_list[common.platform],
|
||||
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
|
||||
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
|
||||
inno_update_file = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix)
|
||||
update_wrapper = not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper)
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
if branding.onlyoffice:
|
||||
iscc_args.append("/D_ONLYOFFICE=1")
|
||||
else:
|
||||
iscc_args.append("/DsBrandingFolder=" + \
|
||||
utils.get_abspath(common.workspace_dir + "\\" + common.branding + "\\desktop-apps"))
|
||||
if common.platform.endswith("_xp"):
|
||||
iscc_args.append("/D_WIN_XP=1")
|
||||
if common.sign:
|
||||
iscc_args.append("/DENABLE_SIGNING=1")
|
||||
iscc_args.append("/Sbyparam=signtool.exe sign /a /v /n $q" + \
|
||||
branding.cert_name + "$q /t " + common.tsa_server + " $f")
|
||||
args = ["iscc"] + iscc_args + ["common.iss"]
|
||||
ret = utils.cmd(*args, creates=inno_file, verbose=True)
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("desktop inno build")
|
||||
if xp:
|
||||
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp"], verbose=True)
|
||||
else:
|
||||
ret = utils.ps1("make_inno.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop inno build", ret)
|
||||
|
||||
if branding.onlyoffice and not common.platform.endswith("_xp"):
|
||||
args = ["iscc"] + iscc_args + ["/DEMBED_HELP", "/DsPackageEdition=Standalone", "common.iss"]
|
||||
ret = utils.cmd(*args, creates=inno_sa_file, verbose=True)
|
||||
if branding.onlyoffice and not xp:
|
||||
utils.log_h2("desktop inno standalone")
|
||||
ret = utils.ps1("make_inno.ps1", args + ["-Target", "standalone"], verbose=True)
|
||||
utils.set_summary("desktop inno standalone build", ret)
|
||||
|
||||
if not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper):
|
||||
args = ["iscc"] + iscc_args + ["/DTARGET_NAME=" + inno_file, "update_common.iss"]
|
||||
ret = utils.cmd(*args, creates=inno_update_file, verbose=True)
|
||||
if update_wrapper:
|
||||
utils.log_h2("desktop inno update build")
|
||||
if xp:
|
||||
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp_update"], verbose=True)
|
||||
else:
|
||||
ret = utils.ps1("make_inno.ps1", args + ["-Target", "update"], verbose=True)
|
||||
utils.set_summary("desktop inno update build", ret)
|
||||
|
||||
if common.deploy:
|
||||
@ -194,127 +141,34 @@ def make_inno():
|
||||
ret = s3_upload([inno_file], "desktop/win/inno/")
|
||||
utils.set_summary("desktop inno deploy", ret)
|
||||
|
||||
if branding.onlyoffice and not common.platform.endswith("_xp"):
|
||||
if branding.onlyoffice and not xp:
|
||||
utils.log_h2("desktop inno standalone deploy")
|
||||
ret = s3_upload([inno_sa_file], "desktop/win/inno/")
|
||||
utils.set_summary("desktop inno standalone deploy", ret)
|
||||
|
||||
utils.log_h2("desktop inno update deploy")
|
||||
if utils.is_file(inno_update_file):
|
||||
ret = s3_upload(
|
||||
[inno_update_file], "desktop/win/inno/" + inno_update_file_new)
|
||||
ret = s3_upload([inno_update_file], "desktop/win/inno/")
|
||||
elif utils.is_file(inno_file):
|
||||
ret = s3_upload(
|
||||
[inno_file], "desktop/win/inno/" + inno_update_file_new)
|
||||
ret = s3_upload([inno_file], "desktop/win/inno/" + inno_update_file)
|
||||
else:
|
||||
ret = False
|
||||
utils.set_summary("desktop inno update deploy", ret)
|
||||
return
|
||||
|
||||
def make_update_files():
|
||||
utils.log_h2("desktop update files build")
|
||||
|
||||
changes_dir = common.workspace_dir + "\\" + utils.get_path(branding.desktop_changes_dir) + "\\" + common.version
|
||||
|
||||
if common.deploy and utils.glob_path(changes_dir + "\\*.html"):
|
||||
utils.log_h2("desktop update files deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path(changes_dir + "\\*.html"),
|
||||
"desktop/win/update/%s/%s/" % (common.version, common.build))
|
||||
utils.set_summary("desktop update files deploy", ret)
|
||||
return
|
||||
|
||||
def make_advinst():
|
||||
utils.log_h2("desktop advinst build")
|
||||
|
||||
msi_build = {
|
||||
"windows_x64": "MsiBuild64",
|
||||
"windows_x86": "MsiBuild32"
|
||||
}[common.platform]
|
||||
|
||||
if not branding.onlyoffice:
|
||||
multimedia_dir = common.workspace_dir + "\\" + common.branding + "\\multimedia"
|
||||
utils.copy_file(branding_dir + "\\dictionary.ail", "dictionary.ail")
|
||||
utils.copy_dir_content(branding_dir + "\\data", "data", ".bmp")
|
||||
utils.copy_dir_content(branding_dir + "\\data", "data", ".png")
|
||||
utils.copy_dir_content(
|
||||
branding_dir + "\\..\\..\\extras\\projicons\\res",
|
||||
"..\\..\\extras\\projicons\\res",
|
||||
".ico")
|
||||
utils.copy_file(
|
||||
branding_dir + "\\..\\..\\..\\common\\package\\license\\eula_" + common.branding + ".rtf",
|
||||
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf")
|
||||
utils.copy_file(
|
||||
multimedia_dir + "\\imageviewer\\icons\\ico\\" + common.branding + ".ico",
|
||||
"..\\..\\extras\\projicons\\res\\gallery.ico")
|
||||
utils.copy_file(
|
||||
multimedia_dir + "\\videoplayer\\icons\\" + common.branding + ".ico",
|
||||
"..\\..\\extras\\projicons\\res\\media.ico")
|
||||
|
||||
utils.write_file(desktop_dir + "\\converter\\package.config", "package=msi")
|
||||
|
||||
aic_content = [";aic"]
|
||||
if not common.sign:
|
||||
aic_content += [
|
||||
"ResetSig"
|
||||
]
|
||||
if branding.onlyoffice:
|
||||
for path in utils.glob_path(desktop_dir + "\\editors\\web-apps\\apps\\*\\main\\resources\\help"):
|
||||
utils.delete_dir(path)
|
||||
aic_content += [
|
||||
"DelFolder CUSTOM_PATH"
|
||||
]
|
||||
else:
|
||||
aic_content += [
|
||||
"SetProperty UpgradeCode=\"" + branding.desktop_upgrade_code + "\"",
|
||||
"AddUpgradeCode {47EEF706-B0E4-4C43-944B-E5F914B92B79} \
|
||||
-min_ver 7.1.1 -include_min_ver \
|
||||
-max_ver 7.2.2 -include_max_ver \
|
||||
-include_lang 1049 \
|
||||
-property_name UPGRADE_2 -enable_migrate",
|
||||
"DelLanguage 1029 -buildname " + msi_build,
|
||||
"DelLanguage 1031 -buildname " + msi_build,
|
||||
"DelLanguage 1041 -buildname " + msi_build,
|
||||
"DelLanguage 1046 -buildname " + msi_build,
|
||||
"DelLanguage 2070 -buildname " + msi_build,
|
||||
"DelLanguage 1060 -buildname " + msi_build,
|
||||
"DelLanguage 1036 -buildname " + msi_build,
|
||||
"DelLanguage 3082 -buildname " + msi_build,
|
||||
"DelLanguage 1033 -buildname " + msi_build,
|
||||
"SetCurrentFeature ExtendedFeature",
|
||||
"NewSync CUSTOM_PATH " + viewer_dir,
|
||||
"UpdateFile CUSTOM_PATH\\ImageViewer.exe " + viewer_dir + "\\ImageViewer.exe",
|
||||
"UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + viewer_dir + "\\VideoPlayer.exe",
|
||||
"SetProperty ProductName=\"" + branding.desktop_product_name_full + "\"",
|
||||
"SetProperty ASCC_REG_PREFIX=" + branding.ascc_reg_prefix
|
||||
]
|
||||
if common.platform == "windows_x86":
|
||||
aic_content += [
|
||||
"SetComponentAttribute -feature_name ExtendedFeature -unset -64bit_component"
|
||||
]
|
||||
if common.platform == "windows_x86":
|
||||
aic_content += [
|
||||
"SetComponentAttribute -feature_name MainFeature -unset -64bit_component",
|
||||
"SetComponentAttribute -feature_name FileProgIds -unset -64bit_component",
|
||||
"SetComponentAttribute -feature_name FileOpenWith -unset -64bit_component",
|
||||
"SetComponentAttribute -feature_name FileProgramCapatibilities -unset -64bit_component",
|
||||
"SetComponentAttribute -feature_name FileTypeAssociations -unset -64bit_component",
|
||||
"SetComponentAttribute -feature_name FileNewTemplates -unset -64bit_component"
|
||||
]
|
||||
aic_content += [
|
||||
"SetCurrentFeature MainFeature",
|
||||
"NewSync APPDIR " + desktop_dir,
|
||||
"UpdateFile APPDIR\\DesktopEditors.exe " + desktop_dir + "\\DesktopEditors.exe",
|
||||
"UpdateFile APPDIR\\updatesvc.exe " + desktop_dir + "\\updatesvc.exe",
|
||||
"SetProperty VERSION=\"" + package_version + "\"",
|
||||
"SetProperty VERSION_SHORT=\"" + re.sub(r"^(\d+\.\d+).+", "\\1", package_version) + "\"",
|
||||
"SetVersion " + package_version,
|
||||
"SetPackageName " + advinst_file + " -buildname " + msi_build,
|
||||
"Rebuild -buildslist " + msi_build
|
||||
if not common.platform in ["windows_x64", "windows_x86"]:
|
||||
return
|
||||
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
utils.write_file("DesktopEditors.aic", "\r\n".join(aic_content), "utf-8-sig")
|
||||
ret = utils.cmd("AdvancedInstaller.com", "/execute", \
|
||||
"DesktopEditors.aip", "DesktopEditors.aic", verbose=True)
|
||||
if common.sign:
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("desktop advinst build")
|
||||
ret = utils.ps1("make_advinst.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop advinst build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
@ -323,6 +177,19 @@ def make_advinst():
|
||||
utils.set_summary("desktop advinst deploy", ret)
|
||||
return
|
||||
|
||||
def make_online():
|
||||
if not common.platform in ["windows_x86_xp"]:
|
||||
return
|
||||
online_file = "%s-%s-%s.exe" % ("OnlineInstaller", package_version, suffix)
|
||||
ret = utils.is_file(online_file)
|
||||
utils.set_summary("desktop online installer build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop online installer deploy")
|
||||
ret = s3_upload([online_file], "desktop/win/online/")
|
||||
utils.set_summary("desktop online installer deploy", ret)
|
||||
return
|
||||
|
||||
#
|
||||
# macOS
|
||||
#
|
||||
@ -429,11 +296,17 @@ def make_sparkle_updates():
|
||||
macos_zip = "build/" + zip_filename + ".zip"
|
||||
utils.create_dir(updates_dir)
|
||||
utils.copy_file(macos_zip, updates_dir)
|
||||
utils.copy_dir_content(released_updates_dir, updates_dir, ".zip")
|
||||
utils.sh(
|
||||
"ls -1t " + released_updates_dir + "/*.zip" \
|
||||
+ " | head -n 3" \
|
||||
+ " | while read f; do cp -fv \"$f\" " + updates_dir + "/; done",
|
||||
verbose=True)
|
||||
|
||||
for file in utils.glob_path(changes_dir + "/" + common.version + "/*.html"):
|
||||
filename = utils.get_basename(file).replace("changes", zip_filename)
|
||||
utils.copy_file(file, updates_dir + "/" + filename)
|
||||
for ext in [".html", ".ru.html"]:
|
||||
changes_src = changes_dir + "/" + common.version + "/changes" + ext
|
||||
changes_dst = updates_dir + "/" + zip_filename + ext
|
||||
if not utils.copy_file(changes_src, changes_dst):
|
||||
utils.write_file(changes_dst, "<!DOCTYPE html>placeholder")
|
||||
|
||||
sparkle_base_url = "%s/%s/updates/" % (branding.sparkle_base_url, suffix)
|
||||
ret = utils.sh(
|
||||
@ -446,17 +319,6 @@ def make_sparkle_updates():
|
||||
)
|
||||
utils.set_summary("desktop sparkle files build", ret)
|
||||
|
||||
utils.log("")
|
||||
utils.log_h3("generate checksums")
|
||||
utils.sh(
|
||||
"md5 *.zip *.delta > md5sums.txt",
|
||||
chdir="build/update", verbose=True
|
||||
)
|
||||
utils.sh(
|
||||
"shasum -a 256 *.zip *.delta > sha256sums.txt",
|
||||
chdir="build/update", verbose=True
|
||||
)
|
||||
|
||||
if common.deploy:
|
||||
utils.log_h2("desktop sparkle files deploy")
|
||||
ret = s3_upload(
|
||||
@ -475,7 +337,7 @@ def make_linux():
|
||||
utils.set_cwd("desktop-apps/win-linux/package/linux")
|
||||
|
||||
utils.log_h2("desktop build")
|
||||
make_args = branding.desktop_make_targets
|
||||
make_args = [t["make"] for t in branding.desktop_make_targets]
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
@ -483,68 +345,11 @@ def make_linux():
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("desktop build", ret)
|
||||
|
||||
rpm_arch = "*"
|
||||
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
|
||||
|
||||
if common.deploy:
|
||||
if ret:
|
||||
utils.log_h2("desktop tar deploy")
|
||||
if "tar" in branding.desktop_make_targets:
|
||||
ret = s3_upload(
|
||||
utils.glob_path("tar/*.tar*"),
|
||||
"desktop/linux/generic/")
|
||||
utils.set_summary("desktop tar deploy", ret)
|
||||
if "deb" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop deb deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("deb/*.deb"),
|
||||
"desktop/linux/debian/")
|
||||
utils.set_summary("desktop deb deploy", ret)
|
||||
if "deb-astra" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop deb-astra deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("deb-astra/*.deb"),
|
||||
"desktop/linux/astra/")
|
||||
utils.set_summary("desktop deb-astra deploy", ret)
|
||||
if "rpm" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop rpm deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
|
||||
"desktop/linux/rhel/")
|
||||
utils.set_summary("desktop rpm deploy", ret)
|
||||
if "suse-rpm" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop suse-rpm deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
|
||||
"desktop/linux/suse/")
|
||||
utils.set_summary("desktop suse-rpm deploy", ret)
|
||||
if "apt-rpm" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop apt-rpm deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
|
||||
"desktop/linux/altlinux/")
|
||||
utils.set_summary("desktop apt-rpm deploy", ret)
|
||||
if "urpmi" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop urpmi deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm"),
|
||||
"desktop/linux/rosa/")
|
||||
utils.set_summary("desktop urpmi deploy", ret)
|
||||
else:
|
||||
if "tar" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop tar deploy", False)
|
||||
if "deb" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop deb deploy", False)
|
||||
if "deb-astra" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop deb-astra deploy", False)
|
||||
if "rpm" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop rpm deploy", False)
|
||||
if "suse-rpm" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop suse-rpm deploy", False)
|
||||
if "apt-rpm" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop apt-rpm deploy", False)
|
||||
if "urpmi" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop urpmi deploy", False)
|
||||
for t in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop " + t["make"] + " deploy")
|
||||
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
|
||||
utils.set_summary("desktop " + t["make"] + " deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
@ -22,20 +22,15 @@ def make_mobile():
|
||||
utils.sh("rm -rfv *.zip", verbose=True)
|
||||
|
||||
utils.log_h2("mobile build")
|
||||
ret = utils.sh("zip -r " + zip_file + " ./android* ./js", verbose=True)
|
||||
ret = utils.sh("zip -r " + zip_file + " ./android ./ios", verbose=True)
|
||||
utils.set_summary("mobile build", ret)
|
||||
|
||||
if common.deploy:
|
||||
if ret:
|
||||
utils.log_h2("mobile deploy")
|
||||
key = "mobile/android/" + zip_file
|
||||
aws_kwargs = { "acl": "public-read" }
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
aws_kwargs["endpoint_url"] = branding.s3_endpoint_url
|
||||
ret = utils.s3_upload(
|
||||
zip_file, "s3://" + branding.s3_bucket + "/" + key, **aws_kwargs)
|
||||
ret = utils.s3_upload(zip_file, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if ret:
|
||||
utils.add_deploy_data(key)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.set_summary("mobile deploy", ret)
|
||||
|
||||
|
||||
@ -19,13 +19,8 @@ def s3_upload(files, dst):
|
||||
ret = True
|
||||
for f in files:
|
||||
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
|
||||
aws_kwargs = { "acl": "public-read" }
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
aws_kwargs["endpoint_url"] = branding.s3_endpoint_url
|
||||
upload = utils.s3_upload(
|
||||
f, "s3://" + branding.s3_bucket + "/" + key, **aws_kwargs)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if upload:
|
||||
utils.add_deploy_data(key)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
ret &= upload
|
||||
return ret
|
||||
@ -41,10 +36,13 @@ def make_windows(edition):
|
||||
|
||||
utils.log_h2("server " + edition + " build")
|
||||
ret = utils.cmd("make", "clean", verbose=True)
|
||||
args = ["-e", "PRODUCT_NAME=" + product_name]
|
||||
if edition == "prerequisites":
|
||||
make_args = ["exe-pr"]
|
||||
else:
|
||||
make_args = ["exe", "-e", "PRODUCT_NAME=" + product_name]
|
||||
if not branding.onlyoffice:
|
||||
args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
|
||||
ret &= utils.cmd("make", "packages", *args, verbose=True)
|
||||
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
|
||||
ret &= utils.cmd("make", *make_args, verbose=True)
|
||||
utils.set_summary("server " + edition + " build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
@ -65,7 +63,8 @@ def make_linux(edition):
|
||||
utils.set_cwd("document-server-package")
|
||||
|
||||
utils.log_h2("server " + edition + " build")
|
||||
make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name]
|
||||
make_args = [t["make"] for t in branding.server_make_targets]
|
||||
make_args += ["-e", "PRODUCT_NAME=" + product_name]
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
@ -74,40 +73,10 @@ def make_linux(edition):
|
||||
utils.set_summary("server " + edition + " build", ret)
|
||||
|
||||
if common.deploy:
|
||||
if ret:
|
||||
if "deb" in branding.server_make_targets:
|
||||
utils.log_h2("server " + edition + " deb deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("deb/*.deb"),
|
||||
"server/linux/debian/")
|
||||
utils.set_summary("server " + edition + " deb deploy", ret)
|
||||
if "rpm" in branding.server_make_targets:
|
||||
utils.log_h2("server " + edition + " rpm deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
|
||||
"server/linux/rhel/")
|
||||
utils.set_summary("server " + edition + " rpm deploy", ret)
|
||||
if "apt-rpm" in branding.server_make_targets:
|
||||
utils.log_h2("server " + edition + " apt-rpm deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("apt-rpm/builddir/RPMS/*/*.rpm"),
|
||||
"server/linux/altlinux/")
|
||||
utils.set_summary("server " + edition + " apt-rpm deploy", ret)
|
||||
if "tar" in branding.server_make_targets:
|
||||
utils.log_h2("server " + edition + " snap deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("*.tar.gz"),
|
||||
"server/linux/snap/")
|
||||
utils.set_summary("server " + edition + " snap deploy", ret)
|
||||
else:
|
||||
if "deb" in branding.server_make_targets:
|
||||
utils.set_summary("server " + edition + " deb deploy", False)
|
||||
if "rpm" in branding.server_make_targets:
|
||||
utils.set_summary("server " + edition + " rpm deploy", False)
|
||||
if "apt-rpm" in branding.server_make_targets:
|
||||
utils.set_summary("server " + edition + " apt-rpm deploy", False)
|
||||
if "tar" in branding.server_make_targets:
|
||||
utils.set_summary("server " + edition + " snap deploy", False)
|
||||
for t in branding.server_make_targets:
|
||||
utils.log_h2("server " + edition + " " + t["make"] + " deploy")
|
||||
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
|
||||
utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
@ -12,7 +12,6 @@ import subprocess
|
||||
import sys
|
||||
import time
|
||||
import package_common as common
|
||||
import base
|
||||
|
||||
def host_platform():
|
||||
return platform.system().lower()
|
||||
@ -73,6 +72,9 @@ def get_path(path):
|
||||
return path.replace("/", "\\")
|
||||
return path
|
||||
|
||||
def get_relpath(path, rel_path):
|
||||
return os.path.relpath(get_path(path), get_path(rel_path))
|
||||
|
||||
def get_abspath(path):
|
||||
return os.path.abspath(get_path(path))
|
||||
|
||||
@ -199,13 +201,12 @@ def copy_files(src, dst, override=True, verbose=True):
|
||||
copy_files(file + "/*", dst + "/" + file_name, override)
|
||||
return
|
||||
|
||||
def copy_dir(src, dst, override=True, verbose=True):
|
||||
def copy_dir(src, dst, verbose=True):
|
||||
if verbose:
|
||||
log("- copy_dir:")
|
||||
log(" src: " + src)
|
||||
log(" dst: " + dst)
|
||||
log(" override: " + str(override))
|
||||
base.copy_dir(src, dst)
|
||||
shutil.copytree(src, dst)
|
||||
return
|
||||
|
||||
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
|
||||
@ -215,20 +216,18 @@ def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose
|
||||
log(" dst: " + dst)
|
||||
log(" include: " + filter_include)
|
||||
log(" exclude: " + filter_exclude)
|
||||
src_folder = src
|
||||
if ("/" != src[-1:]):
|
||||
src_folder += "/"
|
||||
src_folder += "*"
|
||||
for file in glob.glob(src_folder):
|
||||
basename = os.path.basename(file)
|
||||
if ("" != filter_include) and (-1 == basename.find(filter_include)):
|
||||
for item in os.listdir(src):
|
||||
s = os.path.join(src, item)
|
||||
d = os.path.join(dst, item)
|
||||
if ("" != filter_include) and (-1 == item.find(filter_include)):
|
||||
continue
|
||||
if ("" != filter_exclude) and (-1 != basename.find(filter_exclude)):
|
||||
if ("" != filter_exclude) and (-1 != item.find(filter_exclude)):
|
||||
continue
|
||||
if is_file(file):
|
||||
copy_file(file, dst, verbose=False)
|
||||
elif is_dir(file):
|
||||
copy_dir(file, dst + "/" + basename, verbose=False)
|
||||
if os.path.isdir(s):
|
||||
shutil.copytree(s, d)
|
||||
else:
|
||||
shutil.copy2(s, d)
|
||||
log(item)
|
||||
return
|
||||
|
||||
def delete_file(path, verbose=True):
|
||||
@ -267,11 +266,6 @@ def set_summary(target, status):
|
||||
common.summary.append({target: status})
|
||||
return
|
||||
|
||||
def add_deploy_data(key):
|
||||
with open(common.deploy_data, 'a+') as f:
|
||||
f.write(key + "\n")
|
||||
return
|
||||
|
||||
def cmd(*args, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- cmd:")
|
||||
@ -323,33 +317,11 @@ def ps1(file, args=[], **kwargs):
|
||||
if kwargs.get("creates") and is_exist(kwargs["creates"]):
|
||||
return True
|
||||
ret = subprocess.call(
|
||||
["powershell", "-File", file] + args, stderr=subprocess.STDOUT, shell=True
|
||||
["powershell", "-ExecutionPolicy", "ByPass", "-File", file] + args,
|
||||
stderr=subprocess.STDOUT, shell=True
|
||||
) == 0
|
||||
return ret
|
||||
|
||||
def download_file(url, path, md5, verbose=False):
|
||||
if verbose:
|
||||
log("- download_file:")
|
||||
log(" url: " + path)
|
||||
log(" path: " + url)
|
||||
log(" md5: " + md5)
|
||||
if is_file(path):
|
||||
if get_hash_md5(path) == md5:
|
||||
log_err("file already exist (match checksum)")
|
||||
return True
|
||||
else:
|
||||
log_err("wrong checksum (%s), delete" % md5)
|
||||
os.remove(path)
|
||||
ret = powershell(
|
||||
"(New-Object System.Net.WebClient).DownloadFile('%s','%s')" % (url, path),
|
||||
verbose=True
|
||||
)
|
||||
md5_new = get_hash_md5(path)
|
||||
if md5 != md5_new:
|
||||
log_err("checksum didn't match (%s != %s)" % (md5, md5_new))
|
||||
return False
|
||||
return ret
|
||||
|
||||
def sh(command, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- sh:")
|
||||
@ -408,15 +380,13 @@ def s3_upload(src, dst, **kwargs):
|
||||
ret = sh(" ".join(args), verbose=True)
|
||||
return ret
|
||||
|
||||
def s3_sync(src, dst, **kwargs):
|
||||
def s3_copy(src, dst, **kwargs):
|
||||
args = ["aws"]
|
||||
if kwargs.get("endpoint_url"):
|
||||
args += ["--endpoint-url", kwargs["endpoint_url"]]
|
||||
args += ["s3", "sync", "--no-progress"]
|
||||
args += ["s3", "cp", "--no-progress"]
|
||||
if kwargs.get("acl"):
|
||||
args += ["--acl", kwargs["acl"]]
|
||||
if kwargs.get("delete") and kwargs["delete"]:
|
||||
args += ["--delete"]
|
||||
args += [src, dst]
|
||||
if is_windows():
|
||||
ret = cmd(*args, verbose=True)
|
||||
|
||||
150
scripts/qmake.py
Normal file
150
scripts/qmake.py
Normal file
@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
__dir__name__ = os.path.dirname(__file__)
|
||||
sys.path.append(__dir__name__ + '/core_common/modules/android')
|
||||
|
||||
import base
|
||||
import config
|
||||
import android_ndk
|
||||
import multiprocessing
|
||||
|
||||
def get_make_file_suffix(platform):
|
||||
suffix = platform
|
||||
if config.check_option("config", "debug"):
|
||||
suffix += "_debug_"
|
||||
suffix += config.option("branding")
|
||||
return suffix
|
||||
|
||||
def get_j_num():
|
||||
if ("0" != config.option("multiprocess")):
|
||||
return ["-j" + str(multiprocessing.cpu_count())]
|
||||
return []
|
||||
|
||||
def check_support_platform(platform):
|
||||
qt_dir = base.qt_setup(platform)
|
||||
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
|
||||
return False
|
||||
return True
|
||||
|
||||
def make(platform, project, qmake_config_addon="", is_no_errors=False):
|
||||
# check platform
|
||||
if not check_support_platform(platform):
|
||||
print("THIS PLATFORM IS NOT SUPPORTED")
|
||||
return
|
||||
|
||||
old_env = dict(os.environ)
|
||||
|
||||
# qt
|
||||
qt_dir = base.qt_setup(platform)
|
||||
base.set_env("OS_DEPLOY", platform)
|
||||
|
||||
# pro & makefile
|
||||
file_pro = os.path.abspath(project)
|
||||
|
||||
pro_dir = os.path.dirname(file_pro)
|
||||
if (pro_dir.endswith("/.")):
|
||||
pro_dir = pro_dir[:-2]
|
||||
if (pro_dir.endswith("/")):
|
||||
pro_dir = pro_dir[:-1]
|
||||
|
||||
makefile_name = "Makefile." + get_make_file_suffix(platform)
|
||||
makefile = pro_dir + "/" + makefile_name
|
||||
stash_file = pro_dir + "/.qmake.stash"
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(pro_dir)
|
||||
|
||||
if (base.is_file(stash_file)):
|
||||
base.delete_file(stash_file)
|
||||
if (base.is_file(makefile)):
|
||||
base.delete_file(makefile)
|
||||
|
||||
base.set_env("DEST_MAKEFILE_NAME", "./" + makefile_name)
|
||||
|
||||
# setup android env
|
||||
if (-1 != platform.find("android")):
|
||||
base.set_env("ANDROID_NDK_HOST", android_ndk.host["arch"])
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-" + android_ndk.get_sdk_api())
|
||||
base.set_env("PATH", qt_dir + "/bin:" + android_ndk.toolchain_dir() + "/bin:" + base.get_env("PATH"))
|
||||
|
||||
# setup ios env
|
||||
if (-1 != platform.find("ios")):
|
||||
base.hack_xcode_ios()
|
||||
|
||||
if base.is_file(makefile):
|
||||
base.delete_file(makefile)
|
||||
|
||||
config_param = base.qt_config(platform)
|
||||
if ("" != qmake_config_addon):
|
||||
config_param += (" " + qmake_config_addon)
|
||||
|
||||
# qmake ADDON
|
||||
qmake_addon = []
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon = config.option("qmake_addon").split()
|
||||
|
||||
clean_params = ["clean", "-f", makefile]
|
||||
distclean_params = ["distclean", "-f", makefile]
|
||||
build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon
|
||||
|
||||
qmake_app = qt_dir + "/bin/qmake"
|
||||
# non windows platform
|
||||
if not base.is_windows():
|
||||
if base.is_file(qt_dir + "/onlyoffice_qt.conf"):
|
||||
build_params.append("-qtconf")
|
||||
build_params.append(qt_dir + "/onlyoffice_qt.conf")
|
||||
base.cmd(qmake_app, build_params)
|
||||
base.correct_makefile_after_qmake(platform, makefile)
|
||||
if ("1" == config.option("clean")):
|
||||
base.cmd_and_return_cwd("make", clean_params, True)
|
||||
base.cmd_and_return_cwd("make", distclean_params, True)
|
||||
base.cmd(qmake_app, build_params)
|
||||
base.correct_makefile_after_qmake(platform, makefile)
|
||||
base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num(), is_no_errors)
|
||||
else:
|
||||
config_params_array = base.qt_config_as_param(config_param)
|
||||
config_params_string = ""
|
||||
for item in config_params_array:
|
||||
config_params_string += (" \"" + item + "\"")
|
||||
qmake_addon_string = " ".join(qmake_addon)
|
||||
if ("" != qmake_addon_string):
|
||||
qmake_addon_string = " " + qmake_addon_string
|
||||
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
|
||||
qmake_addon_string = ""
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
|
||||
qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
|
||||
if ("1" == config.option("clean")):
|
||||
qmake_bat.append("call nmake " + " ".join(clean_params))
|
||||
qmake_bat.append("call nmake " + " ".join(distclean_params))
|
||||
qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
qmake_bat.append("set CL=/MP")
|
||||
qmake_bat.append("call nmake -f " + makefile)
|
||||
base.run_as_bat(qmake_bat, is_no_errors)
|
||||
|
||||
if (base.is_file(stash_file)):
|
||||
base.delete_file(stash_file)
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
def make_all_platforms(project, qmake_config_addon=""):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
make(platform, project, qmake_config_addon)
|
||||
return
|
||||
@ -2,6 +2,8 @@
|
||||
import os
|
||||
import shutil
|
||||
import re
|
||||
import argparse
|
||||
|
||||
def readFile(path):
|
||||
with open(path, "r", errors='replace') as file:
|
||||
filedata = file.read()
|
||||
@ -112,6 +114,8 @@ class EditorApi(object):
|
||||
line = line.replace("}", "")
|
||||
lineWithoutSpaces = line.replace(" ", "")
|
||||
if not is_found_function and 0 == line.find("function "):
|
||||
if -1 == decoration.find("@constructor"):
|
||||
return
|
||||
codeCorrect += (line + addon_for_func + "\n")
|
||||
is_found_function = True
|
||||
if not is_found_function and -1 != line.find(".prototype."):
|
||||
@ -177,7 +181,7 @@ class EditorApi(object):
|
||||
|
||||
def generate(self):
|
||||
for file in self.files:
|
||||
file_content = readFile(file)
|
||||
file_content = readFile(f'{sdkjs_dir}/{file}')
|
||||
arrRecords = file_content.split("/**")
|
||||
arrRecords = arrRecords[1:-1]
|
||||
for record in arrRecords:
|
||||
@ -185,8 +189,8 @@ class EditorApi(object):
|
||||
self.numfile += 1
|
||||
correctContent = ''.join(self.records)
|
||||
correctContent += "\n"
|
||||
os.mkdir('deploy/api_builder/' + self.folder)
|
||||
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
|
||||
os.mkdir(args.destination + self.folder)
|
||||
writeFile(args.destination + self.folder + "/api.js", correctContent)
|
||||
return
|
||||
|
||||
def convert_to_interface(arrFiles, sEditorType):
|
||||
@ -195,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType):
|
||||
editor.generate()
|
||||
return
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir("../../../sdkjs")
|
||||
if True == os.path.isdir('deploy/api_builder'):
|
||||
shutil.rmtree('deploy/api_builder', ignore_errors=True)
|
||||
os.mkdir('deploy/api_builder')
|
||||
convert_to_interface(["word/apiBuilder.js"], "word")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
|
||||
os.chdir(old_cur)
|
||||
sdkjs_dir = "../../../sdkjs"
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="Generate documentation")
|
||||
parser.add_argument(
|
||||
"destination",
|
||||
type=str,
|
||||
help="Destination directory for the generated documentation",
|
||||
nargs='?', # Indicates the argument is optional
|
||||
default="../../../onlyoffice.github.io\sdkjs-plugins\content\macros\libs/" # Default value
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
old_cur = os.getcwd()
|
||||
|
||||
if True == os.path.isdir(args.destination):
|
||||
shutil.rmtree(args.destination, ignore_errors=True)
|
||||
os.mkdir(args.destination)
|
||||
convert_to_interface(["word/apiBuilder.js"], "word")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
|
||||
os.chdir(old_cur)
|
||||
|
||||
|
||||
|
||||
80
scripts/sdkjs_common/jsdoc/README.md
Normal file
80
scripts/sdkjs_common/jsdoc/README.md
Normal file
@ -0,0 +1,80 @@
|
||||
|
||||
# Documentation Generation Guide
|
||||
|
||||
This guide explains how to generate documentation for Onlyoffice Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors.
|
||||
|
||||
## Requirements
|
||||
|
||||
```bash
|
||||
Node.js v20 and above
|
||||
Python v3.10 and above
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
git clone https://github.com/ONLYOFFICE/build_tools.git
|
||||
cd build_tools/scripts/sdkjs_common/jsdoc
|
||||
npm install
|
||||
```
|
||||
|
||||
## Scripts Overview
|
||||
|
||||
### `generate_docs_json.py`
|
||||
|
||||
This script generates JSON documentation based on the `apiBuilder.js` files.
|
||||
|
||||
- **Usage**:
|
||||
```bash
|
||||
python generate_docs_json.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api`.
|
||||
|
||||
### `generate_docs_plugins_json.py`
|
||||
|
||||
This script generates JSON documentation based on the `api_plugins.js` files.
|
||||
|
||||
- **Usage**:
|
||||
```bash
|
||||
python generate_docs_plugins_json.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api-plugins`.
|
||||
|
||||
### `generate_docs_md.py`
|
||||
|
||||
This script generates Markdown documentation from the `apiBuilder.js` files.
|
||||
|
||||
- **Usage**:
|
||||
```bash
|
||||
python generate_docs_md.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `../../../../office-js-api/`.
|
||||
|
||||
## Example
|
||||
|
||||
To generate JSON documentation with the default output path:
|
||||
```bash
|
||||
python generate_docs_json.py /path/to/save/json
|
||||
```
|
||||
|
||||
To generate JSON documentation with the default output path:
|
||||
```bash
|
||||
python generate_docs_plugins_json.py /path/to/save/json
|
||||
```
|
||||
|
||||
To generate Markdown documentation and specify a custom output path:
|
||||
```bash
|
||||
python generate_docs_md.py /path/to/save/markdown
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- Make sure to have all necessary permissions to run these scripts and write to the specified directories.
|
||||
- The output directories will be created if they do not exist.
|
||||
|
||||
16
scripts/sdkjs_common/jsdoc/config/builder/cell.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/cell.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
216
scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js
Normal file
216
scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js
Normal file
@ -0,0 +1,216 @@
|
||||
exports.handlers = {
|
||||
processingComplete: function(e) {
|
||||
// array for filtered doclets
|
||||
let filteredDoclets = [];
|
||||
|
||||
const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
|
||||
|
||||
const classesDocletsMap = {}; // doclets for classes write at the end
|
||||
let passedClasses = []; // passed classes for current editor
|
||||
|
||||
// Remove dublicates doclets
|
||||
const latestDoclets = {};
|
||||
e.doclets.forEach(doclet => {
|
||||
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
|
||||
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
|
||||
|
||||
const shouldAddMethod =
|
||||
doclet.kind !== 'member' &&
|
||||
(!doclet.longname || doclet.longname.search('private') === -1) &&
|
||||
doclet.scope !== 'inner' && hasTypeofEditorsTag;
|
||||
|
||||
if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
|
||||
latestDoclets[doclet.longname] = doclet;
|
||||
}
|
||||
});
|
||||
e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));
|
||||
|
||||
// check available classess for current editor
|
||||
for (let i = 0; i < e.doclets.length; i++) {
|
||||
const doclet = e.doclets[i];
|
||||
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
|
||||
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
|
||||
|
||||
const shouldAdd =
|
||||
doclet.kind !== 'member' &&
|
||||
(!doclet.longname || doclet.longname.search('private') === -1) &&
|
||||
doclet.scope !== 'inner' &&
|
||||
(!isMethod || hasTypeofEditorsTag);
|
||||
|
||||
if (shouldAdd) {
|
||||
if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
|
||||
passedClasses.push(cleanName(doclet.memberof));
|
||||
}
|
||||
}
|
||||
else if (doclet.kind == 'class') {
|
||||
classesDocletsMap[cleanName(doclet.name)] = doclet;
|
||||
}
|
||||
}
|
||||
|
||||
// remove unavailave classes in current editor
|
||||
passedClasses = passedClasses.filter(className => {
|
||||
const doclet = classesDocletsMap[className];
|
||||
if (!doclet) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
|
||||
|
||||
// class is passes if there is no editor tag or the current editor is among the tags
|
||||
const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
|
||||
return isPassed;
|
||||
});
|
||||
|
||||
for (let i = 0; i < e.doclets.length; i++) {
|
||||
const doclet = e.doclets[i];
|
||||
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
|
||||
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
|
||||
|
||||
const shouldAddMethod =
|
||||
doclet.kind !== 'member' &&
|
||||
(!doclet.longname || doclet.longname.search('private') === -1) &&
|
||||
doclet.scope !== 'inner' && hasTypeofEditorsTag;
|
||||
|
||||
if (shouldAddMethod) {
|
||||
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
|
||||
if (false == passedClasses.includes(cleanName(doclet.memberof))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// We leave only the necessary fields
|
||||
doclet.memberof = cleanName(doclet.memberof);
|
||||
doclet.longname = cleanName(doclet.longname);
|
||||
doclet.name = cleanName(doclet.name);
|
||||
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
description: doclet.description,
|
||||
memberof: cleanName(doclet.memberof),
|
||||
|
||||
params: doclet.params ? doclet.params.map(param => ({
|
||||
type: param.type ? {
|
||||
names: param.type.names,
|
||||
parsedType: param.type.parsedType
|
||||
} : param.type,
|
||||
|
||||
name: param.name,
|
||||
description: param.description,
|
||||
optional: param.optional,
|
||||
defaultvalue: param.defaultvalue
|
||||
})) : doclet.params,
|
||||
|
||||
returns: doclet.returns ? doclet.returns.map(returnObj => ({
|
||||
type: {
|
||||
names: returnObj.type.names,
|
||||
parsedType: returnObj.type.parsedType
|
||||
}
|
||||
})) : doclet.returns,
|
||||
|
||||
name: doclet.name,
|
||||
longname: cleanName(doclet.longname),
|
||||
kind: doclet.kind,
|
||||
scope: doclet.scope,
|
||||
|
||||
type: doclet.type ? {
|
||||
names: doclet.type.names,
|
||||
parsedType: doclet.type.parsedType
|
||||
} : doclet.type,
|
||||
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
|
||||
see: doclet.see
|
||||
};
|
||||
|
||||
// Add the filtered doclet to the array
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
else if (doclet.kind == 'class') {
|
||||
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
|
||||
if (false == passedClasses.includes(cleanName(doclet.name))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
description: doclet.description,
|
||||
name: cleanName(doclet.name),
|
||||
longname: cleanName(doclet.longname),
|
||||
kind: doclet.kind,
|
||||
scope: "global",
|
||||
augments: doclet.augments || undefined,
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
see: doclet.see || undefined
|
||||
};
|
||||
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
else if (doclet.kind == 'typedef') {
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
description: doclet.description,
|
||||
name: cleanName(doclet.name),
|
||||
longname: cleanName(doclet.longname),
|
||||
kind: doclet.kind,
|
||||
scope: "global",
|
||||
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
|
||||
see: doclet.see,
|
||||
type: doclet.type ? {
|
||||
names: doclet.type.names,
|
||||
parsedType: doclet.type.parsedType
|
||||
} : doclet.type
|
||||
};
|
||||
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
}
|
||||
|
||||
// Replace doclets with a filtered array
|
||||
e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
|
||||
}
|
||||
};
|
||||
16
scripts/sdkjs_common/jsdoc/config/builder/forms.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/forms.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/config/builder/slide.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/slide.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/config/builder/word.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/builder/word.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/word/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/config/plugins/cell.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/cell.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/cell/api_plugins.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/config/plugins/common.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/common.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../sdkjs/common/apiBase_plugins.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
85
scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
Normal file
85
scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
Normal file
@ -0,0 +1,85 @@
|
||||
exports.handlers = {
|
||||
processingComplete: function(e) {
|
||||
const filteredDoclets = [];
|
||||
|
||||
function checkNullProps(oDoclet) {
|
||||
for (let key of Object.keys(oDoclet)) {
|
||||
if (oDoclet[key] == null) {
|
||||
delete oDoclet[key];
|
||||
}
|
||||
if (typeof(oDoclet[key]) == "object") {
|
||||
checkNullProps(oDoclet[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < e.doclets.length; i++) {
|
||||
const doclet = e.doclets[i];
|
||||
if (true == doclet.undocumented || doclet.kind == 'package') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
|
||||
kind: doclet.kind,
|
||||
since: doclet.since,
|
||||
name: doclet.name,
|
||||
type: doclet.type ? {
|
||||
names: doclet.type.names,
|
||||
parsedType: doclet.type.parsedType
|
||||
} : doclet.type,
|
||||
|
||||
description: doclet.description,
|
||||
memberof: doclet.memberof,
|
||||
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
|
||||
longname: doclet.longname,
|
||||
scope: doclet.scope,
|
||||
alias: doclet.alias,
|
||||
|
||||
params: doclet.params ? doclet.params.map(param => ({
|
||||
type: param.type ? {
|
||||
names: param.type.names,
|
||||
parsedType: param.type.parsedType
|
||||
} : param.type,
|
||||
|
||||
name: param.name,
|
||||
description: param.description,
|
||||
optional: param.optional,
|
||||
defaultvalue: param.defaultvalue
|
||||
})) : doclet.params,
|
||||
|
||||
returns: doclet.returns ? doclet.returns.map(returnObj => ({
|
||||
type: {
|
||||
names: returnObj.type.names,
|
||||
parsedType: returnObj.type.parsedType
|
||||
}
|
||||
})) : doclet.returns,
|
||||
see: doclet.see
|
||||
};
|
||||
|
||||
checkNullProps(filteredDoclet)
|
||||
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
|
||||
e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
|
||||
}
|
||||
};
|
||||
16
scripts/sdkjs_common/jsdoc/config/plugins/forms.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/forms.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs-forms/apiPlugins.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/config/plugins/slide.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/slide.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/slide/api_plugins.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/config/plugins/word.json
Normal file
16
scripts/sdkjs_common/jsdoc/config/plugins/word.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
110
scripts/sdkjs_common/jsdoc/generate_docs_json.py
Normal file
110
scripts/sdkjs_common/jsdoc/generate_docs_json.py
Normal file
@ -0,0 +1,110 @@
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
import argparse
|
||||
import re
|
||||
import platform
|
||||
|
||||
root = '../../../..'
|
||||
|
||||
# Configuration files
|
||||
configs = [
|
||||
"./config/builder/word.json",
|
||||
"./config/builder/cell.json",
|
||||
"./config/builder/slide.json",
|
||||
"./config/builder/forms.json"
|
||||
]
|
||||
|
||||
editors_maps = {
|
||||
"word": "CDE",
|
||||
"cell": "CSE",
|
||||
"slide": "CPE",
|
||||
"forms": "CFE"
|
||||
}
|
||||
|
||||
def generate(output_dir, md=False):
|
||||
if not os.path.exists(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
|
||||
# Generate JSON documentation
|
||||
for config in configs:
|
||||
editor_name = config.split('/')[-1].replace('.json', '')
|
||||
output_file = os.path.join(output_dir, editor_name + ".json")
|
||||
command_set_env = "export"
|
||||
if (platform.system().lower() == "windows"):
|
||||
command_set_env = "set"
|
||||
command = f"{command_set_env} EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}"
|
||||
print(f"Generating {editor_name}.json: {command}")
|
||||
subprocess.run(command, shell=True)
|
||||
|
||||
# Append examples to JSON documentation
|
||||
for config in configs:
|
||||
editor_name = config.split('/')[-1].replace('.json', '')
|
||||
output_file = os.path.join(output_dir, editor_name + ".json")
|
||||
|
||||
# Read the JSON file
|
||||
with open(output_file, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
|
||||
# Modify JSON data
|
||||
for doclet in data:
|
||||
if 'see' in doclet:
|
||||
if doclet['see'] is not None:
|
||||
if editor_name == 'forms':
|
||||
doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
|
||||
else:
|
||||
doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())
|
||||
|
||||
file_path = f'{root}/' + doclet['see'][0]
|
||||
|
||||
if os.path.exists(file_path):
|
||||
with open(file_path, 'r', encoding='utf-8') as see_file:
|
||||
example_content = see_file.read()
|
||||
|
||||
# Extract the first line as a comment if it exists
|
||||
lines = example_content.split('\n')
|
||||
if lines[0].startswith('//'):
|
||||
comment = lines[0] + '\n'
|
||||
code_content = '\n'.join(lines[1:])
|
||||
else:
|
||||
comment = ''
|
||||
code_content = example_content
|
||||
|
||||
if md == True:
|
||||
doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
|
||||
|
||||
if md == False:
|
||||
document_type = editor_name
|
||||
if "forms" == document_type:
|
||||
document_type = "pdf"
|
||||
doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'
|
||||
|
||||
# Write the modified JSON file back
|
||||
with open(output_file, 'w', encoding='utf-8') as f:
|
||||
json.dump(data, f, ensure_ascii=False, indent=4)
|
||||
|
||||
print("Documentation generation for builder completed.")
|
||||
|
||||
def remove_builder_lines(text):
|
||||
lines = text.splitlines() # Split text into lines
|
||||
filtered_lines = [line for line in lines if not line.strip().startswith("builder.")]
|
||||
return "\n".join(filtered_lines)
|
||||
|
||||
def remove_js_comments(text):
|
||||
# Remove single-line comments, leaving text after //
|
||||
text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
|
||||
# Remove multi-line comments, leaving text after /*
|
||||
text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL)
|
||||
return text.strip()
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="Generate documentation")
|
||||
parser.add_argument(
|
||||
"destination",
|
||||
type=str,
|
||||
help="Destination directory for the generated documentation",
|
||||
nargs='?', # Indicates the argument is optional
|
||||
default=f"{root}/office-js-api-declarations/office-js-api"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
generate(args.destination)
|
||||
409
scripts/sdkjs_common/jsdoc/generate_docs_md.py
Normal file
409
scripts/sdkjs_common/jsdoc/generate_docs_md.py
Normal file
@ -0,0 +1,409 @@
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import argparse
|
||||
import generate_docs_json
|
||||
|
||||
# Configuration files
|
||||
editors = [
|
||||
"word",
|
||||
"cell",
|
||||
"slide",
|
||||
"forms"
|
||||
]
|
||||
|
||||
missing_examples = []
|
||||
|
||||
def load_json(file_path):
|
||||
with open(file_path, 'r', encoding='utf-8') as f:
|
||||
return json.load(f)
|
||||
|
||||
def write_markdown_file(file_path, content):
|
||||
with open(file_path, 'w', encoding='utf-8') as md_file:
|
||||
md_file.write(content)
|
||||
|
||||
def remove_js_comments(text):
|
||||
text = re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE) # single-line
|
||||
text = re.sub(r'/\*.*?\*/', '', text, flags=re.DOTALL) # multi-line
|
||||
return text.strip()
|
||||
|
||||
def correct_description(string):
|
||||
"""
|
||||
Cleans up or transforms certain tags in a doclet description:
|
||||
- <b> => **
|
||||
- <note>...</note> => 💡 ...
|
||||
- Provide a default if None.
|
||||
"""
|
||||
if string is None:
|
||||
return 'No description provided.'
|
||||
|
||||
# Replace <b> tags with markdown bold
|
||||
string = re.sub(r'<b>', '**', string)
|
||||
string = re.sub(r'</b>', '**', string)
|
||||
# Convert <note>...</note> to a little icon + text
|
||||
string = re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL)
|
||||
return string
|
||||
|
||||
def correct_default_value(value, enumerations, classes):
|
||||
if value is None:
|
||||
return ''
|
||||
|
||||
if value == True:
|
||||
value = "true"
|
||||
elif value == False:
|
||||
value = "false"
|
||||
else:
|
||||
value = str(value)
|
||||
|
||||
return generate_data_types_markdown([value], enumerations, classes)
|
||||
|
||||
def remove_line_breaks(string):
|
||||
return re.sub(r'[\r\n]+', ' ', string)
|
||||
|
||||
# Convert Array.<T> => T[] (including nested arrays).
|
||||
def convert_jsdoc_array_to_ts(type_str: str) -> str:
|
||||
"""
|
||||
Recursively replaces 'Array.<T>' with 'T[]',
|
||||
handling nested arrays like 'Array.<Array.<string>>' => 'string[][]'.
|
||||
"""
|
||||
pattern = re.compile(r'Array\.<([^>]+)>')
|
||||
|
||||
while True:
|
||||
match = pattern.search(type_str)
|
||||
if not match:
|
||||
break
|
||||
|
||||
inner_type = match.group(1).strip()
|
||||
# Recursively convert inner parts
|
||||
inner_type = convert_jsdoc_array_to_ts(inner_type)
|
||||
|
||||
# Replace the outer Array.<...> with ...[]
|
||||
type_str = (
|
||||
type_str[:match.start()]
|
||||
+ f"{inner_type}[]"
|
||||
+ type_str[match.end():]
|
||||
)
|
||||
|
||||
return type_str
|
||||
|
||||
def escape_text_outside_code_blocks(markdown: str) -> str:
|
||||
"""
|
||||
Splits content by fenced code blocks, escapes MDX-unsafe characters
|
||||
(<, >, {, }) only in the text outside those code blocks.
|
||||
"""
|
||||
# A regex to capture fenced code blocks with ```
|
||||
parts = re.split(r'(```.*?```)', markdown, flags=re.DOTALL)
|
||||
|
||||
# Even indices (0, 2, 4, ...) are outside code blocks,
|
||||
# odd indices (1, 3, 5, ...) are actual code blocks.
|
||||
for i in range(0, len(parts), 2):
|
||||
# Only escape in parts outside code blocks
|
||||
parts[i] = (parts[i]
|
||||
.replace('<', '<')
|
||||
.replace('>', '>')
|
||||
.replace('{', '{')
|
||||
.replace('}', '}')
|
||||
)
|
||||
return "".join(parts)
|
||||
|
||||
def get_base_type(ts_type: str) -> str:
|
||||
"""
|
||||
Given a TypeScript-like type (e.g. "Drawing[][]"), return the
|
||||
'base' portion by stripping trailing "[]". For "Drawing[][]",
|
||||
returns "Drawing". For "Array.<Drawing>", you'd convert it first
|
||||
to "Drawing[]" then return "Drawing".
|
||||
"""
|
||||
while ts_type.endswith('[]'):
|
||||
ts_type = ts_type[:-2]
|
||||
return ts_type
|
||||
|
||||
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
|
||||
"""
|
||||
1) Convert each raw JSDoc type from Array.<T> to T[].
|
||||
2) Split union types if needed (usually they're provided as separate
|
||||
elements in 'types' already, but let's be safe).
|
||||
3) For each type, extract the base type (e.g. "Drawing" from "Drawing[]").
|
||||
4) If the base type matches an enumeration or class, link the entire
|
||||
T[]-based string.
|
||||
5) Join with " | ".
|
||||
"""
|
||||
|
||||
# Convert each raw type from JSDoc to TS
|
||||
converted = [convert_jsdoc_array_to_ts(t) for t in types] # e.g. ["Drawing[]", "Foo[]", ...]
|
||||
|
||||
# For each converted type (like "Drawing[]"), see if the base is in enumerations or classes
|
||||
def link_if_known(ts_type):
|
||||
base = get_base_type(ts_type) # e.g. "Drawing" from "Drawing[]"
|
||||
|
||||
# Check enumerations first
|
||||
for enum in enumerations:
|
||||
if enum['name'] == base:
|
||||
# Replace the entire token with a link
|
||||
return f"[{ts_type}]({root}Enumeration/{base}.md)"
|
||||
|
||||
# Check classes
|
||||
if base in classes:
|
||||
return f"[{ts_type}]({root}{base}/{base}.md)"
|
||||
|
||||
# Otherwise just return as-is
|
||||
return ts_type
|
||||
|
||||
# Build final list of possibly-linked types
|
||||
linked = [link_if_known(ts_t) for ts_t in converted]
|
||||
|
||||
# Join them with " | "
|
||||
param_types_md = r' \| '.join(linked)
|
||||
|
||||
# If there's still leftover angle brackets for generics, gently escape or link them
|
||||
# e.g. "Object.<string, number>" => "Object.<string, number>"
|
||||
# or do more specialized linking if you want to handle them deeper.
|
||||
def replace_leftover_generics(match):
|
||||
element = match.group(1).strip()
|
||||
return f"<{element}>"
|
||||
|
||||
param_types_md = re.sub(r'<([^<>]+)>', replace_leftover_generics, param_types_md)
|
||||
|
||||
return param_types_md
|
||||
|
||||
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
|
||||
content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n"
|
||||
content += generate_properties_markdown(properties, enumerations, classes)
|
||||
|
||||
content += "## Methods\n\n"
|
||||
for method in methods:
|
||||
method_name = method['name']
|
||||
content += f"- [{method_name}](./Methods/{method_name}.md)\n"
|
||||
|
||||
# Escape just before returning
|
||||
return escape_text_outside_code_blocks(content)
|
||||
|
||||
def generate_method_markdown(method, enumerations, classes, example_editor_name):
|
||||
method_name = method['name']
|
||||
description = method.get('description', 'No description provided.')
|
||||
description = correct_description(description)
|
||||
params = method.get('params', [])
|
||||
returns = method.get('returns', [])
|
||||
example = method.get('example', '')
|
||||
memberof = method.get('memberof', '')
|
||||
|
||||
content = f"# {method_name}\n\n{description}\n\n"
|
||||
|
||||
# Syntax
|
||||
param_list = ', '.join([param['name'] for param in params]) if params else ''
|
||||
content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n"
|
||||
if memberof:
|
||||
content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"
|
||||
|
||||
# Parameters
|
||||
content += "## Parameters\n\n"
|
||||
if params:
|
||||
content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
|
||||
content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
|
||||
for param in params:
|
||||
param_name = param.get('name', 'Unnamed')
|
||||
param_types = param.get('type', {}).get('names', []) if param.get('type') else []
|
||||
param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
|
||||
param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
|
||||
param_required = "Required" if not param.get('optional') else "Optional"
|
||||
param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)
|
||||
|
||||
content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
|
||||
else:
|
||||
content += "This method doesn't have any parameters.\n"
|
||||
|
||||
# Returns
|
||||
content += "\n## Returns\n\n"
|
||||
if returns:
|
||||
return_type_list = returns[0].get('type', {}).get('names', [])
|
||||
return_type_md = generate_data_types_markdown(return_type_list, enumerations, classes)
|
||||
content += return_type_md
|
||||
else:
|
||||
content += "This method doesn't return any data."
|
||||
|
||||
# Example
|
||||
if example:
|
||||
# Separate comment and code, remove JS comments
|
||||
if '```js' in example:
|
||||
comment, code = example.split('```js', 1)
|
||||
comment = remove_js_comments(comment)
|
||||
content += f"\n\n## Example\n\n{comment}\n\n```javascript {example_editor_name}\n{code.strip()}\n"
|
||||
else:
|
||||
# If there's no triple-backtick structure, just show it as code
|
||||
cleaned_example = remove_js_comments(example)
|
||||
content += f"\n\n## Example\n\n```javascript {example_editor_name}\n{cleaned_example}\n```\n"
|
||||
|
||||
return escape_text_outside_code_blocks(content)
|
||||
|
||||
def generate_properties_markdown(properties, enumerations, classes, root='../'):
|
||||
if properties is None:
|
||||
return ''
|
||||
|
||||
content = "## Properties\n\n"
|
||||
content += "| Name | Type | Description |\n"
|
||||
content += "| ---- | ---- | ----------- |\n"
|
||||
|
||||
for prop in properties:
|
||||
prop_name = prop['name']
|
||||
prop_description = prop.get('description', 'No description provided.')
|
||||
prop_description = remove_line_breaks(correct_description(prop_description))
|
||||
prop_types = prop['type']['names'] if prop.get('type') else []
|
||||
param_types_md = generate_data_types_markdown(prop_types, enumerations, classes, root)
|
||||
content += f"| {prop_name} | {param_types_md} | {prop_description} |\n"
|
||||
|
||||
# Escape outside code blocks
|
||||
return escape_text_outside_code_blocks(content)
|
||||
|
||||
def generate_enumeration_markdown(enumeration, enumerations, classes, example_editor_name):
|
||||
enum_name = enumeration['name']
|
||||
description = enumeration.get('description', 'No description provided.')
|
||||
description = correct_description(description)
|
||||
example = enumeration.get('example', '')
|
||||
|
||||
content = f"# {enum_name}\n\n{description}\n\n"
|
||||
|
||||
ptype = enumeration['type']['parsedType']
|
||||
if ptype['type'] == 'TypeUnion':
|
||||
enum_empty = True # is empty enum
|
||||
|
||||
content += "## Type\n\nEnumeration\n\n"
|
||||
content += "## Values\n\n"
|
||||
# Each top-level name in the union
|
||||
for raw_t in enumeration['type']['names']:
|
||||
ts_t = convert_jsdoc_array_to_ts(raw_t)
|
||||
|
||||
# Attempt linking: we compare the raw type to enumerations/classes
|
||||
if any(enum['name'] == raw_t for enum in enumerations):
|
||||
content += f"- [{ts_t}](../Enumeration/{raw_t}.md)\n"
|
||||
enum_empty = False
|
||||
elif raw_t in classes:
|
||||
content += f"- [{ts_t}](../{raw_t}/{raw_t}.md)\n"
|
||||
enum_empty = False
|
||||
elif ts_t.find('Api') == -1:
|
||||
content += f"- {ts_t}\n"
|
||||
enum_empty = False
|
||||
|
||||
if enum_empty == True:
|
||||
return None
|
||||
elif enumeration['properties'] is not None:
|
||||
content += "## Type\n\nObject\n\n"
|
||||
content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
|
||||
else:
|
||||
content += "## Type\n\n"
|
||||
# If it's not a union and has no properties, simply print the type(s).
|
||||
types = enumeration['type']['names']
|
||||
t_md = generate_data_types_markdown(types, enumerations, classes)
|
||||
content += t_md + "\n\n"
|
||||
|
||||
# Example
|
||||
if example:
|
||||
if '```js' in example:
|
||||
comment, code = example.split('```js', 1)
|
||||
comment = remove_js_comments(comment)
|
||||
content += f"\n\n## Example\n\n{comment}\n\n```javascript {example_editor_name}\n{code.strip()}\n"
|
||||
else:
|
||||
# If there's no triple-backtick structure
|
||||
cleaned_example = remove_js_comments(example)
|
||||
content += f"\n\n## Example\n\n```javascript {example_editor_name}\n{cleaned_example}\n```\n"
|
||||
|
||||
return escape_text_outside_code_blocks(content)
|
||||
|
||||
def process_doclets(data, output_dir, editor_name):
|
||||
classes = {}
|
||||
classes_props = {}
|
||||
enumerations = []
|
||||
editor_dir = os.path.join(output_dir, editor_name)
|
||||
example_editor_name = 'editor-'
|
||||
|
||||
if editor_name == 'Word':
|
||||
example_editor_name += 'docx'
|
||||
elif editor_name == 'Forms':
|
||||
example_editor_name += 'pdf'
|
||||
elif editor_name == 'Slide':
|
||||
example_editor_name += 'pptx'
|
||||
elif editor_name == 'Cell':
|
||||
example_editor_name += 'xlsx'
|
||||
|
||||
for doclet in data:
|
||||
if doclet['kind'] == 'class':
|
||||
class_name = doclet['name']
|
||||
classes[class_name] = []
|
||||
classes_props[class_name] = doclet.get('properties', None)
|
||||
elif doclet['kind'] == 'function':
|
||||
class_name = doclet.get('memberof')
|
||||
if class_name:
|
||||
if class_name not in classes:
|
||||
classes[class_name] = []
|
||||
classes[class_name].append(doclet)
|
||||
elif doclet['kind'] == 'typedef':
|
||||
enumerations.append(doclet)
|
||||
|
||||
# Process classes
|
||||
for class_name, methods in classes.items():
|
||||
class_dir = os.path.join(editor_dir, class_name)
|
||||
methods_dir = os.path.join(class_dir, 'Methods')
|
||||
os.makedirs(methods_dir, exist_ok=True)
|
||||
|
||||
# Write class file
|
||||
class_content = generate_class_markdown(
|
||||
class_name,
|
||||
methods,
|
||||
classes_props[class_name],
|
||||
enumerations,
|
||||
classes
|
||||
)
|
||||
write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)
|
||||
|
||||
# Write method files
|
||||
for method in methods:
|
||||
method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
|
||||
method_content = generate_method_markdown(method, enumerations, classes, example_editor_name)
|
||||
write_markdown_file(method_file_path, method_content)
|
||||
|
||||
if not method.get('example', ''):
|
||||
missing_examples.append(os.path.relpath(method_file_path, output_dir))
|
||||
|
||||
# Process enumerations
|
||||
enum_dir = os.path.join(editor_dir, 'Enumeration')
|
||||
os.makedirs(enum_dir, exist_ok=True)
|
||||
|
||||
for enum in enumerations:
|
||||
enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
|
||||
enum_content = generate_enumeration_markdown(enum, enumerations, classes, example_editor_name)
|
||||
if enum_content is None:
|
||||
continue
|
||||
|
||||
write_markdown_file(enum_file_path, enum_content)
|
||||
if not enum.get('example', ''):
|
||||
missing_examples.append(os.path.relpath(enum_file_path, output_dir))
|
||||
|
||||
def generate(output_dir):
|
||||
print('Generating Markdown documentation...')
|
||||
|
||||
generate_docs_json.generate(output_dir + 'tmp_json', md=True)
|
||||
for editor_name in editors:
|
||||
input_file = os.path.join(output_dir + 'tmp_json', editor_name + ".json")
|
||||
|
||||
shutil.rmtree(output_dir + f'/{editor_name.title()}')
|
||||
os.makedirs(output_dir + f'/{editor_name.title()}')
|
||||
|
||||
data = load_json(input_file)
|
||||
process_doclets(data, output_dir, editor_name.title())
|
||||
|
||||
shutil.rmtree(output_dir + 'tmp_json')
|
||||
print('Done')
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="Generate documentation")
|
||||
parser.add_argument(
|
||||
"destination",
|
||||
type=str,
|
||||
help="Destination directory for the generated documentation",
|
||||
nargs='?', # Indicates the argument is optional
|
||||
default="../../../../office-js-api/" # Default value
|
||||
)
|
||||
args = parser.parse_args()
|
||||
generate(args.destination)
|
||||
print("START_MISSING_EXAMPLES")
|
||||
print(",".join(missing_examples))
|
||||
print("END_MISSING_EXAMPLES")
|
||||
111
scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py
Normal file
111
scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py
Normal file
@ -0,0 +1,111 @@
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
import argparse
|
||||
import re
|
||||
|
||||
# Configuration files
|
||||
configs = [
|
||||
"./config/plugins/common.json",
|
||||
"./config/plugins/word.json",
|
||||
"./config/plugins/cell.json",
|
||||
"./config/plugins/slide.json",
|
||||
"./config/plugins/forms.json"
|
||||
]
|
||||
|
||||
root = '../../../..'
|
||||
|
||||
def generate(output_dir, md=False):
    """Run jsdoc for every plugin config, merge common doclets, attach examples.

    output_dir: directory receiving one <editor>.json doclet file per config.
    md: when False, a runnable "## Try it" snippet is appended to each
        documented description; when True, raw examples only (Markdown mode).
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # -X dumps jsdoc's doclets as JSON; stdout is redirected to the file.
        command = f"npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)

    # Snapshot the shared "common" doclets, then delete their file: they are
    # appended to every editor-specific file below instead.
    common_doclets_file = os.path.join(output_dir, 'common.json')
    with open(common_doclets_file, 'r', encoding='utf-8') as f:
        common_doclets_json = json.dumps(json.load(f))
    os.remove(common_doclets_file)

    # Append examples to JSON documentation
    for config in configs:
        if (config.find('common') != -1):
            continue

        editor_name = config.split('/')[-1].replace('.json', '')
        example_folder_name = editor_name  # name of folder with examples
        output_file = os.path.join(output_dir, editor_name + ".json")

        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Doclets at index >= start_common_doclet_idx came from common.json.
        start_common_doclet_idx = len(data)
        data += json.loads(common_doclets_json)

        # Modify JSON data
        for idx, doclet in enumerate(data):
            if idx == start_common_doclet_idx:
                example_folder_name = 'common'
            elif editor_name == 'forms':
                # Forms examples live under the word examples folder.
                example_folder_name = 'word'

            if 'see' in doclet:
                if doclet['see'] is not None:
                    # 'see' holds a repo-relative example path with an
                    # {Editor} placeholder.
                    doclet['see'][0] = doclet['see'][0].replace('{Editor}', example_folder_name.title())
                    file_path = f'{root}/' + doclet['see'][0]

                    if os.path.exists(file_path):
                        with open(file_path, 'r', encoding='utf-8') as see_file:
                            example_content = see_file.read()

                        # Extract the first line as a comment if it exists
                        lines = example_content.split('\n')
                        if lines[0].startswith('//'):
                            comment = lines[0] + '\n'
                            code_content = '\n'.join(lines[1:])
                        else:
                            comment = ''
                            code_content = example_content

                        doclet['examples'] = [remove_js_comments(comment) + code_content]

                        if md == False:
                            # Builder mode: embed a runnable snippet; forms
                            # documents run as "pdf" in document-builder.
                            document_type = editor_name
                            if "forms" == document_type:
                                document_type = "pdf"
                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'

        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)

    print("Documentation generation for builder completed.")
def remove_builder_lines(text):
    """Drop every line whose stripped form starts with "builder."."""
    kept = []
    for raw_line in text.splitlines():
        if raw_line.strip().startswith("builder."):
            continue
        kept.append(raw_line)
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JS comment markers while keeping the commented text itself."""
    # Drop a leading '//' (plus at most one space) on every line.
    without_single = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Drop the '/*' and '*/' delimiters, keeping what sits between them.
    without_multi = re.sub(r'/\*\s*|\s*\*/', '', without_single, flags=re.DOTALL)
    return without_multi.strip()
if __name__ == "__main__":
    # CLI entry point: one optional positional argument — destination
    # directory for the generated plugin JSON doclets.
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default=f"{root}/office-js-api-declarations/office-js-api-plugins"
    )
    args = parser.parse_args()
    generate(args.destination)
467
scripts/sdkjs_common/jsdoc/generate_docs_plugins_md.py
Normal file
467
scripts/sdkjs_common/jsdoc/generate_docs_plugins_md.py
Normal file
@ -0,0 +1,467 @@
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import argparse
|
||||
import generate_docs_plugins_json
|
||||
|
||||
# Configuration files
|
||||
editors = [
|
||||
"word",
|
||||
"cell",
|
||||
"slide",
|
||||
"forms"
|
||||
]
|
||||
|
||||
missing_examples = []
|
||||
|
||||
def load_json(file_path):
    """Parse the UTF-8 JSON file at *file_path* and return the result."""
    with open(file_path, encoding='utf-8') as handle:
        return json.load(handle)
def write_markdown_file(file_path, content):
    """Write *content* to *file_path* as UTF-8 text, replacing any existing file."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(content)
def remove_js_comments(text):
    """Delete JS comments entirely: whole // lines and /* ... */ blocks."""
    no_single = re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE)  # single-line
    no_multi = re.sub(r'/\*.*?\*/', '', no_single, flags=re.DOTALL)  # multi-line
    return no_multi.strip()
def correct_description(string):
    """Normalize doclet description markup for Markdown output.

    - <b>...</b> becomes **...**
    - <note>...</note> becomes '💡 ...'
    - None becomes a placeholder sentence.
    """
    if string is None:
        return 'No description provided.'

    # Bold tags map directly to Markdown emphasis markers.
    bolded = string.replace('<b>', '**').replace('</b>', '**')
    # Notes get a lightbulb prefix; DOTALL lets notes span lines.
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', bolded, flags=re.DOTALL)
def correct_default_value(value, enumerations, classes):
    """Render a parameter's default value as a (possibly linked) type string."""
    if value is None:
        return ''

    # Booleans are deliberately compared with ==, so 1/0 also normalize to
    # "true"/"false" — this mirrors the original comparison semantics.
    if value == True:
        rendered = "true"
    elif value == False:
        rendered = "false"
    else:
        rendered = str(value)

    return generate_data_types_markdown([rendered], enumerations, classes)
def remove_line_breaks(string):
    """Collapse every run of CR/LF characters into a single space."""
    return ' '.join(re.split(r'[\r\n]+', string))
def convert_jsdoc_array_to_ts(type_str: str) -> str:
    """Rewrite JSDoc 'Array.<T>' notation as TypeScript 'T[]'.

    Nested forms collapse too: 'Array.<Array.<string>>' -> 'string[][]'.
    """
    pattern = re.compile(r'Array\.<([^>]+)>')

    match = pattern.search(type_str)
    while match is not None:
        # Recursively convert whatever sits inside the angle brackets first.
        inner = convert_jsdoc_array_to_ts(match.group(1).strip())
        # Splice 'inner[]' in place of the matched 'Array.<...>' span.
        type_str = f"{type_str[:match.start()]}{inner}[]{type_str[match.end():]}"
        match = pattern.search(type_str)

    return type_str
def escape_text_outside_code_blocks(markdown: str) -> str:
    """
    Escape MDX-unsafe characters (<, >, {, }) as HTML entities, but only in
    the text outside fenced ``` code blocks; code blocks are kept verbatim.
    """
    # Capture fenced code blocks so they survive the split: even indices are
    # prose, odd indices (the captured group) are the code blocks themselves.
    parts = re.split(r'(```.*?```)', markdown, flags=re.DOTALL)

    for i in range(0, len(parts), 2):
        # Only escape in parts outside code blocks.
        # BUG FIX: the previous replacements were identity no-ops
        # ('<' -> '<'), an HTML-entity-decoded residue; restore real escaping
        # so MDX does not parse these characters as JSX.
        parts[i] = (parts[i]
                    .replace('<', '&lt;')
                    .replace('>', '&gt;')
                    .replace('{', '&#123;')
                    .replace('}', '&#125;')
                    )
    return "".join(parts)
def get_base_type(ts_type: str) -> str:
    """Strip all trailing '[]' suffixes: 'Drawing[][]' -> 'Drawing'."""
    stripped = ts_type
    while stripped.endswith('[]'):
        stripped = stripped[:-2]
    return stripped
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """
    Render a JSDoc type list as Markdown.

    1) Convert each raw JSDoc type from Array.<T> to T[].
    2) For each type, extract the base type (e.g. "Drawing" from "Drawing[]").
    3) If the base type matches a known enumeration or class, link the whole
       T[]-based token to its documentation page.
    4) Join the results with " | " and escape leftover generic brackets.
    """
    # Convert each raw type from JSDoc to TS, e.g. ["Drawing[]", "Foo[]"].
    converted = [convert_jsdoc_array_to_ts(t) for t in types]

    def link_if_known(ts_type):
        # Link the entire token (including any []) when its base is known.
        base = get_base_type(ts_type)

        # Check enumerations first.
        for enum in enumerations:
            if enum['name'] == base:
                return f"[{ts_type}]({root}Enumeration/{base}.md)"

        # Then classes.
        if base in classes:
            return f"[{ts_type}]({root}{base}/{base}.md)"

        # Otherwise just return as-is.
        return ts_type

    linked = [link_if_known(ts_t) for ts_t in converted]

    # Join them with " | " (escaped for Markdown tables).
    param_types_md = r' \| '.join(linked)

    # Escape leftover generic angle brackets, e.g. "Object.<string, number>"
    # => "Object.&lt;string, number&gt;", so MDX does not read them as JSX.
    # BUG FIX: the previous replacement emitted the brackets unescaped
    # (HTML-entity-decoded residue), defeating the escaping it documents.
    def replace_leftover_generics(match):
        element = match.group(1).strip()
        return f"&lt;{element}&gt;"

    param_types_md = re.sub(r'<([^<>]+)>', replace_leftover_generics, param_types_md)

    return param_types_md
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Build the Markdown page for a class: heading, properties table, method index."""
    header = f"# {class_name}\n\nRepresents the {class_name} class.\n\n"
    props_md = generate_properties_markdown(properties, enumerations, classes)
    method_links = "".join(
        f"- [{method['name']}](./Methods/{method['name']}.md)\n" for method in methods
    )
    page = header + props_md + "## Methods\n\n" + method_links

    # Escape just before returning
    return escape_text_outside_code_blocks(page)
def generate_method_markdown(method, enumerations, classes, example_editor_name):
    """
    Generates Markdown for a method doclet, relying only on `method['examples']`
    (array of strings). Ignores any single `method['example']` field.
    """
    method_name = method['name']
    description = correct_description(method.get('description', 'No description provided.'))
    params = method.get('params', [])
    returns = method.get('returns', [])
    memberof = method.get('memberof', '')

    # Use the 'examples' array only.
    examples = method.get('examples', [])

    content = f"# {method_name}\n\n{description}\n\n"

    # Syntax
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"

    # Parameters table
    content += "## Parameters\n\n"
    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)

            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"

    # Returns
    content += "\n## Returns\n\n"
    if returns:
        return_type_list = returns[0].get('type', {}).get('names', [])
        content += generate_data_types_markdown(return_type_list, enumerations, classes)
    else:
        content += "This method doesn't return any data."

    # Process examples array
    if examples:
        content += "\n\n## Examples\n\n" if len(examples) > 1 else "\n\n## Example\n\n"

        for i, ex_line in enumerate(examples, start=1):
            # Remove JS comments
            cleaned_example = remove_js_comments(ex_line).strip()

            # Attempt splitting if the user used ```js
            if '```js' in cleaned_example:
                comment, code = cleaned_example.split('```js', 1)
                comment = comment.strip()
                code = code.strip()
                if len(examples) > 1:
                    content += f"**Example {i}:**\n\n{comment}\n\n"

                content += f"```javascript {example_editor_name}\n{code}\n```\n"
            else:
                # BUG FIX: this branch previously interpolated `comment`,
                # which is undefined here (NameError on the first unfenced
                # example, or a stale value leaked from a prior iteration).
                # Unfenced examples get a plain numbered header instead.
                if len(examples) > 1:
                    content += f"**Example {i}:**\n\n"
                # No special fences, just show as code.
                content += f"```javascript {example_editor_name}\n{cleaned_example}\n```\n"

    return escape_text_outside_code_blocks(content)
def generate_properties_markdown(properties, enumerations, classes, root='../'):
    """Render a Markdown table describing a class's properties ('' when None)."""
    if properties is None:
        return ''

    rows = [
        "## Properties\n\n",
        "| Name | Type | Description |\n",
        "| ---- | ---- | ----------- |\n",
    ]

    for prop in properties:
        name = prop['name']
        desc = remove_line_breaks(correct_description(prop.get('description', 'No description provided.')))
        type_names = prop['type']['names'] if prop.get('type') else []
        types_md = generate_data_types_markdown(type_names, enumerations, classes, root)
        rows.append(f"| {name} | {types_md} | {desc} |\n")

    # Escape outside code blocks
    return escape_text_outside_code_blocks("".join(rows))
def generate_enumeration_markdown(enumeration, enumerations, classes, example_editor_name):
    """
    Generates Markdown documentation for a 'typedef' doclet, using only the
    `enumeration['examples']` array of strings.
    """
    enum_name = enumeration['name']
    description = correct_description(enumeration.get('description', 'No description provided.'))

    # Only use the 'examples' array.
    examples = enumeration.get('examples', [])

    content = f"# {enum_name}\n\n{description}\n\n"

    parsed_type = enumeration['type'].get('parsedType')
    if not parsed_type:
        # If parsedType is missing, just list 'type.names' if available.
        type_names = enumeration['type'].get('names', [])
        if type_names:
            content += "## Type\n\n"
            content += generate_data_types_markdown(type_names, enumerations, classes) + "\n\n"
    else:
        ptype = parsed_type['type']

        # 1) TypeUnion: render as an enumeration of (possibly linked) values.
        if ptype == 'TypeUnion':
            content += "## Type\n\nEnumeration\n\n"
            content += "## Values\n\n"
            for raw_t in enumeration['type']['names']:
                # Attempt linking to a known enumeration or class page.
                if any(enum['name'] == raw_t for enum in enumerations):
                    content += f"- [{raw_t}](../Enumeration/{raw_t}.md)\n"
                elif raw_t in classes:
                    content += f"- [{raw_t}](../{raw_t}/{raw_t}.md)\n"
                else:
                    content += f"- {raw_t}\n"

        # 2) TypeApplication (e.g. Object.<string, string>).
        elif ptype == 'TypeApplication':
            content += "## Type\n\nObject\n\n"
            type_names = enumeration['type'].get('names', [])
            if type_names:
                t_md = generate_data_types_markdown(type_names, enumerations, classes)
                content += f"**Type:** {t_md}\n\n"

        # 3) If properties are present, treat it like an object.
        if enumeration.get('properties') is not None:
            content += generate_properties_markdown(enumeration['properties'], enumerations, classes)

        # 4) Neither TypeUnion nor TypeApplication: just output the type names.
        if ptype not in ('TypeUnion', 'TypeApplication'):
            type_names = enumeration['type'].get('names', [])
            if type_names:
                content += "## Type\n\n"
                content += generate_data_types_markdown(type_names, enumerations, classes) + "\n\n"

    # Process examples array
    if examples:
        content += "\n\n## Examples\n\n" if len(examples) > 1 else "\n\n## Example\n\n"

        for i, ex_line in enumerate(examples, start=1):
            # Remove JS comments
            cleaned_example = remove_js_comments(ex_line).strip()

            # Attempt splitting if the user used ```js
            if '```js' in cleaned_example:
                comment, code = cleaned_example.split('```js', 1)
                comment = comment.strip()
                code = code.strip()
                if len(examples) > 1:
                    content += f"**Example {i}:**\n\n{comment}\n\n"

                content += f"```javascript {example_editor_name}\n{code}\n```\n"
            else:
                # BUG FIX: this branch previously interpolated `comment`,
                # which is undefined here (NameError on the first unfenced
                # example, or a stale value leaked from a prior iteration).
                # Unfenced examples get a plain numbered header instead.
                if len(examples) > 1:
                    content += f"**Example {i}:**\n\n"
                # No special fences, just show as code.
                content += f"```javascript {example_editor_name}\n{cleaned_example}\n```\n"

    return escape_text_outside_code_blocks(content)
def process_doclets(data, output_dir, editor_name):
    """Write per-class and per-enumeration Markdown files for one editor.

    data: list of JSDoc doclets for the editor.
    output_dir: documentation root; files go under <output_dir>/<editor_name>.
    editor_name: capitalized editor folder name, e.g. 'Word'.
    Side effect: appends generated files lacking examples to the module-level
    `missing_examples` list.
    """
    classes = {}        # class name -> list of method doclets
    classes_props = {}  # class name -> its 'properties' doclet field (or None)
    enumerations = []   # typedef doclets
    editor_dir = os.path.join(output_dir, editor_name)
    # Fence label used to tag example code blocks, e.g. 'editor-docx'.
    example_editor_name = 'editor-'

    if editor_name == 'Word':
        example_editor_name += 'docx'
    elif editor_name == 'Forms':
        example_editor_name += 'pdf'
    elif editor_name == 'Slide':
        example_editor_name += 'pptx'
    elif editor_name == 'Cell':
        example_editor_name += 'xlsx'

    # First pass: bucket doclets into classes (with their methods) and
    # typedef enumerations. Functions may appear before their class doclet.
    for doclet in data:
        if doclet['kind'] == 'class':
            class_name = doclet['name']
            classes[class_name] = []
            classes_props[class_name] = doclet.get('properties', None)
        elif doclet['kind'] == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                if class_name not in classes:
                    classes[class_name] = []
                classes[class_name].append(doclet)
        elif doclet['kind'] == 'typedef':
            enumerations.append(doclet)

    # Process classes
    for class_name, methods in classes.items():
        class_dir = os.path.join(editor_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)

        # Write class file
        class_content = generate_class_markdown(
            class_name,
            methods,
            classes_props[class_name],
            enumerations,
            classes
        )
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)

        # Write method files
        for method in methods:
            method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
            method_content = generate_method_markdown(method, enumerations, classes, example_editor_name)
            write_markdown_file(method_file_path, method_content)

            # Record methods documented without any example.
            if not method.get('examples', ''):
                missing_examples.append(os.path.relpath(method_file_path, output_dir))

    # Process enumerations
    enum_dir = os.path.join(editor_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)

    for enum in enumerations:
        enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
        enum_content = generate_enumeration_markdown(enum, enumerations, classes, example_editor_name)
        if enum_content is None:
            continue

        write_markdown_file(enum_file_path, enum_content)
        # Record enumerations documented without any example.
        if not enum.get('examples', ''):
            missing_examples.append(os.path.relpath(enum_file_path, output_dir))
def generate(output_dir):
    """Generate plugin Markdown documentation for every editor under *output_dir*."""
    print('Generating Markdown documentation...')

    # Normalize: drop a single trailing slash so the path concatenations
    # below stay consistent. endswith() also keeps an empty string safe,
    # where the old `output_dir[-1]` check raised IndexError.
    if output_dir.endswith('/'):
        output_dir = output_dir[:-1]

    # md=True keeps raw examples instead of appending "Try it" snippets.
    generate_docs_plugins_json.generate(output_dir + '/tmp_json', md=True)
    for editor_name in editors:
        input_file = os.path.join(output_dir + '/tmp_json', editor_name + ".json")

        # Recreate the per-editor output folder from scratch; ignore_errors
        # makes the first run (folder absent) work too.
        shutil.rmtree(output_dir + f'/{editor_name.title()}', ignore_errors=True)
        os.makedirs(output_dir + f'/{editor_name.title()}')

        data = load_json(input_file)
        process_doclets(data, output_dir, editor_name.title())

    # Clean up the intermediate doclets.
    shutil.rmtree(output_dir + '/tmp_json')
    print('Done')
if __name__ == "__main__":
    # CLI entry point: one optional positional argument — destination
    # directory for the generated plugin Markdown documentation.
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default="../../../../office-js-api/Plugins/"  # Default value
    )
    args = parser.parse_args()
    generate(args.destination)
    # Machine-readable marker block: lists the generated files whose doclets
    # had no usage example, for downstream tooling to parse.
    print("START_MISSING_EXAMPLES")
    print(",".join(missing_examples))
    print("END_MISSING_EXAMPLES")
248
scripts/sdkjs_common/jsdoc/generate_jsonl_dataset.py
Normal file
248
scripts/sdkjs_common/jsdoc/generate_jsonl_dataset.py
Normal file
@ -0,0 +1,248 @@
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import argparse
|
||||
import generate_docs_json
|
||||
from datetime import datetime
|
||||
|
||||
# Configuration files
|
||||
editors = [
|
||||
"word",
|
||||
"cell",
|
||||
"slide",
|
||||
"forms"
|
||||
]
|
||||
|
||||
root = '../../../..'
|
||||
missing_examples = []
|
||||
|
||||
def load_json(file_path):
    """Read *file_path* as UTF-8 text and deserialize its JSON payload."""
    with open(file_path, 'r', encoding='utf-8') as source:
        raw = source.read()
    return json.loads(raw)
def read_file_content(file_path):
    """Return the UTF-8 text of *file_path*, or '' when it cannot be read.

    Unreadable paths are recorded in the module-level `missing_examples`
    list so they can be reported at the end of the run.
    """
    try:
        with open(file_path, encoding='utf-8') as source:
            return source.read()
    except Exception:
        # Best-effort: remember the unreadable path and carry on.
        missing_examples.append(file_path)
        return ""
def extract_js_comments_as_text(text):
    """Collect all JS comment text (// and /* */) into one space-joined string."""
    # Single-line comments: text after // on lines starting with //.
    single = re.findall(r'^\s*//(.*)$', text, flags=re.MULTILINE)
    # Multi-line comments: text between /* and */.
    multi = re.findall(r'/\*(.*?)\*/', text, flags=re.DOTALL)
    # Strip each piece, drop empties, and join with single spaces.
    stripped = (chunk.strip() for chunk in single + multi)
    return " ".join(chunk for chunk in stripped if chunk)
def extract_examples_blocks(content: str):
    """Split example-file text into blocks of {"comments": [...], "code": str}.

    Comment groups are blank-line separated runs of // lines preceding code.
    A // comment appearing after code finishes the current block and starts a
    new one, so one file can yield multiple (comments, code) blocks.
    """
    blocks = []
    current_block = {"comments": [], "code": []}
    in_comment_section = True  # Collect comments until code appears
    current_comment_group = []  # Accumulate lines of the current comment

    for line in content.splitlines():
        stripped = line.strip()
        if not stripped:
            # Empty line
            if in_comment_section and current_comment_group:
                # Finish the current comment group
                comment_text = " ".join(current_comment_group)
                current_block["comments"].append(comment_text)
                current_comment_group = []
            elif not in_comment_section:
                # Empty line in the code – keep it as is
                current_block["code"].append(line)
            continue

        if stripped.startswith("//"):
            if in_comment_section:
                # Remove comment marker and extra spaces
                current_comment_group.append(extract_js_comments_as_text(stripped))
            else:
                # Comment after code starts – finish the current block and start a new one
                blocks.append({
                    "comments": current_block["comments"],
                    "code": "\n".join(current_block["code"]).rstrip()
                })
                current_block = {"comments": [], "code": []}
                in_comment_section = True
                # Start a new comment group with the current line
                current_comment_group = [stripped[2:].strip()]
        else:
            # Code line
            if in_comment_section:
                if current_comment_group:
                    comment_text = " ".join(current_comment_group)
                    current_block["comments"].append(comment_text)
                    current_comment_group = []
                in_comment_section = False
            current_block["code"].append(line)

    # Finalize any remaining comment group
    if in_comment_section and current_comment_group:
        comment_text = " ".join(current_comment_group)
        current_block["comments"].append(comment_text)
    # Save the last block if it's not empty
    if current_block["comments"] or current_block["code"]:
        blocks.append({
            "comments": current_block["comments"],
            "code": "\n".join(current_block["code"]).rstrip()
        })

    return blocks
def extract_examples_blocks_temp(content: str):
    """Split *content* into its leading // comment groups plus remaining code.

    Comment groups are blank-line separated; everything from the first
    non-comment, non-blank line onward is returned verbatim (rstripped) as
    code. A trailing comment group not followed by code or a blank line is
    discarded, matching the original scanner. Returns a single-element list:
    [{"comments": [...], "code": "..."}].
    """
    lines = content.splitlines()
    comment_blocks = []
    group = []
    code_start = None

    for index, line in enumerate(lines):
        text = line.strip()
        if not text:
            # Blank line: finish the current comment group, if any.
            if group:
                comment_blocks.append(" ".join(group))
                group = []
        elif text.startswith("//"):
            group.append(text[2:].strip())
        else:
            # First code line: flush comments and stop scanning.
            if group:
                comment_blocks.append(" ".join(group))
                group = []
            code_start = index
            break

    code_part = "" if code_start is None else "\n".join(lines[code_start:]).rstrip()
    return [{"comments": comment_blocks, "code": code_part}]
def create_entry(system_message, user_message, assistant_message, model):
    """Build one chat-style JSONL dataset record.

    The 'model' key is attached only when *model* is a non-empty string.
    """
    entry = {
        "created_at": datetime.now().isoformat(" "),
        "messages": [
            {"role": "system", "content": system_message},
            {"role": "user", "content": user_message},
            {"role": "assistant", "content": assistant_message}
        ],
        "recommended": False,
        "upvoted": True
    }

    # BUG FIX: the old check was `model is not ""` — an identity comparison
    # against a string literal (SyntaxWarning on modern Python; the result
    # depends on string interning). Compare by truthiness instead.
    if model:
        entry["model"] = model

    return entry
def process_doclets(doclets, output_entries, editor_name, model):
    """Convert doclets into chat-dataset entries appended to *output_entries*.

    Only doclets carrying a 'see' example-file reference are used. For each
    example block, one entry is created with a default question, plus one
    additional entry per comment extracted from the example file.
    """
    for doclet in doclets:
        kind = doclet.get("kind", "").lower()
        see = doclet.get("see", [])

        # The "see" field must always be present
        if not see:
            continue

        # Processing based on the "kind" value
        if kind == "function":
            method_name = doclet.get("name", "")
            memberof = doclet.get("memberof", "")
            # Functions must have both "name" (method_name) and "memberof" fields filled
            if not (method_name and memberof):
                continue
            system_message = (
                f"You act as an API expert for Onlyoffice {editor_name.title()} editor. "
                f"This task is an example for the function {method_name} in the class {memberof}."
            )
            default_user_message = f"How do I use the method {method_name} of {memberof} class?"

        elif kind == "class":
            class_name = doclet.get("name", "")
            system_message = (
                f"You act as an API expert for Onlyoffice {editor_name.title()} editor. "
                f"This task is an example for the class {class_name}."
            )
            default_user_message = f"How do I instantiate or work with the class {class_name}?"

        elif kind == "typedef":
            typedef_name = doclet.get("name", "")
            system_message = (
                f"You act as an API expert for Onlyoffice {editor_name.title()} editor. "
                f"This task is an example for the typedef {typedef_name}"
            )
            default_user_message = f"How do I use the typedef {typedef_name}?"

        else:
            # Other doclet kinds produce no dataset entries.
            continue

        # Read the content of the first file listed in the "see" field
        content = read_file_content(f'{root}/{see[0]}')
        if content == "":
            continue

        # now use only first block cause there is bad comments in examples
        blocks = extract_examples_blocks_temp(content)

        for block in blocks:
            assistant_message = block['code']

            # default entry
            output_entries.append(create_entry(system_message, default_user_message, assistant_message, model))

            # If the file content contains comments, create a separate entry for each one
            for comment in block['comments']:
                output_entries.append(create_entry(system_message, comment, assistant_message, model))
def generate(output_dir, model):
    """Build dataset.jsonl from the editors' JSON doclets under *output_dir*."""
    print('Generating documentation JSONL dataset...')

    # Start from a clean output directory.
    shutil.rmtree(output_dir, ignore_errors=True)
    os.makedirs(output_dir)

    # Produce the intermediate per-editor JSON doclet files.
    generate_docs_json.generate(f'{output_dir}/tmp_json')

    output_entries = []
    output_filename = "dataset.jsonl"

    for editor_name in editors:
        doclets = load_json(os.path.join(f'{output_dir}/tmp_json', editor_name + ".json"))
        process_doclets(doclets, output_entries, editor_name, model)

    # One JSON object per line (JSONL).
    with open(f'{output_dir}/{output_filename}', "w", encoding="utf-8") as out_file:
        out_file.writelines(json.dumps(entry, ensure_ascii=False) + "\n" for entry in output_entries)

    # Clean up the intermediate doclets.
    shutil.rmtree(f'{output_dir}/tmp_json')
    print('Done')
if __name__ == "__main__":
    # CLI entry point: optional destination directory and optional model tag
    # copied into each dataset record.
    parser = argparse.ArgumentParser(description="Generate documentation JSONL dataset")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default="../../../../office-js-api/dataset"  # Default value
    )
    parser.add_argument(
        "model",
        type=str,
        help="Type of model",
        nargs='?',  # Indicates the argument is optional
        default=""  # Default value
    )
    args = parser.parse_args()

    generate(args.destination, args.model)
    # Machine-readable marker block: lists example files that could not be
    # read, for downstream tooling to parse.
    print("START_MISSING_EXAMPLES")
    print(",".join(missing_examples))
    print("END_MISSING_EXAMPLES")
39
scripts/sdkjs_common/jsdoc/get_latest_branch.py
Normal file
39
scripts/sdkjs_common/jsdoc/get_latest_branch.py
Normal file
@ -0,0 +1,39 @@
|
||||
import subprocess
|
||||
|
||||
def fetch_branches():
    """Fetch all branches (no tags) from origin into remote-tracking refs.

    Raises subprocess.CalledProcessError if git exits non-zero (check=True).
    """
    subprocess.run(['git', 'fetch', '--no-tags', 'origin', '+refs/heads/*:refs/remotes/origin/*'], check=True)
def get_branches():
    """Return the repository's remote-tracking branch names, stripped."""
    listing = subprocess.run(['git', 'branch', '-r'], capture_output=True, text=True)
    return [entry.strip() for entry in listing.stdout.splitlines()]
def parse_version(version_str):
    """Parse 'v1.2.3' into the tuple (1, 2, 3).

    Non-parsable input yields (0, 0, 0) so comparisons still work.
    """
    try:
        components = version_str.lstrip('v').split('.')
        return tuple(int(component) for component in components)
    except ValueError:
        return (0, 0, 0)  # Default for non-parsable versions
def get_max_version_branch(branches):
    """Return the '/'-split parts of the hotfix/release branch with the
    highest version, or None when no matching branch exists.

    *branches* holds remote names like 'origin/release/v7.5.0'.
    """
    max_branch = None
    max_version = (0, 0, 0)

    for branch in branches:
        parts = branch.split('/')
        # BUG FIX: the old guard checked len(parts) >= 2 but then read
        # parts[2], so a bare 'origin/hotfix' raised IndexError. Require the
        # version component to actually be present.
        if len(parts) >= 3 and (parts[1] == 'hotfix' or parts[1] == 'release'):
            version = parse_version(parts[2])
            if version > max_version:
                max_version = version
                max_branch = parts

    return max_branch
if __name__ == "__main__":
|
||||
fetch_branches() # Fetch branches without tags
|
||||
branches = get_branches()
|
||||
max_version_branch = get_max_version_branch(branches)
|
||||
if max_version_branch:
|
||||
print('/'.join(max_version_branch[1:])) # Print only the branch name without origin
|
||||
7
scripts/sdkjs_common/jsdoc/package.json
Normal file
7
scripts/sdkjs_common/jsdoc/package.json
Normal file
@ -0,0 +1,7 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"jsdoc-to-markdown": "7.1.1",
|
||||
"dmd": "6.1.0",
|
||||
"handlebars": "4.7.7"
|
||||
}
|
||||
}
|
||||
189
scripts/sln.py
Normal file
189
scripts/sln.py
Normal file
@ -0,0 +1,189 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
import config
|
||||
import json
|
||||
import os
|
||||
|
||||
is_log = False
|
||||
|
||||
def is_exist_in_array(projects, proj):
|
||||
for p in projects:
|
||||
if p == proj:
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_full_projects_list(json_data, list):
|
||||
result = []
|
||||
for rec in list:
|
||||
if rec in json_data:
|
||||
result += get_full_projects_list(json_data, json_data[rec])
|
||||
else:
|
||||
result.append(rec)
|
||||
return result
|
||||
|
||||
def adjust_project_params(params):
|
||||
ret_params = params
|
||||
|
||||
# check aliases
|
||||
all_windows = []
|
||||
all_windows_xp = []
|
||||
all_linux = []
|
||||
all_mac = []
|
||||
all_android = []
|
||||
|
||||
for i in config.platforms:
|
||||
if (0 == i.find("win")):
|
||||
all_windows.append(i)
|
||||
if (-1 != i.find("xp")):
|
||||
all_windows_xp.append(i)
|
||||
elif (0 == i.find("linux")):
|
||||
all_linux.append(i)
|
||||
elif (0 == i.find("mac")):
|
||||
all_mac.append(i)
|
||||
elif (0 == i.find("android")):
|
||||
all_android.append(i)
|
||||
|
||||
if is_exist_in_array(params, "win"):
|
||||
ret_params += all_windows
|
||||
if is_exist_in_array(params, "!win"):
|
||||
ret_params += ["!" + x for x in all_windows]
|
||||
|
||||
if is_exist_in_array(params, "win_xp"):
|
||||
ret_params += all_windows_xp
|
||||
if is_exist_in_array(params, "!win_xp"):
|
||||
ret_params += ["!" + x for x in all_windows_xp]
|
||||
|
||||
if is_exist_in_array(params, "linux"):
|
||||
ret_params += all_linux
|
||||
if is_exist_in_array(params, "!linux"):
|
||||
ret_params += ["!" + x for x in all_linux]
|
||||
|
||||
if is_exist_in_array(params, "mac"):
|
||||
ret_params += all_mac
|
||||
if is_exist_in_array(params, "!mac"):
|
||||
ret_params += ["!" + x for x in all_mac]
|
||||
|
||||
if is_exist_in_array(params, "android"):
|
||||
ret_params += all_android
|
||||
if is_exist_in_array(params, "!android"):
|
||||
ret_params += ["!" + x for x in all_android]
|
||||
|
||||
return ret_params
|
||||
|
||||
def get_projects(pro_json_path, platform):
|
||||
json_path = os.path.abspath(pro_json_path)
|
||||
data = json.load(open(json_path))
|
||||
|
||||
root_dir_json = "../"
|
||||
if ("root" in data):
|
||||
root_dir_json = data["root"]
|
||||
|
||||
root_dir = os.path.dirname(json_path)
|
||||
if ("/" != root_dir[-1] and "\\" != root_dir[-1]):
|
||||
root_dir += "/"
|
||||
root_dir += root_dir_json
|
||||
|
||||
result = []
|
||||
modules = config.option("module").split(" ")
|
||||
for module in modules:
|
||||
if (module == ""):
|
||||
continue
|
||||
if not module in data:
|
||||
continue
|
||||
|
||||
# check aliases to modules
|
||||
records_src = data[module]
|
||||
records = get_full_projects_list(data, records_src)
|
||||
|
||||
#print(records)
|
||||
|
||||
for rec in records:
|
||||
params = []
|
||||
record = rec
|
||||
if (0 == rec.find("[")):
|
||||
pos = rec.find("]")
|
||||
if (-1 == pos):
|
||||
continue
|
||||
record = rec[pos+1:]
|
||||
header = rec[1:pos].replace(" ", "")
|
||||
params_tmp = rec[1:pos].split(",")
|
||||
for par in params_tmp:
|
||||
if (par != ""):
|
||||
params.append(par)
|
||||
|
||||
params = adjust_project_params(params)
|
||||
|
||||
if is_exist_in_array(result, record):
|
||||
continue
|
||||
|
||||
if is_log:
|
||||
print("params: " + ",".join(params))
|
||||
print("file: " + record)
|
||||
|
||||
if is_exist_in_array(params, "!" + platform):
|
||||
continue
|
||||
|
||||
platform_records = []
|
||||
platform_records += config.platforms
|
||||
platform_records += ["win", "win_xp", "linux", "mac", "android"]
|
||||
|
||||
# if one platform exists => all needed must exists
|
||||
is_needed_platform_exist = False
|
||||
for pl in platform_records:
|
||||
if is_exist_in_array(params, pl):
|
||||
is_needed_platform_exist = True;
|
||||
break
|
||||
|
||||
# if one config exists => all needed must exists
|
||||
is_needed_config_exist = False
|
||||
for item in params:
|
||||
if (0 == item.find("!")):
|
||||
continue
|
||||
if is_exist_in_array(platform_records, item):
|
||||
continue
|
||||
is_needed_config_exist = True
|
||||
break;
|
||||
|
||||
if is_needed_platform_exist:
|
||||
if not is_exist_in_array(params, platform):
|
||||
continue
|
||||
|
||||
config_params = config.option("config").split(" ") + config.option("features").split(" ")
|
||||
config_params = [x for x in config_params if x]
|
||||
|
||||
is_append = True
|
||||
for conf in config_params:
|
||||
if is_exist_in_array(params, "!" + conf):
|
||||
is_append = False
|
||||
break
|
||||
if is_needed_config_exist and not is_exist_in_array(params, conf):
|
||||
is_append = False
|
||||
break
|
||||
if is_append:
|
||||
result.append(root_dir + record)
|
||||
|
||||
# delete duplicates
|
||||
old_results = result
|
||||
result = []
|
||||
|
||||
map_results = set()
|
||||
for item in old_results:
|
||||
proj = item.replace("\\", "/")
|
||||
if proj in map_results:
|
||||
continue
|
||||
map_results.add(proj)
|
||||
result.append(proj)
|
||||
|
||||
if is_log:
|
||||
print(result)
|
||||
return result
|
||||
|
||||
# test example
|
||||
if __name__ == '__main__':
|
||||
# test
|
||||
config.parse()
|
||||
|
||||
is_log = True
|
||||
projects = get_projects("./../sln.json", "win_64")
|
||||
106
sln.json
Normal file
106
sln.json
Normal file
@ -0,0 +1,106 @@
|
||||
{
|
||||
"root" : "../",
|
||||
|
||||
"spell" : [
|
||||
"[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
|
||||
],
|
||||
|
||||
"core" : [
|
||||
|
||||
"core/Common/3dParty/cryptopp/project/cryptopp.pro",
|
||||
"core/Common/cfcpp/cfcpp.pro",
|
||||
|
||||
"core/UnicodeConverter/UnicodeConverter.pro",
|
||||
"core/Common/kernel.pro",
|
||||
"core/Common/Network/network.pro",
|
||||
|
||||
"core/DesktopEditor/graphics/pro/graphics.pro",
|
||||
|
||||
"core/PdfFile/PdfFile.pro",
|
||||
"core/DjVuFile/DjVuFile.pro",
|
||||
"core/XpsFile/XpsFile.pro",
|
||||
"core/HtmlFile2/HtmlFile2.pro",
|
||||
"core/Fb2File/Fb2File.pro",
|
||||
"core/EpubFile/CEpubFile.pro",
|
||||
"core/HwpFile/HWPFile.pro",
|
||||
|
||||
"core/Apple/IWork.pro",
|
||||
|
||||
"core/DocxRenderer/DocxRenderer.pro",
|
||||
|
||||
"core/DesktopEditor/doctrenderer/doctrenderer.pro",
|
||||
|
||||
"[!no_x2t]core/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro",
|
||||
"[!no_x2t]core/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro",
|
||||
"[!no_x2t]core/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro",
|
||||
|
||||
"[!no_x2t]core/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro",
|
||||
"[!no_x2t]core/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro",
|
||||
"[!no_x2t]core/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro",
|
||||
"[!no_x2t]core/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro",
|
||||
|
||||
"[!no_x2t]core/TxtFile/Projects/Linux/TxtXmlFormatLib.pro",
|
||||
"[!no_x2t]core/RtfFile/Projects/Linux/RtfFormatLib.pro",
|
||||
"[!no_x2t]core/OdfFile/Projects/Linux/OdfFormatLib.pro",
|
||||
|
||||
"[!no_x2t]core/OOXML/Projects/Linux/BinDocument/BinDocument.pro",
|
||||
|
||||
"[!no_x2t]core/X2tConverter/build/Qt/X2tConverter.pro",
|
||||
|
||||
"[win,linux,mac]core/DesktopEditor/AllFontsGen/AllFontsGen.pro",
|
||||
"[win,linux,mac]core/DesktopEditor/allthemesgen/allthemesgen.pro",
|
||||
|
||||
"[win,linux,mac]core/DesktopEditor/doctrenderer/app_builder/docbuilder.pro",
|
||||
|
||||
"[win,linux,mac]core/DesktopEditor/pluginsmanager/pluginsmanager.pro",
|
||||
|
||||
"[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro",
|
||||
|
||||
"spell",
|
||||
|
||||
"[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro",
|
||||
"[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro",
|
||||
"[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro",
|
||||
"[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro",
|
||||
"[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro"
|
||||
|
||||
],
|
||||
|
||||
"builder" : [
|
||||
"core",
|
||||
"core/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder_func_lib.pro"
|
||||
],
|
||||
|
||||
"server" : [
|
||||
"core"
|
||||
],
|
||||
|
||||
"multimedia" : [
|
||||
"[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro"
|
||||
],
|
||||
|
||||
"desktop" : [
|
||||
"core",
|
||||
"multimedia",
|
||||
|
||||
"core/DesktopEditor/xmlsec/src/ooxmlsignature.pro",
|
||||
|
||||
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro",
|
||||
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro",
|
||||
|
||||
"[win,linux]desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro",
|
||||
"[win,linux]desktop-apps/win-linux/ASCDocumentEditor.pro",
|
||||
|
||||
"[win]desktop-apps/win-linux/extras/projicons/ProjIcons.pro",
|
||||
"[win,!win_xp]desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro",
|
||||
"[win_xp]desktop-apps/win-linux/extras/online-installer/OnlineInstaller.pro"
|
||||
],
|
||||
|
||||
"mobile" : [
|
||||
"core"
|
||||
],
|
||||
|
||||
"osign" : [
|
||||
"[win,linux,mac]core/DesktopEditor/xmlsec/src/osign/lib/osign.pro"
|
||||
]
|
||||
}
|
||||
@ -1,4 +0,0 @@
|
||||
{
|
||||
"browser" : "chrome",
|
||||
"browserUrl" : "C:/Program Files/Google/Chrome/Application/chrome.exe"
|
||||
}
|
||||
@ -1,4 +0,0 @@
|
||||
{
|
||||
"browser" : "firefox",
|
||||
"browserUrl" : "C:/Program Files/Mozilla Firefox/firefox.exe"
|
||||
}
|
||||
@ -1,9 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
|
||||
os.environ["PUPPETEER_SKIP_CHROMIUM_DOWNLOAD"] = "true"
|
||||
base.cmd("npm", ["i", "puppeteer"])
|
||||
@ -1,64 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import json
|
||||
|
||||
def get_tests_in_dir(directory):
|
||||
files = []
|
||||
for file in glob.glob(directory + "/*.js"):
|
||||
if base.is_file(file):
|
||||
files.append(file)
|
||||
elif is_dir(file):
|
||||
files += get_tests_in_dir(file)
|
||||
return files
|
||||
|
||||
params = sys.argv[1:]
|
||||
if (0 == len(params)):
|
||||
print("use: run.py path_to_config [path_to_test]")
|
||||
exit(0)
|
||||
|
||||
config_path = params[0]
|
||||
test_file = "./tests"
|
||||
|
||||
if (1 < len(params)):
|
||||
test_file = params[1]
|
||||
|
||||
tests_array = [test_file]
|
||||
if base.is_dir(test_file):
|
||||
tests_array = get_tests_in_dir(test_file)
|
||||
|
||||
config_content = "{}"
|
||||
with open(config_path, "r") as config_path_loader:
|
||||
config_content = config_path_loader.read()
|
||||
|
||||
print(config_content)
|
||||
|
||||
config = json.loads(config_content)
|
||||
os.environ["PUPPETEER_SKIP_CHROMIUM_DOWNLOAD"] = "true"
|
||||
if "browser" in config:
|
||||
print("browser: " + config["browser"])
|
||||
os.environ["PUPPETEER_PRODUCT"] = config["browser"]
|
||||
|
||||
if "browserUrl" in config:
|
||||
print("browserUrl: " + config["browserUrl"])
|
||||
os.environ["PUPPETEER_EXECUTABLE_PATH"] = config["browserUrl"]
|
||||
|
||||
if not base.is_dir("./work_directory"):
|
||||
base.create_dir("./work_directory")
|
||||
base.create_dir("./work_directory/cache")
|
||||
base.create_dir("./work_directory/downloads")
|
||||
|
||||
for test in tests_array:
|
||||
print("run test: " + test)
|
||||
run_file = test + ".runned.js"
|
||||
base.copy_file("./tester.js", run_file)
|
||||
test_content = base.readFile(test)
|
||||
test_content = test_content.replace("await Tester.", "Tester.")
|
||||
test_content = test_content.replace("Tester.", "await Tester.")
|
||||
base.replaceInFile(run_file, "\"%%CODE%%\"", test_content)
|
||||
base.cmd("node", [run_file])
|
||||
base.delete_file(run_file)
|
||||
@ -1,171 +0,0 @@
|
||||
const puppeteer = require('puppeteer')
|
||||
const pathfs = require('path')
|
||||
const fs = require('fs');
|
||||
|
||||
function TesterImpl()
|
||||
{
|
||||
this.browser = null;
|
||||
this.page = null;
|
||||
this.width = 1500;
|
||||
this.height = 800;
|
||||
this.pixelRatio = 1;
|
||||
|
||||
this.cacheDir = pathfs.resolve("./work_directory/cache");
|
||||
this.downloadsDir = pathfs.resolve("./work_directory/downloads");
|
||||
this.downloadCounter = 0;
|
||||
|
||||
this.load = async function(url)
|
||||
{
|
||||
const head = { x: 100, y: 200 };
|
||||
this.browser = await puppeteer.launch({
|
||||
headless: false,
|
||||
product: process.env["PUPPETEER_PRODUCT"],
|
||||
args: [
|
||||
"--disable-infobars",
|
||||
`--window-size=${this.width+head.x},${this.height+head.y}`,
|
||||
"--disk-cache-dir=" + this.cacheDir
|
||||
],
|
||||
defaultViewport : {width: this.width, height: this.height, deviceScaleFactor : this.pixelRatio }
|
||||
});
|
||||
|
||||
this.page = await this.browser.newPage();
|
||||
await this.page.setViewport({ width: this.width, height: this.height });
|
||||
let waitObject = (process.env["PUPPETEER_PRODUCT"] === "firefox") ? { waitUntil: "networkidle0", timeout: 15000 } : {};
|
||||
await this.page.goto(url + "&autotest=enabled", waitObject);
|
||||
console.log("[tester] pageLoaded");
|
||||
return this.page;
|
||||
};
|
||||
|
||||
this.close = async function(nosleep)
|
||||
{
|
||||
if (true !== nosleep)
|
||||
await this.waitAutosave();
|
||||
await this.browser.close();
|
||||
};
|
||||
|
||||
this.sleep = async function(ms)
|
||||
{
|
||||
return await new Promise(resolve => setTimeout(resolve, ms));
|
||||
};
|
||||
|
||||
this.waitEditor = async function()
|
||||
{
|
||||
// TODO: wait first onEndRecalculate
|
||||
await this.sleep(5000);
|
||||
console.log("[tester] editorReady");
|
||||
};
|
||||
|
||||
this.waitAutosave = async function()
|
||||
{
|
||||
await this.sleep(5000);
|
||||
};
|
||||
|
||||
this.evaluateInMainFrame = async function(code)
|
||||
{
|
||||
return await this.page.evaluate(code);
|
||||
};
|
||||
this.evaluateInEditorFrame = async function(code)
|
||||
{
|
||||
const frame = await this.page.frames().find(frame => frame.name() === 'frameEditor');
|
||||
if (!frame)
|
||||
return;
|
||||
return await frame.evaluate(code);
|
||||
};
|
||||
|
||||
this.click = async function(id)
|
||||
{
|
||||
let res = await this.evaluateInEditorFrame("document.getElementById(\"" + id + "\").click(); \"[tester] clicked: " + id + "\"");
|
||||
//console.log(res);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.mouseClick = async function(x, y, options)
|
||||
{
|
||||
let res = await this.page.mouse.click(x, y, options);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.eval = async function(code)
|
||||
{
|
||||
let res = await this.evaluateInEditorFrame(code);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.keyDown = async function(key)
|
||||
{
|
||||
// https://pptr.dev/api/puppeteer.keyinput
|
||||
let res = await this.page.keyboard.down(key);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.keyUp = async function(key)
|
||||
{
|
||||
// https://pptr.dev/api/puppeteer.keyinput
|
||||
let res = await this.page.keyboard.up(key);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.keyClick = async function(key)
|
||||
{
|
||||
// https://pptr.dev/api/puppeteer.keyinput
|
||||
let res = await this.page.keyboard.down(key);
|
||||
res = await this.page.keyboard.up(key);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.keyPress = async function(key)
|
||||
{
|
||||
// https://pptr.dev/api/puppeteer.keyinput
|
||||
let res = await this.page.keyboard.press(key);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.input = async function(text)
|
||||
{
|
||||
let res = await this.page.keyboard.type(text);
|
||||
await this.sleep(200);
|
||||
return res;
|
||||
};
|
||||
|
||||
this.downloadFile = async function(format, path)
|
||||
{
|
||||
const tmpDir = pathfs.resolve(this.downloadsDir, "./tmp" + this.downloadCounter++);
|
||||
fs.mkdirSync(tmpDir);
|
||||
|
||||
// emulate download
|
||||
const client = await this.page.target().createCDPSession();
|
||||
await client.send("Page.setDownloadBehavior", {
|
||||
behavior: "allow",
|
||||
downloadPath: tmpDir
|
||||
});
|
||||
|
||||
await this.evaluateInEditorFrame("document.querySelectorAll('[data-layout-name=\"toolbar-file\"]')[0].click();");
|
||||
await this.sleep(200);
|
||||
await this.evaluateInEditorFrame("document.getElementsByClassName(\"svg-format-" + format + "\")[0].click();");
|
||||
await this.sleep(200);
|
||||
await this.evaluateInEditorFrame("document.getElementById(\"fm-btn-return\").click();");
|
||||
|
||||
await this.sleep(2000);
|
||||
|
||||
const files = fs.readdirSync(tmpDir);
|
||||
fs.copyFileSync(pathfs.resolve(tmpDir, "./" + files[0]), pathfs.resolve(path));
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
};
|
||||
}
|
||||
|
||||
const Tester = new TesterImpl;
|
||||
|
||||
try {
|
||||
(async () => {
|
||||
"%%CODE%%"
|
||||
})();
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
}
|
||||
@ -1,27 +0,0 @@
|
||||
Tester.load("path_to_file");
|
||||
Tester.waitEditor();
|
||||
|
||||
// down Enter
|
||||
Tester.keyClick("Enter");
|
||||
|
||||
// type text
|
||||
Tester.input("Hello World!");
|
||||
|
||||
Tester.keyPress("ArrowLeft");
|
||||
Tester.keyDown("Shift");
|
||||
for (let i = 0; i < 5; i++)
|
||||
Tester.keyPress("ArrowLeft");
|
||||
Tester.keyUp("Shift");
|
||||
|
||||
// bold
|
||||
Tester.click("id-toolbar-btn-bold");
|
||||
// italic
|
||||
Tester.mouseClick(115, 105);
|
||||
|
||||
// if needed
|
||||
Tester.waitAutosave();
|
||||
|
||||
Tester.downloadFile("docx", "./work_directory/new.docx")
|
||||
Tester.downloadFile("odt", "./work_directory/new.odt")
|
||||
|
||||
Tester.close(true);
|
||||
@ -7,6 +7,9 @@ import os
|
||||
import glob
|
||||
import shutil
|
||||
|
||||
sys.stdin.reconfigure(encoding='utf-8')
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
|
||||
params = sys.argv[1:]
|
||||
|
||||
if (3 > len(params)):
|
||||
@ -20,8 +23,14 @@ directory_input = params[0].replace("\\", "/")
|
||||
directory_output = params[1].replace("\\", "/")
|
||||
author_name = params[2]
|
||||
|
||||
if not os.path.exists(directory_output):
|
||||
os.mkdir(directory_output)
|
||||
|
||||
input_files = []
|
||||
count = 1
|
||||
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
|
||||
print(count, file)
|
||||
count += 1
|
||||
input_files.append(file.replace("\\", "/"))
|
||||
|
||||
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
|
||||
@ -30,7 +39,7 @@ def change_author_name(file_dist, output_file, author_name):
|
||||
app = "7za" if ("mac" == base.host_platform()) else "7z"
|
||||
base.cmd_exe(app, ["x", "-y", file_dist, "-o" + temp_dir, "docProps\\core.xml", "-r"])
|
||||
|
||||
with open(temp_dir + "/docProps/core.xml", 'r') as file:
|
||||
with open(temp_dir + "/docProps/core.xml", 'r', encoding='utf-8') as file:
|
||||
data = file.read()
|
||||
|
||||
creator_open = "<dc:creator>"
|
||||
@ -67,7 +76,7 @@ def change_author_name(file_dist, output_file, author_name):
|
||||
else:
|
||||
data = data[:last_tag_pos] + lastModified_open + author_name + lastModified_close + data[last_tag_pos:]
|
||||
|
||||
with open(temp_dir + "/docProps/core.xml", 'w') as file:
|
||||
with open(temp_dir + "/docProps/core.xml", 'w', encoding='utf-8') as file:
|
||||
file.write(data)
|
||||
|
||||
shutil.copyfile(file_dist, output_file)
|
||||
@ -80,7 +89,12 @@ for input_file in input_files:
|
||||
base.delete_dir(temp_dir)
|
||||
base.create_dir(temp_dir)
|
||||
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
|
||||
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + input_file.split(".")[-1]
|
||||
change_author_name(input_file, output_file, author_name)
|
||||
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]).replace(' ', '_') + u"." + input_file.split(".")[-1]
|
||||
|
||||
try:
|
||||
change_author_name(input_file, output_file, author_name)
|
||||
except:
|
||||
print("Error in converting document: ", input_file)
|
||||
continue
|
||||
base.delete_dir(temp_dir)
|
||||
output_cur += 1
|
||||
|
||||
114
tools/common/desktop_templates.py
Normal file
114
tools/common/desktop_templates.py
Normal file
@ -0,0 +1,114 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import base64
|
||||
|
||||
sys.stdin.reconfigure(encoding='utf-8')
|
||||
sys.stdout.reconfigure(encoding='utf-8')
|
||||
|
||||
base.configure_common_apps()
|
||||
|
||||
def change_property(data_src, name, value):
|
||||
data = data_src
|
||||
creator_open = "<dc:" + name + ">"
|
||||
creator_close = "</dc:" + name + ">"
|
||||
open_tag_pos = data.find(creator_open)
|
||||
if open_tag_pos == -1:
|
||||
creator_close_to_find = "<dc:" + name + "/>"
|
||||
else:
|
||||
creator_close_to_find = "</dc:" + name + ">"
|
||||
close_tag_pos = data.find(creator_close_to_find)
|
||||
last_tag_pos = data.find("</cp:coreProperties>")
|
||||
|
||||
if open_tag_pos != -1 and close_tag_pos != - 1:
|
||||
data = data[:open_tag_pos + len(creator_open)] + value + data[close_tag_pos:]
|
||||
elif close_tag_pos != - 1:
|
||||
data = data[:close_tag_pos] + creator_open + value + creator_close + data[close_tag_pos + len(creator_close_to_find):]
|
||||
else:
|
||||
data = data[:last_tag_pos] + creator_open + value + creator_close + data[last_tag_pos:]
|
||||
return data
|
||||
|
||||
def change_author_name(file_input):
|
||||
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
|
||||
base.create_dir(temp_dir)
|
||||
|
||||
app = "7za" if ("mac" == base.host_platform()) else "7z"
|
||||
base.cmd_exe(app, ["x", "-y", file_input, "-o" + temp_dir, "docProps/core.xml", "-r"])
|
||||
|
||||
with open(temp_dir + "/docProps/core.xml", 'r', encoding='utf-8') as file:
|
||||
data = file.read()
|
||||
|
||||
data = change_property(data, "creator", "")
|
||||
data = change_property(data, "lastModifiedBy", "")
|
||||
|
||||
with open(temp_dir + "/docProps/core.xml", 'w', encoding='utf-8') as file:
|
||||
file.write(data)
|
||||
|
||||
base.cmd_exe(app, ["a", "-r", file_input, temp_dir + "/docProps"])
|
||||
base.delete_dir(temp_dir)
|
||||
|
||||
def get_files(dir):
|
||||
arr_files = []
|
||||
for file in glob.glob(dir + "/*"):
|
||||
if base.is_file(file):
|
||||
arr_files.append(file)
|
||||
elif base.is_dir(file):
|
||||
arr_files += get_files(file)
|
||||
return arr_files
|
||||
|
||||
def get_local_path(base, src_dir):
|
||||
test1 = base.replace("\\", "/")
|
||||
test2 = src_dir.replace("\\", "/")
|
||||
return test2[len(test1)+1:]
|
||||
|
||||
params = sys.argv[1:]
|
||||
|
||||
if (3 > len(params)):
|
||||
print("use: convert.py path_to_x2t_directory path_to_input_directory path_to_output_directory")
|
||||
exit(0)
|
||||
|
||||
base.configure_common_apps()
|
||||
|
||||
x2t_directory = params[0]
|
||||
src_directory = params[1]
|
||||
dst_directory = params[2]
|
||||
|
||||
if base.is_dir(dst_directory):
|
||||
base.delete_dir(dst_directory)
|
||||
base.create_dir(dst_directory)
|
||||
|
||||
src_files = get_files(src_directory)
|
||||
|
||||
for file in src_files:
|
||||
directory = os.path.dirname(file)
|
||||
name = os.path.basename(file)
|
||||
directory_out_file = dst_directory + "/" + get_local_path(src_directory, directory)
|
||||
if not base.is_dir(directory_out_file):
|
||||
os.makedirs(directory_out_file, exist_ok=True)
|
||||
name_without_ext = os.path.splitext(name)[0]
|
||||
name_ext = os.path.splitext(name)[1][1:]
|
||||
|
||||
dst_ext = name_ext
|
||||
if ("docx" == name_ext) or ("dotx" == name_ext):
|
||||
dst_ext = "dotx"
|
||||
elif ("pptx" == name_ext) or ("potx" == name_ext):
|
||||
dst_ext = "potx"
|
||||
elif ("xlsx" == name_ext) or ("xltx" == name_ext):
|
||||
dst_ext = "xltx"
|
||||
|
||||
dst_name = name_without_ext
|
||||
if (len(dst_name) < 4) or (dst_name[0:4] != "[32]"):
|
||||
dst_name = "[32]" + base64.b32encode(name_without_ext.encode("utf-8")).decode("utf-8")
|
||||
|
||||
dst_file = directory_out_file + "/" + dst_name + "." + dst_ext
|
||||
|
||||
os.makedirs(directory_out_file, exist_ok=True)
|
||||
base.cmd_in_dir(x2t_directory, "x2t", [file, dst_file])
|
||||
|
||||
change_author_name(dst_file)
|
||||
|
||||
print(name_without_ext + " => " + dst_name)
|
||||
@ -1,4 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
@ -51,19 +52,43 @@ if base.is_file(directory_fonts_local + "/AllFonts.js"):
|
||||
directory_fonts = directory_fonts_local
|
||||
# ---------------------------------------------------
|
||||
|
||||
json_params = "{'spreadsheetLayout':{'fitToWidth':1,'fitToHeight':1},"
|
||||
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}}"
|
||||
|
||||
|
||||
json_params = "{"
|
||||
|
||||
json_params += "'spreadsheetLayout':{"
|
||||
|
||||
# True for fit, False for 100%
|
||||
isScaleSheetToPage = False
|
||||
|
||||
json_fit_text = "0"
|
||||
if isScaleSheetToPage:
|
||||
json_fit_text = "1"
|
||||
|
||||
json_params += "'fitToWidth':" + json_fit_text + ",'fitToHeight':" + json_fit_text + ","
|
||||
|
||||
if True:
|
||||
json_params += "'orientation':'landscape',"
|
||||
|
||||
page_margins = "'pageMargins':{'bottom':10,'footer':5,'header':5,'left':5,'right':5,'top':10}"
|
||||
page_setup = "'pageSetup':{'orientation':1,'width':210,'height':297,'paperUnits':0,'scale':100,'printArea':false,'horizontalDpi':600,'verticalDpi':600,'usePrinterDefaults':true,'fitToHeight':0,'fitToWidth':0}"
|
||||
|
||||
json_params += "'sheetsProps':{'0':{'headings':false,'printTitlesWidth':null,'printTitlesHeight':null," + page_margins + "," + page_setup + "}}},"
|
||||
|
||||
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}"
|
||||
json_params += "}"
|
||||
json_params = json_params.replace("'", """)
|
||||
|
||||
output_len = len(input_files)
|
||||
output_cur = 1
|
||||
for input_file in input_files:
|
||||
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
|
||||
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0])
|
||||
output_file_tmp = os.path.join(output_dir, "temp")
|
||||
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0].strip())
|
||||
xml_convert = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
|
||||
xml_convert += u"<TaskQueueDataConvert>"
|
||||
xml_convert += (u"<m_sFileFrom>" + input_file + u"</m_sFileFrom>")
|
||||
xml_convert += (u"<m_sFileTo>" + output_file + u".zip</m_sFileTo>")
|
||||
xml_convert += (u"<m_sFileTo>" + output_file_tmp + u".zip</m_sFileTo>")
|
||||
xml_convert += u"<m_nFormatTo>1029</m_nFormatTo>"
|
||||
xml_convert += (u"<m_sAllFontsPath>" + directory_fonts + u"/AllFonts.js</m_sAllFontsPath>")
|
||||
xml_convert += (u"<m_sFontDir>" + directory_fonts + u"</m_sFontDir>")
|
||||
@ -83,8 +108,9 @@ for input_file in input_files:
|
||||
base.cmd_in_dir(directory_x2t, "x2t", [temp_dir + "/to.xml"], True)
|
||||
base.delete_dir(temp_dir)
|
||||
base.create_dir(temp_dir)
|
||||
base.extract_unicode(output_file + u".zip", output_file)
|
||||
base.delete_file(output_dir + "/" + os.path.splitext(os.path.basename(input_file))[0] + ".zip")
|
||||
base.extract_unicode(output_file_tmp + u".zip", output_file_tmp)
|
||||
base.move_dir(str(output_file_tmp), str(output_file))
|
||||
base.delete_file(output_file_tmp + u".zip")
|
||||
output_cur += 1
|
||||
|
||||
base.delete_dir(temp_dir)
|
||||
|
||||
38
tools/linux/arm/arm32/Dockerfile
Normal file
38
tools/linux/arm/arm32/Dockerfile
Normal file
@ -0,0 +1,38 @@
|
||||
FROM arm32v7/ubuntu:16.04
|
||||
|
||||
# basic dependencies
|
||||
RUN apt-get -y update && \
|
||||
apt-get -y install wget xz-utils
|
||||
|
||||
# qt source
|
||||
RUN mkdir /source && cd /source && \
|
||||
wget -q https://download.qt.io/new_archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz && \
|
||||
tar -xf qt-everywhere-opensource-src-5.9.9.tar.xz
|
||||
|
||||
# build dependencies
|
||||
RUN apt-get -y install \
|
||||
build-essential \
|
||||
glib-2.0-dev \
|
||||
libglu1-mesa-dev \
|
||||
libgtk-3-dev \
|
||||
libpulse-dev \
|
||||
libasound2-dev \
|
||||
libatspi2.0-dev \
|
||||
libcups2-dev \
|
||||
libdbus-1-dev \
|
||||
libicu-dev \
|
||||
libgstreamer1.0-dev \
|
||||
libgstreamer-plugins-base1.0-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb* \
|
||||
libxi-dev \
|
||||
libxrender-dev \
|
||||
libxss-dev
|
||||
|
||||
# increase or decrease CORES value to change the number of parallel jobs while building qt
|
||||
ENV CORES=4
|
||||
|
||||
CMD cd /source/qt-everywhere-opensource-src-5.9.9 && \
|
||||
./configure -opensource -confirm-license -release -shared -accessibility -prefix /build -qt-zlib -qt-libpng -qt-libjpeg -qt-xcb -qt-pcre -no-sql-sqlite -no-qml-debug -gstreamer 1.0 -nomake examples -nomake tests -skip qtenginio -skip qtlocation -skip qtserialport -skip qtsensors -skip qtxmlpatterns -skip qt3d -skip qtwebview -skip qtwebengine && \
|
||||
make -j$CORES && \
|
||||
make install
|
||||
38
tools/linux/arm/arm64/Dockerfile
Normal file
38
tools/linux/arm/arm64/Dockerfile
Normal file
@ -0,0 +1,38 @@
|
||||
FROM arm64v8/ubuntu:16.04
|
||||
|
||||
# basic dependencies
|
||||
RUN apt-get -y update && \
|
||||
apt-get -y install wget xz-utils
|
||||
|
||||
# qt source
|
||||
RUN mkdir /source && cd /source && \
|
||||
wget -q https://download.qt.io/new_archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz && \
|
||||
tar -xf qt-everywhere-opensource-src-5.9.9.tar.xz
|
||||
|
||||
# build dependencies
|
||||
RUN apt-get -y install \
|
||||
build-essential \
|
||||
glib-2.0-dev \
|
||||
libglu1-mesa-dev \
|
||||
libgtk-3-dev \
|
||||
libpulse-dev \
|
||||
libasound2-dev \
|
||||
libatspi2.0-dev \
|
||||
libcups2-dev \
|
||||
libdbus-1-dev \
|
||||
libicu-dev \
|
||||
libgstreamer1.0-dev \
|
||||
libgstreamer-plugins-base1.0-dev \
|
||||
libx11-xcb-dev \
|
||||
libxcb* \
|
||||
libxi-dev \
|
||||
libxrender-dev \
|
||||
libxss-dev
|
||||
|
||||
# increase or decrease CORES value to change the number of parallel jobs while building qt
|
||||
ENV CORES=4
|
||||
|
||||
CMD cd /source/qt-everywhere-opensource-src-5.9.9 && \
|
||||
./configure -opensource -confirm-license -release -shared -accessibility -prefix /build -qt-zlib -qt-libpng -qt-libjpeg -qt-xcb -qt-pcre -no-sql-sqlite -no-qml-debug -gstreamer 1.0 -nomake examples -nomake tests -skip qtenginio -skip qtlocation -skip qtserialport -skip qtsensors -skip qtxmlpatterns -skip qt3d -skip qtwebview -skip qtwebengine && \
|
||||
make -j$CORES && \
|
||||
make install
|
||||
30
tools/linux/arm/build_qt.py
Executable file
30
tools/linux/arm/build_qt.py
Executable file
@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
|
||||
sys.path.append(__dir__name__ + '/../../../scripts')
|
||||
import base
|
||||
|
||||
def docker_build(image_name, dockerfile_dir, build_dir):
|
||||
base.cmd("docker", ["build", "-t", image_name, dockerfile_dir])
|
||||
base.cmd("docker", ["run", "--rm", "-v", build_dir + ":/build", image_name])
|
||||
base.cmd("docker", ["image", "rm", image_name])
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description='Build qt for linux arm architecture')
|
||||
parser.add_argument('build_dir', help='the path to build directory (directory may not exist)')
|
||||
parser.add_argument('-a', '--arch', action='store', help='target architecture (arm32 or arm64)', choices=['arm32', 'arm64'], required=True)
|
||||
args = parser.parse_args()
|
||||
|
||||
build_dir = args.build_dir
|
||||
if base.is_dir(build_dir):
|
||||
base.delete_dir(build_dir)
|
||||
base.create_dir(build_dir)
|
||||
|
||||
abs_build_path = os.path.abspath(build_dir)
|
||||
arch = args.arch
|
||||
docker_build('qt-' + arch, __dir__name__ + "/" + arch, abs_build_path)
|
||||
@ -55,14 +55,23 @@ def install_qt():
|
||||
base.cmd_in_dir("./qt-everywhere-opensource-src-5.9.9", "make", ["-j", "4"])
|
||||
base.cmd_in_dir("./qt-everywhere-opensource-src-5.9.9", "make", ["install"])
|
||||
return
|
||||
|
||||
def install_qt_prebuild():
|
||||
url_amd64 = "https://s3.eu-west-1.amazonaws.com/static-doc.teamlab.eu.com/qt/5.9.9/linux_amd64/qt_binary.7z"
|
||||
base.download(url_amd64, "./qt_amd64.7z")
|
||||
base.extract("./qt_amd64.7z", "./qt_build")
|
||||
base.create_dir("./qt_build/Qt-5.9.9")
|
||||
base.cmd("mv", ["./qt_build/qt_amd64", "./qt_build/Qt-5.9.9/gcc_64"])
|
||||
base.setup_local_qmake("./qt_build/Qt-5.9.9/gcc_64/bin")
|
||||
return
|
||||
|
||||
if not base.is_file("./node_js_setup_14.x"):
|
||||
print("install dependencies...")
|
||||
deps.install_deps()
|
||||
|
||||
if not base.is_dir("./qt_build"):
|
||||
if not base.is_dir("./qt_build"):
|
||||
print("install qt...")
|
||||
install_qt()
|
||||
install_qt_prebuild()
|
||||
|
||||
branch = get_branch_name("../..")
|
||||
|
||||
@ -103,6 +112,3 @@ build_tools_params = ["--branch", branch,
|
||||
|
||||
base.cmd_in_dir("../..", "./configure.py", build_tools_params)
|
||||
base.cmd_in_dir("../..", "./make.py")
|
||||
|
||||
|
||||
|
||||
|
||||
@ -39,9 +39,14 @@ def install_deps():
|
||||
"libxi-dev",
|
||||
"libxrender-dev",
|
||||
"libxss1",
|
||||
"libncurses5"]
|
||||
"libncurses5",
|
||||
"libncurses6",
|
||||
"curl",
|
||||
"libxkbcommon-dev",
|
||||
"libxkbcommon-x11-dev"]
|
||||
|
||||
base.cmd("sudo", ["apt-get", "install", "-y"] + packages)
|
||||
for package in packages:
|
||||
base.cmd("sudo", ["apt-get", "install", "-y", package], True)
|
||||
|
||||
# nodejs
|
||||
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
|
||||
@ -54,17 +59,10 @@ def install_deps():
|
||||
print("Installed Node.js version: " + str(nodejs_cur_version_major) + "." + str(nodejs_cur_version_minor))
|
||||
except:
|
||||
nodejs_cur = 1
|
||||
if (nodejs_cur < 14000):
|
||||
print("Node.js version cannot be less 14")
|
||||
if (nodejs_cur < 16000):
|
||||
print("Node.js version cannot be less 16")
|
||||
print("Reinstall")
|
||||
if (base.is_dir("./node_js_setup_14.x")):
|
||||
base.delete_dir("./node_js_setup_14.x")
|
||||
base.cmd("sudo", ["apt-get", "remove", "--purge", "-y", "nodejs"])
|
||||
base.download("https://deb.nodesource.com/setup_14.x", "./node_js_setup_14.x")
|
||||
base.cmd('curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -')
|
||||
base.cmd("sudo", ["bash", "./node_js_setup_14.x"])
|
||||
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
|
||||
base.cmd("sudo", ["npm", "install", "-g", "npm@6"])
|
||||
base.run_as_bat(["curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - &&sudo apt-get install -y nodejs"])
|
||||
else:
|
||||
print("OK")
|
||||
base.cmd("sudo", ["apt-get", "-y", "install", "npm", "yarn"], True)
|
||||
@ -86,4 +84,3 @@ def install_deps():
|
||||
|
||||
if __name__ == "__main__":
|
||||
install_deps()
|
||||
|
||||
|
||||
BIN
tools/linux/python3.tar.gz
Normal file
BIN
tools/linux/python3.tar.gz
Normal file
Binary file not shown.
Reference in New Issue
Block a user