Compare commits

..

179 Commits

Author SHA1 Message Date
8abdeb21da Fix bug with MySQL 2024-09-04 21:50:44 +03:00
f463bff49e Fixes for working by ssh 2024-09-03 15:09:37 +03:00
a817e2b046 Update generate_docs_plugins_json.py
delete generating missing files with json
2024-09-01 00:34:25 +03:00
3539e36bde Update generate_docs_json.py
delete generate missing examples with json files
2024-09-01 00:32:50 +03:00
6930a9ffe1 Update generate_docs_md.py 2024-08-31 12:58:41 +03:00
e0a44502b1 Update generate_docs_md.py 2024-08-31 12:49:30 +03:00
19e1bd5586 Update generate_docs_md.py 2024-08-31 10:27:23 +03:00
ea65ba02f1 Update generate_docs_md.py 2024-08-31 10:23:46 +03:00
8406e48009 Update generate_docs_md.py 2024-08-31 10:14:28 +03:00
a8f1d11cbc Update generate_docs_md.py 2024-08-31 01:48:55 +03:00
f245a4a9c6 Update generate_docs_md.py 2024-08-31 01:40:00 +03:00
597529a16d Fix crossbuild 2024-08-30 10:49:09 +03:00
9b9dba05c2 Update generate_docs_plugins_json.py
delete branch from path
2024-08-30 09:36:10 +03:00
2d0bbc824f Update generate_docs_json.py
delete branch name from path
2024-08-30 09:33:34 +03:00
fa523c673f Update generate_docs_plugins_json.py
change default
2024-08-30 09:29:54 +03:00
da1a4ba393 Update README.md
change default
2024-08-30 09:29:44 +03:00
e9c9712e52 Update generate_docs_json.py
change default path
2024-08-30 09:25:34 +03:00
78561ca659 Update README.md 2024-08-30 09:25:23 +03:00
1ad87383e3 Update README.md
fix Requirements and Installation
2024-08-30 08:31:45 +03:00
c29ac1549f Update README.md
fix default path
2024-08-30 08:26:58 +03:00
f09eeb19e5 Fix generation on linux 2024-08-29 23:19:21 +03:00
4b7b2c78a2 Merge pull request #871 from ONLYOFFICE/fix/jsdoc
[jsdoc] Fixed style
2024-08-29 00:31:34 -07:00
414af6bdb0 [jsdoc] Fixed style 2024-08-29 14:27:47 +07:00
df7288b275 Merge pull request #868 from ONLYOFFICE/feature/extend-apijs-load
[web-apps] copy api.js as api.js.tpl for server package
2024-08-28 14:39:25 +03:00
ce80953086 Merge pull request #870 from ONLYOFFICE/fix/jsdoc
[jsdoc] Fixed creating data types.
2024-08-28 04:05:29 -07:00
d1344dab71 [jsdoc] Fixed creating data types. 2024-08-28 18:00:53 +07:00
4f2ba4ae76 Merge pull request #869 from ONLYOFFICE/fix/jsdocs
[jsdoc] Fixed output path
2024-08-28 03:32:34 -07:00
6bd525c3b4 [jsdoc] Fixed output path 2024-08-28 17:29:12 +07:00
341671a612 Fix typo 2024-08-28 13:10:29 +03:00
9161aa1556 Add generate snapshots 2024-08-28 13:06:12 +03:00
70e9fbabce [web-apps] copy api.js as api.js.tpl for server package 2024-08-27 21:45:15 +03:00
a2c00deba2 Add function for portable test utilities 2024-08-26 14:20:14 +03:00
3baee0c14e Move pro to builder module 2024-08-22 14:07:05 +03:00
bd279d1ad7 Merge pull request #861 from ONLYOFFICE/feature/docbuilder-java
Build and deploy docbuilder Java wrapper
2024-08-19 04:51:08 -07:00
4d55a66307 Merge pull request #865 from ONLYOFFICE/fix/jsdoc
[jsdoc] Replacing line breaks with spaces
2024-08-16 06:09:23 -07:00
9481e01581 [jsdoc] Replacing line breaks with spaces 2024-08-16 20:04:51 +07:00
fe91bf9620 Merge pull request #864 from ONLYOFFICE/fix/jsdocs
[jsdoc] Fixed paths generation
2024-08-16 05:30:20 -07:00
d812ba379b [jsdoc] Fixed paths generation 2024-08-16 19:28:56 +07:00
e1cc7f3c83 Fix libraries not loading on mac 2024-08-14 19:24:34 +04:00
f50d5d2cd1 Fix path problems on mac 2024-08-14 15:54:07 +04:00
b3987b0ad5 Move build of Java wrapper to build_sln.py 2024-08-13 20:42:54 +04:00
63fbbc5603 Add missed library to deploy 2024-08-13 17:26:14 +03:00
dabbc31c09 Handling complex dependencies in project file 2024-08-13 10:38:33 +03:00
997bfa3dd5 Fix typo 2024-08-13 09:14:23 +03:00
50eca8aab5 Fix build 2024-08-13 07:45:29 +03:00
6e4a2e4d5e Add dictionariestester to core deploy 2024-08-13 00:25:56 +03:00
40e9938885 Add test for dicts & spellmodule to core 2024-08-13 00:25:06 +03:00
5bc8ca2266 Build and deploy JAR 2024-08-12 18:42:18 +04:00
4cdbfbfb86 Deploy JNI helper dynamic library 2024-08-12 18:24:51 +04:00
01575d1f2e Fix core and builder archive deploy (#860)
* Refactoring script parameters

* Add builder 7z deploy

* Refactoring core 7z deploy

* Small fix
2024-08-12 16:53:56 +03:00
8f75c75b80 Merge pull request #859 from ONLYOFFICE/fix/builder-docs
[jsdoc][bu] Removed example field from json docs
2024-08-07 04:24:42 -07:00
ebc084f9ea [jsdoc][bu] Removed example field from json docs 2024-08-07 18:24:02 +07:00
626efaf5cf Merge pull request #858 from ONLYOFFICE/fix/jsdoc
[jsdoc][plugins] Added examples field to json.
2024-08-07 03:50:29 -07:00
096ce99588 [jsdoc][plugins] Added examples field to json. 2024-08-07 17:45:40 +07:00
9ce103b31b Add returncode in runcommand function 2024-08-06 17:54:39 +03:00
13cbd84b58 Change documentType for pdf 2024-08-06 14:24:12 +03:00
a8912dff41 Refactoring 2024-08-06 13:46:42 +03:00
8b773614ba Fix builder rpm package deploy (#857) 2024-08-06 11:45:54 +03:00
d04f04f382 Merge pull request #856 from ONLYOFFICE/fix/plugins-docs
Jsdocs api plugins generation script
2024-08-05 03:35:02 -07:00
9a44dae4f9 Jsdocs api plugins generation script 2024-08-05 17:28:06 +07:00
07665dd93e Merge pull request #855 from ONLYOFFICE/fix/jsdoc
Fixed jsdoc md generation
2024-08-02 07:35:32 -07:00
eeca17e78b Fixed jsdoc md generation 2024-08-02 21:30:29 +07:00
f91264bc94 Merge pull request #854 from ONLYOFFICE/fix/docs-generation
[jsdoc] Fixed api docs generation
2024-08-02 06:34:17 -07:00
0983e67f21 [jsdoc] Fixed api docs generation 2024-08-02 20:31:11 +07:00
8e7db87554 Refactoring linux packages deploy (#853)
* Fix deploy desktop editors linux packages (#842)

* Small fix

* Fix make targets

* Small fix

* Refactoring linux packages deploy
2024-07-31 14:18:27 +03:00
9d000b2284 Merge pull request #852 from ONLYOFFICE/fix/generation-path
Added branch name to dist path for jsdoc json generation
2024-07-29 04:07:03 -07:00
e29fd0ca09 Added branch name to dist path for jsdoc json generation 2024-07-29 18:06:17 +07:00
dcfde5b5e7 Refactoring 2024-07-29 13:57:17 +03:00
871750d6ae Merge pull request #851 from ONLYOFFICE/fix/jsdoc
Fixed comments
2024-07-29 01:40:16 -07:00
d6b5dc0830 Fixed comments 2024-07-29 15:35:26 +07:00
e99a3e8978 Merge pull request #850 from ONLYOFFICE/fix/jsdoc
Fixed path in jsdoc generation script
2024-07-29 00:17:11 -07:00
13db6d3155 Fixed path in jsdoc generation script 2024-07-29 14:15:46 +07:00
f8845d4fc5 Merge pull request #849 from ONLYOFFICE/fix/jsdoc
Fixed docs generation scripts
2024-07-28 23:28:44 -07:00
efcfb00239 Fixed docs generation scripts 2024-07-29 13:27:41 +07:00
1727313e54 Merge pull request #848 from ONLYOFFICE/fix/jsdoc
Fixed getting doclets for docs generation
2024-07-26 08:55:03 -07:00
f6d55d07c1 Fixed getting doclets for docs generation 2024-07-26 22:51:35 +07:00
331bbadaad Merge pull request #847 from ONLYOFFICE/fix/documentation
Build json docs fixes
2024-07-26 06:47:02 -07:00
f012c604b8 Build json docs fixes 2024-07-26 20:42:09 +07:00
a8f6b0c599 Merge pull request #846 from ONLYOFFICE/feature/documentation
Added documentation generation scripts
2024-07-26 06:24:06 -07:00
e46d73869c Added documentation generation scripts 2024-07-26 20:20:55 +07:00
6bf413a008 Merge branch hotfix/v8.1.1 into release/v8.2.0 2024-07-26 08:02:47 +00:00
963c3bf212 Merge pull request #801 from ONLYOFFICE/feature/split_functions
Split functions
2024-07-25 04:05:41 +01:00
f7071569d9 Merge remote-tracking branch 'remotes/origin/release/v8.2.0' into feature/split_functions
# Conflicts:
#	scripts/develop/run_server.py
2024-07-25 03:56:14 +01:00
4e5eadbf82 For bug 68924 2024-07-22 00:07:45 +03:00
113e2e7821 Fixed builder interface script generation 2024-07-19 15:00:50 +07:00
21c8c699dd [desktop] for bug 62528 2024-07-18 14:15:51 +03:00
db36b7dc40 [develop] Fix mysql check. "SHOW DATABASES" returns lowercase result 2024-07-17 14:56:25 +03:00
38522989d3 [develop] Add db-name config option 2024-07-16 19:26:19 +03:00
3af65bf276 Version up 2024-07-12 11:12:32 +03:00
0a51c3bdea Fix bug 46933 2024-07-11 19:32:44 +03:00
ba6c3a8f38 Fix bug 68571 2024-07-07 23:43:03 +03:00
66e196b5ec [develop] Remove confusion with working dir in readme 2024-07-03 17:37:59 +03:00
d4a49d7137 Update github actions (#832) 2024-07-02 16:57:51 +03:00
1cca8af54f Merge branch 'develop' of https://github.com/ONLYOFFICE/build_tools into feature/split_functions 2024-07-02 12:15:59 +03:00
7e925fd931 Merge pull request #830 from ONLYOFFICE/feature/libvlc-linux
Correct RPATHs for libvlc build
2024-06-29 11:32:38 -07:00
45448171d4 Correct rpaths for libvlc build 2024-06-28 20:25:21 +04:00
64ae3d9029 Merge branch release/v8.1.0 into develop 2024-06-26 10:39:48 +00:00
edccac17f6 Merge pull request #827 from ONLYOFFICE/fix/readme
Fix/readme
2024-06-24 12:46:54 +03:00
1d36cad17e [develop] Clarify cwd for docker run command 2024-06-19 11:54:16 +03:00
08e6d5ba53 [docs] Fix PostgreSQL database creation commands in readme 2024-06-19 11:26:02 +03:00
6505ee1b35 Merge branch release/v8.1.0 into master 2024-06-19 08:19:22 +00:00
709612090a Refactoring 2024-06-14 17:17:00 +03:00
1af5c373e4 Refactoring desktop packages build (#824) 2024-06-14 17:10:49 +03:00
8181d187dd Fix previous commit 2024-06-14 17:04:27 +03:00
4b448e3305 Add new options for spreadsheets conversion 2024-06-14 16:40:18 +03:00
fd579511ae Update hard-coded version to v8.1.0 2024-06-11 13:21:19 +00:00
e166237e5d Fix previous commit 2024-06-04 23:29:20 +03:00
b934429e41 Rename modules for standard libs correctly work 2024-06-04 23:27:09 +03:00
d61c1da666 Resolve conflicts when importing modules 2024-06-04 09:21:34 +03:00
8f633771d9 Merge pull request #821 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-06-03 17:17:02 +03:00
684f478c54 Fix mac builder upload path (#820) 2024-06-03 16:29:31 +03:00
cb0099d746 Refactoring 2024-05-27 13:08:05 +03:00
a72ead91dc [develop] Add no-cache option to readme 2024-05-23 01:25:48 +03:00
fd7c3c6cf3 Merge pull request #815 from ONLYOFFICE/release/v8.1.0
Merge release/v8.1.0 into develop
2024-05-22 12:03:32 +03:00
5ef8abacfa Update vcredist checksums (#813) 2024-05-22 11:52:04 +03:00
a01221ffc6 Merge pull request #811 from ONLYOFFICE/fix/docbuilder-python-deploy
Fix docbuilder python deploy on linux and mac
2024-05-21 18:12:17 +03:00
cbd4ab2e15 Fix for linux 2024-05-21 19:00:19 +04:00
e70152b85b Correct deploy rpath for linux 2024-05-21 18:30:58 +04:00
8a9c9a587e Fix rpath for mac 2024-05-21 17:38:31 +04:00
29c15d9acd Fix typo 2024-05-19 12:03:08 +03:00
bf6773f666 Add python wrapper for builder 2024-05-19 11:23:05 +03:00
bba0ff87da Add script for qt arm builds 2024-05-16 13:31:32 +03:00
c9de5278ea Fix build 2024-05-16 12:10:50 +03:00
6f5a791a1f Add hack for android debug builds 2024-05-06 15:18:54 +03:00
1e7a720e74 Disable unused modules 2024-05-03 12:31:04 +03:00
10a7080928 Merge pull request #806 from ONLYOFFICE/feature/build-qt-arm
Qt build for linux arm
2024-04-26 19:09:57 +03:00
7349c64253 Add arm32 build 2024-04-26 19:18:48 +04:00
88649507c7 Fix problem with cores 2024-04-26 14:08:27 +04:00
cc503473f9 Add dockerfile and python script 2024-04-25 22:07:46 +04:00
10fcec1dd8 [license_checker] Update web-apps config 2024-04-24 17:15:47 +03:00
0679c0f6d7 [license_checker] Allow different license templates 2024-04-24 14:23:51 +03:00
a1a69bdbab Add build-only-branding param 2024-04-15 14:41:05 +03:00
da02b358e2 [develop] Check private repo existence(server-lockstorage) 2024-04-10 01:47:31 +03:00
60dcea6ff4 Fix creation xcframeworks if destination exist 2024-04-09 23:34:37 +03:00
b5796d5e6c Add v1 plugins engine to local server 2024-04-02 21:57:00 +03:00
6338fd58c3 Split functions 2024-03-19 17:39:28 +03:00
39b6841557 Fix build 2024-03-18 12:05:26 +03:00
f3a20e8e59 Merge pull request #800 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-15 10:45:45 +03:00
830df65573 Add mobile module support 2024-03-15 10:44:58 +03:00
2aeb9e1315 Fix build on linux 2024-03-14 21:55:49 +03:00
696c48c251 Merge pull request #799 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-14 20:08:54 +03:00
dcf02e7e93 Fix mobile package 2024-03-14 20:03:40 +03:00
581091591b Fix typo 2024-03-14 19:39:49 +03:00
0e6f1a064d Change project type to json 2024-03-14 16:45:40 +03:00
70975098e2 Add web-apps js maps deploy (#796)
* Exclude js maps

* Add web-apps js maps deploy
2024-03-14 14:10:56 +03:00
5b27f9843f test commit 2024-03-14 09:38:37 +03:00
71e29a6599 Fix windows build 2024-03-14 09:38:17 +03:00
6fd43a4b18 Add support short names for ndk 2024-03-14 09:19:09 +03:00
11f207fbe2 Fix typo 2024-03-14 09:12:38 +03:00
6559d589dd Merge pull request #798 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-13 18:33:44 +03:00
b7e9acc242 Merge pull request #797 from ONLYOFFICE/fix/8.0.2
Fix/8.0.2
2024-03-13 18:33:13 +03:00
bfd1cd0555 Fix build with old ndk 2024-03-13 17:14:01 +03:00
590dffdb78 Revert ndk version 2024-03-13 11:24:49 +03:00
0205dd6853 Refactoring 2024-03-10 21:53:45 +03:00
cd03a42c1b Fix packages build (#793) 2024-03-05 11:16:34 +03:00
c1a8d181d2 Fix desktop package build (#792) 2024-03-01 19:04:24 +03:00
a17d5e04bb Remove unused dependency 2024-02-27 14:40:42 +03:00
e719ae24f0 Merge branch hotfix/v8.0.1 into master 2024-02-26 07:32:18 +00:00
b4922e6899 Merge pull request #788 from ONLYOFFICE/hotfix/v8.0.1
Hotfix/v8.0.1
2024-02-07 12:46:01 +03:00
d8c2505fb8 Fix xp build without path env 2024-02-07 12:44:24 +03:00
02426e413f Switch python2 version to version from bootstrap 2024-02-07 12:17:40 +03:00
bd05971ebb Patch python script on windows 2024-02-06 23:16:45 +03:00
4e12692325 Fix build 2024-02-06 20:02:22 +03:00
f7ea69acc9 Update VCRedist (#787) 2024-02-06 17:44:15 +03:00
3640cea64d Update hard-coded version to v8.0.1 2024-02-06 14:27:51 +00:00
f5ac8ac39d Merge branch release/v8.0.0 into develop 2024-02-05 08:38:40 +00:00
f801e77208 Merge branch release/v8.0.0 into master 2024-01-30 11:23:27 +00:00
2a8c5ea9eb Disable drawio by default 2024-01-26 14:17:16 +03:00
181a42e344 Fix xp plugins (desktop) 2024-01-22 12:14:58 +03:00
a0511ca3ac Fix build js for native 2024-01-20 21:24:02 +03:00
0b48f3a67f Refactoring build native scripts 2024-01-18 17:05:17 +03:00
bbdb9e0107 Merge pull request #774 from ONLYOFFICE/release/v8.0.0
Release/v8.0.0
2023-12-07 22:28:19 +03:00
d468b93e9f Merge pull request #769 from ONLYOFFICE/fix/license-checker-readme
[license_checker] update Readme for allowListFile
2023-11-28 16:37:33 +03:00
188ad0057f Merge pull request #770 from ONLYOFFICE/release/v7.6.0
Release/v7.6.0
2023-11-28 10:58:33 +03:00
bde91e3dbf [license_checker] update Readme for allowListFile 2023-11-25 22:08:51 +03:00
fa15db70c9 Merge branch release/v7.6.0 into develop (#765)
* Update hard-coded version to v7.6.0

* Fix vcredist [2] (#763)

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2023-11-16 17:32:35 +03:00
3d884963a7 Merge pull request #762 from ONLYOFFICE/release/v7.6.0
Merge branch release/v7.6.0 into develop
2023-11-15 17:08:32 +03:00
8eb2d689fd Merge branch hotfix/v7.5.1 into develop 2023-11-15 12:46:10 +00:00
a2639afd7a Merge branch hotfix/v7.5.1 into master 2023-10-31 14:39:47 +00:00
78 changed files with 3070 additions and 1394 deletions

View File

@ -1,25 +1,24 @@
name: Markdown check
name: Markdown Lint
on:
workflow_dispatch:
push:
branches:
- '*'
- '**'
paths:
- '*.md'
- 'develop/*.md'
- 'scripts/**.md'
- '.markdownlint.jsonc'
jobs:
markdownlint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: DavidAnson/markdownlint-cli2-action@v9
- uses: actions/checkout@v4
- uses: DavidAnson/markdownlint-cli2-action@v16
with:
command: config
globs: |
.markdownlint.jsonc
*.md
develop/*.md
scripts/**.md
scripts/**.md

View File

@ -3,16 +3,13 @@ name: Update hard-coded version
on: workflow_dispatch
jobs:
update-version:
if: >-
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
contains(github.ref, 'refs/heads/release/v') }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
token: ${{ secrets.PUSH_TOKEN }}
@ -25,9 +22,9 @@ jobs:
run: echo "${{ env.version }}" > version
- name: Commit & push changes
uses: EndBug/add-and-commit@v8
uses: EndBug/add-and-commit@v9
with:
author_name: github-actions[bot]
author_email: github-actions[bot]@users.noreply.github.com
message: Update hard-coded version to v${{ env.version }}
message: Update hard-coded version to ${{ env.version }}
add: version

2
.gitignore vendored
View File

@ -12,3 +12,5 @@ tests/puppeteer/node_modules
tests/puppeteer/work_directory
tests/puppeteer/package.json
tests/puppeteer/package-lock.json
scripts/sdkjs_common/jsdoc/node_modules
scripts/sdkjs_common/jsdoc/package-lock.json

View File

@ -196,9 +196,8 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
**Note**: The created database must have **onlyoffice** both for user and password.
```bash
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
```
3. Configure the database:

149
build.pro
View File

@ -1,149 +0,0 @@
TEMPLATE = subdirs
ROOT_DIR=$$PWD/..
DEPLOY_DIR=$$PWD/deploy
CORE_ROOT_DIR=$$ROOT_DIR/core
include($$PWD/common.pri)
CONFIG += ordered
core:CONFIG += core_libraries
builder:CONFIG += core_libraries
desktop:CONFIG += core_libraries
server:CONFIG += core_libraries
mobile:CONFIG += core_libraries
!core_libraries:CONFIG += no_x2t
!core_libraries:CONFIG += no_use_common_binary
!core_libraries:CONFIG += no_tests
core_windows {
desktop:CONFIG += core_and_multimedia
}
core_linux {
desktop:CONFIG += core_and_multimedia
}
core_mac {
CONFIG += no_desktop_apps
}
core_ios {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
core_android {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
core_libraries {
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
addSubProject(cfcpp, $$CORE_ROOT_DIR/Common/cfcpp/cfcpp.pro)
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
cryptopp)
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
unicodeconverter)
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
kernel unicodeconverter)
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
kernel unicodeconverter)
addSubProject(pdffile, $$CORE_ROOT_DIR/PdfFile/PdfFile.pro,\
kernel unicodeconverter graphics)
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
kernel unicodeconverter graphics pdffile)
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
kernel unicodeconverter graphics pdffile)
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
kernel unicodeconverter graphics network)
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
kernel unicodeconverter graphics)
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
kernel unicodeconverter graphics htmlfile2)
}
!no_x2t {
addSubProject(docxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro)
addSubProject(pptxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro)
addSubProject(xlsbformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro)
addSubProject(docformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro)
addSubProject(pptformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro)
addSubProject(xlsformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro)
addSubProject(vbaformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro)
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/TxtFile/Projects/Linux/TxtXmlFormatLib.pro)
addSubProject(rtfformat, $$CORE_ROOT_DIR/RtfFile/Projects/Linux/RtfFormatLib.pro)
addSubProject(odffile, $$CORE_ROOT_DIR/OdfFile/Projects/Linux/OdfFormatLib.pro)
addSubProject(bindocument, $$CORE_ROOT_DIR/OOXML/Projects/Linux/BinDocument/BinDocument.pro)
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
docxformat pptxformat xlsbformat docformat pptformat xlsformat vbaformat txtxmlformat rtfformat odffile cfcpp bindocument fb2file epubfile docxrenderer)
}
!no_use_common_binary {
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
kernel unicodeconverter graphics)
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
kernel unicodeconverter graphics)
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
kernel unicodeconverter graphics doctrenderer)
addSubProject(pluginsmanager, $$CORE_ROOT_DIR/DesktopEditor/pluginsmanager/pluginsmanager.pro,\
kernel)
addSubProject(vboxtester, $$CORE_ROOT_DIR/DesktopEditor/vboxtester/vboxtester.pro,\
kernel)
}
!no_tests {
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
addSubProject(x2ttester, $$CORE_ROOT_DIR/Test/Applications/x2tTester/x2ttester.pro)
addSubProject(metafiletester, $$CORE_ROOT_DIR/Test/Applications/MetafileTester/MetafileTester.pro)
#TODO:
!linux_arm64:addSubProject(ooxml_crypt, $$CORE_ROOT_DIR/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro)
}
core_and_multimedia {
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
kernel unicodeconverter graphics)
}
desktop {
message(desktop)
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
kernel unicodeconverter graphics)
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdffile djvufile xpsfile)
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
documentscore)
!core_mac {
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
documentscore)
}
!no_desktop_apps {
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
documentscore videoplayer)
core_windows:!build_xp:addSubProject(updatedaemon, $$ROOT_DIR/desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro)
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
documentscore videoplayer)
}
}
mobile {
message(mobile)
!desktop {
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
}
}
osign {
addSubProject(osign, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/osign/lib/osign.pro)
}

View File

@ -21,12 +21,13 @@ parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp"
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="https", help="can be used only if update is set to true - 'https', 'ssh'")
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="auto", help="can be used only if update is set to true - 'https', 'ssh'")
parser.add_option("--branding", action="store", type="string", dest="branding", default="", help="provides branding path")
parser.add_option("--branding-name", action="store", type="string", dest="branding-name", default="", help="provides branding name")
parser.add_option("--branding-url", action="store", type="string", dest="branding-url", default="", help="provides branding url")

View File

@ -1,3 +1,3 @@
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, drawio, zotero"
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition"
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, zotero"
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition, drawio"
sdkjs-addons="sdkjs-forms"

View File

@ -45,7 +45,7 @@ instruction show how to use docker without sudo.
```bash
cd build_tools/develop
docker pull onlyoffice/documentserver
docker build -t documentserver-develop .
docker build --no-cache -t documentserver-develop .
```
**Note**: The dot at the end is required.
@ -61,7 +61,6 @@ Clone development modules to the work dir
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
```bash
cd ../..
git clone https://github.com/ONLYOFFICE/sdkjs.git
git clone https://github.com/ONLYOFFICE/web-apps.git
git clone https://github.com/ONLYOFFICE/server.git
@ -75,14 +74,15 @@ along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow.
The folders `server` is optional
**Note**: Run command with the current working directory
containing `sdkjs`, `web-apps`...
**Note**: ONLYOFFICE server uses port 80.
Look for another application using port 80 and stop it
**Note**: Server start with `sdkjs` and `web-apps` takes 15 minutes
and takes 20 minutes with `server`
**Note**: Run command from work dir with development modules
### docker run on Windows (PowerShell)
**Note**: Run PowerShell as administrator to fix EACCES error when installing

31
make.py
View File

@ -1,19 +1,32 @@
#!/usr/bin/env python
import os
import sys
sys.path.append('scripts')
sys.path.append('scripts/develop')
sys.path.append('scripts/develop/vendor')
sys.path.append('scripts/core_common')
sys.path.append('scripts/core_common/modules')
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__name__ + '/scripts')
sys.path.append(__dir__name__ + '/scripts/develop')
sys.path.append(__dir__name__ + '/scripts/develop/vendor')
sys.path.append(__dir__name__ + '/scripts/core_common')
sys.path.append(__dir__name__ + '/scripts/core_common/modules')
sys.path.append(__dir__name__ + '/scripts/core_common/modules/android')
import config
import base
import build
import build_sln
import build_js
import build_server
import deploy
import make_common
import develop
import argparse
base.check_python()
parser = argparse.ArgumentParser(description="options")
parser.add_argument("--build-only-branding", action="store_true")
args = parser.parse_args()
if (args.build_only_branding):
base.set_env("OO_BUILD_ONLY_BRANDING", "1")
# parse configuration
config.parse()
@ -59,13 +72,15 @@ if ("1" == config.option("update")):
base.configure_common_apps()
# developing...
develop.make();
develop.make()
# check only js builds
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
build_js.make()
exit(0)
#base.check_tools()
# core 3rdParty
make_common.make()
@ -79,7 +94,7 @@ if config.check_option("module", "desktop"):
base.set_env("DESKTOP_URL_UPDATES_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcastdev.json")
# build
build.make()
build_sln.make()
# js
build_js.make()

View File

@ -10,15 +10,17 @@ import package_utils as utils
# parse
parser = argparse.ArgumentParser(description="Build packages.")
parser.add_argument("-P", "--platform", dest="platform", type=str,
action="store", help="Defines platform", required=True)
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
action="store", help="Defines targets", required=True)
parser.add_argument("-R", "--branding", dest="branding", type=str,
action="store", help="Provides branding path")
action="store", help="Defines platform", required=True)
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
action="store", help="Defines targets", required=True)
parser.add_argument("-V", "--version", dest="version", type=str,
action="store", help="Defines version")
action="store", help="Defines version")
parser.add_argument("-B", "--build", dest="build", type=str,
action="store", help="Defines build")
action="store", help="Defines build")
parser.add_argument("-H", "--branch", dest="branch", type=str,
action="store", help="Defines branch")
parser.add_argument("-R", "--branding", dest="branding", type=str,
action="store", help="Provides branding path")
args = parser.parse_args()
# vars
@ -29,8 +31,16 @@ common.targets = args.targets
common.clean = "clean" in args.targets
common.sign = "sign" in args.targets
common.deploy = "deploy" in args.targets
common.version = args.version if args.version else utils.get_env("BUILD_VERSION", "0.0.0")
common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0")
if args.version: common.version = args.version
else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0")
utils.set_env("PRODUCT_VERSION", common.version)
utils.set_env("BUILD_VERSION", common.version)
if args.build: common.build = args.build
else: common.build = utils.get_env("BUILD_NUMBER", "0")
utils.set_env("BUILD_NUMBER", common.build)
if args.branch: common.branch = args.branch
else: common.branch = utils.get_env("BRANCH_NAME", "null")
utils.set_env("BRANCH_NAME", common.branch)
common.branding = args.branding
common.timestamp = utils.get_timestamp()
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
@ -68,9 +78,11 @@ utils.delete_file(common.deploy_data)
if "core" in common.targets:
package_core.make()
if "closuremaps_opensource" in common.targets:
package_core.deploy_closuremaps("opensource")
package_core.deploy_closuremaps_sdkjs("opensource")
package_core.deploy_closuremaps_webapps("opensource")
if "closuremaps_commercial" in common.targets:
package_core.deploy_closuremaps("commercial")
package_core.deploy_closuremaps_sdkjs("commercial")
package_core.deploy_closuremaps_webapps("commercial")
if "desktop" in common.targets:
package_desktop.make()
if "builder" in common.targets:

View File

@ -39,6 +39,9 @@ def is_os_arm():
return False
return True
def get_platform():
return platform.machine().lower()
def is_python_64bit():
return (struct.calcsize("P") == 8)
@ -250,6 +253,9 @@ def copy_lib(src, dst, name):
create_dir(dst + "/simulator")
copy_dir(src + "/simulator/" + name + ".framework", dst + "/simulator/" + name + ".framework")
if is_dir(dst + "/" + name + ".xcframework"):
delete_dir(dst + "/" + name + ".xcframework")
cmd("xcodebuild", ["-create-xcframework",
"-framework", dst + "/" + name + ".framework",
"-framework", dst + "/simulator/" + name + ".framework",
@ -292,17 +298,34 @@ def copy_exe(src, dst, name):
copy_file(src + "/" + name + exe_ext, dst + "/" + name + exe_ext)
return
def readFileCommon(path):
    """Read a text file and return its contents.

    First tries the platform default encoding; when that cannot decode the
    content (e.g. UTF-8 files on a cp1251 Windows locale), retries with an
    explicit UTF-8 decode. Other I/O errors propagate to the caller.
    """
    try:
        with open(get_path(path), "r") as file:
            return file.read()
    except UnicodeDecodeError:
        # Default codec failed — the file is assumed to be UTF-8 encoded.
        with open(get_path(path), "r", encoding="utf-8") as file:
            return file.read()
def writeFileCommon(path, data):
    """Write `data` to a text file.

    First tries the platform default encoding; when the data cannot be
    encoded with it, retries with explicit UTF-8. Other I/O errors
    propagate to the caller.
    """
    try:
        with open(get_path(path), "w") as file:
            file.write(data)
    except UnicodeEncodeError:
        # Default codec cannot represent the data — rewrite as UTF-8.
        with open(get_path(path), "w", encoding="utf-8") as file:
            file.write(data)
    return
def replaceInFile(path, text, textReplace):
if not is_file(path):
print("[replaceInFile] file not exist: " + path)
return
filedata = ""
with open(get_path(path), "r") as file:
filedata = file.read()
filedata = readFileCommon(path)
filedata = filedata.replace(text, textReplace)
delete_file(path)
with open(get_path(path), "w") as file:
file.write(filedata)
writeFileCommon(path, filedata)
return
def replaceInFileUtf8(path, text, textReplace):
if not is_file(path):
@ -320,28 +343,21 @@ def replaceInFileRE(path, pattern, textReplace):
if not is_file(path):
print("[replaceInFile] file not exist: " + path)
return
filedata = ""
with open(get_path(path), "r") as file:
filedata = file.read()
filedata = readFileCommon(path)
filedata = re.sub(pattern, textReplace, filedata)
delete_file(path)
with open(get_path(path), "w") as file:
file.write(filedata)
writeFileCommon(path, filedata)
return
def readFile(path):
if not is_file(path):
return ""
filedata = ""
with open(get_path(path), "r") as file:
filedata = file.read()
return filedata
return readFileCommon(path)
def writeFile(path, data):
if is_file(path):
delete_file(path)
with open(get_path(path), "w") as file:
file.write(data)
writeFileCommon(path, data)
return
# system cmd methods ------------------------------------
@ -371,7 +387,7 @@ def cmd2(prog, args=[], is_no_errors=False):
sys.exit("Error (" + prog + "): " + str(ret))
return ret
def cmd_exe(prog, args):
def cmd_exe(prog, args, is_no_errors=False):
prog_dir = os.path.dirname(prog)
env_dir = os.environ
if ("linux" == host_platform()):
@ -393,7 +409,7 @@ def cmd_exe(prog, args):
command += (" \"" + arg + "\"")
process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir)
ret = process.wait()
if ret != 0:
if ret != 0 and True != is_no_errors:
sys.exit("Error (" + prog + "): " + str(ret))
return ret
@ -413,12 +429,13 @@ def cmd_and_return_cwd(prog, args=[], is_no_errors=False):
def run_command(sCommand):
popen = subprocess.Popen(sCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
result = {'stdout' : '', 'stderr' : ''}
result = {'stdout' : '', 'stderr' : '', 'returncode' : 0}
try:
stdout, stderr = popen.communicate()
popen.wait()
result['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
result['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
result['returncode'] = popen.returncode
finally:
popen.stdout.close()
popen.stderr.close()
@ -483,12 +500,37 @@ def set_cwd(dir):
return
# git ---------------------------------------------------
def git_get_origin():
    """Return remote.origin.url of the repository containing this script."""
    saved_cwd = os.getcwd()
    os.chdir(get_script_dir() + "/../")
    origin = run_command("git config --get remote.origin.url")["stdout"]
    os.chdir(saved_cwd)
    return origin
def git_is_ssh():
    """Return True when git operations should use the SSH protocol.

    An explicit "https"/"ssh" value of the git-protocol option wins;
    "auto" inspects the current origin URL for the SSH clone form.
    """
    protocol = config.option("git-protocol")
    if protocol == "https":
        return False
    if protocol == "ssh":
        return True
    origin = git_get_origin()
    return (protocol == "auto") and (":ONLYOFFICE/" in origin)
def get_ssh_base_url():
    """Return the SSH clone URL prefix, ending in ":ONLYOFFICE/".

    Derived from the current origin so a mirror host is preserved;
    falls back to the canonical GitHub prefix when the origin is not
    in SSH form.
    """
    marker = ":ONLYOFFICE/"
    cur_origin = git_get_origin()
    ind = cur_origin.find(marker)
    if ind == -1:
        return "git@github.com" + marker
    # Keep everything up to and including the marker, e.g. "git@host:ONLYOFFICE/".
    return cur_origin[:ind + len(marker)]
def git_update(repo, is_no_errors=False, is_current_dir=False, git_owner=""):
print("[git] update: " + repo)
owner = git_owner if git_owner else "ONLYOFFICE"
url = "https://github.com/" + owner + "/" + repo + ".git"
if config.option("git-protocol") == "ssh":
url = "git@github.com:ONLYOFFICE/" + repo + ".git"
if git_is_ssh():
url = get_ssh_base_url() + repo + ".git"
folder = get_script_dir() + "/../../" + repo
if is_current_dir:
folder = repo
@ -557,8 +599,8 @@ def get_branding_repositories(checker):
def create_pull_request(branches_to, repo, is_no_errors=False, is_current_dir=False):
print("[git] create pull request: " + repo)
url = "https://github.com/ONLYOFFICE/" + repo + ".git"
if config.option("git-protocol") == "ssh":
url = "git@github.com:ONLYOFFICE/" + repo + ".git"
if git_is_ssh():
url = get_ssh_base_url() + repo + ".git"
folder = get_script_dir() + "/../../" + repo
if is_current_dir:
folder = repo
@ -687,6 +729,22 @@ def check_congig_option_with_platfom(platform, option_name):
return True
return False
def correct_makefile_after_qmake(platform, file):
    """Strip the android ABI suffix qmake appends to library file names.

    qmake emits references like "libfoo_arm64-v8a.a"; the build expects
    plain "libfoo.a"/"libfoo.so", so rewrite the makefile in place.
    No-op for non-android platforms.
    """
    # Map of build platform -> ABI suffix injected by qmake.
    abi_suffixes = {
        "android_arm64_v8a": "_arm64-v8a",
        "android_armv7": "_armeabi-v7a",
        "android_x86_64": "_x86_64",
        "android_x86": "_x86",
    }
    suffix = abi_suffixes.get(platform)
    if suffix is not None:
        replaceInFile(file, suffix + ".a", ".a")
        replaceInFile(file, suffix + ".so", ".so")
    return
def qt_config_platform_addon(platform):
config_addon = ""
if (0 == platform.find("win")):
@ -741,6 +799,21 @@ def qt_major_version():
qt_dir = qt_version()
return qt_dir.split(".")[0]
def qt_version_decimal():
    """Encode the Qt version as major*10 + minor (e.g. "6.5.2" -> 65)."""
    parts = qt_version().split(".")
    return 10 * int(parts[0]) + int(parts[1])
def qt_config_as_param(value):
    """Convert a space-separated CONFIG value into qmake argument(s).

    Qt before 6.6 accepted a single quoted "CONFIG+=a b c" argument;
    newer versions require one "CONFIG+=name" argument per entry.
    """
    # Renamed from `qt_version` to avoid shadowing the sibling function.
    version = qt_version_decimal()
    if version < 66:
        return ["CONFIG+=" + value]
    return ["CONFIG+=" + name for name in value.split()]
def qt_copy_lib(lib, dir):
qt_dir = get_env("QT_DEPLOY")
if ("windows" == host_platform()):
@ -1002,15 +1075,15 @@ def web_apps_addons_param():
def download(url, dst):
return cmd_exe("curl", ["-L", "-o", dst, url])
def extract(src, dst):
def extract(src, dst, is_no_errors=False):
app = "7za" if ("mac" == host_platform()) else "7z"
return cmd_exe(app, ["x", "-y", src, "-o" + dst])
return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors)
def extract_unicode(src, dst):
def extract_unicode(src, dst, is_no_errors=False):
if "windows" == host_platform():
run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"])
return
return extract(src, dst)
return extract(src, dst, is_no_errors)
def archive_folder(src, dst):
app = "7za" if ("mac" == host_platform()) else "7z"
@ -1180,6 +1253,13 @@ def mac_correct_rpath_docbuilder(dir):
cmd("chmod", ["-v", "+x", "./docbuilder"])
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./docbuilder"], True)
mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
mac_correct_rpath_library("docbuilder.c", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer"])
def add_loader_path_to_rpath(libs):
for lib in libs:
cmd("install_name_tool", ["-add_rpath", "@loader_path", "lib" + lib + ".dylib"], True)
add_loader_path_to_rpath(["icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer", "docbuilder.c"])
os.chdir(cur_dir)
return
@ -1215,6 +1295,19 @@ def mac_correct_rpath_desktop(dir):
os.chdir(cur_dir)
return
def linux_set_origin_rpath_libraries(dir, libs):
    """Set the RPATH of each "lib<name>" in `dir` to $ORIGIN using the bundled patchelf."""
    patchelf = get_script_dir() + "/../tools/linux/elf/" + "patchelf"
    saved_cwd = os.getcwd()
    os.chdir(dir)
    for name in libs:
        cmd(patchelf, ["--set-rpath", "\\$ORIGIN", "lib" + name], True)
    os.chdir(saved_cwd)
    return
def linux_correct_rpath_docbuilder(dir):
    """Apply the $ORIGIN rpath fix to the docbuilder shared libraries in `dir`."""
    docbuilder_libs = [
        "docbuilder.jni.so", "docbuilder.c.so", "icuuc.so.58",
        "doctrenderer.so", "graphics.so", "kernel.so",
        "kernel_network.so", "UnicodeConverter.so",
    ]
    linux_set_origin_rpath_libraries(dir, docbuilder_libs)
    return
def common_check_version(name, good_version, clean_func):
version_good = name + "_version_" + good_version
version_path = "./" + name + ".data"
@ -1268,7 +1361,7 @@ def copy_marketplace_plugin(dst_dir, is_name_as_guid=False, is_desktop_local=Fal
git_dir = __file__script__path__ + "/../.."
if False:
# old version
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
return
src_dir_path = git_dir + "/onlyoffice.github.io/store/plugin"
name = "marketplace"
@ -1291,9 +1384,11 @@ def copy_marketplace_plugin(dst_dir, is_name_as_guid=False, is_desktop_local=Fal
delete_dir(dst_dir_path + "/store/plugin-dev")
return
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False):
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False, isXp=False):
plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content"
plugins_list_config = config.option("sdkjs-plugin")
if isXp:
plugins_list_config="photoeditor, macros, highlightcode, doc2md"
if ("" == plugins_list_config):
return
plugins_list = plugins_list_config.rsplit(", ")
@ -1660,3 +1755,75 @@ def check_module_version(actual_version, clear_func):
writeFile(module_file, actual_version)
clear_func()
return
def check_python():
    # Ensure a bundled python3 interpreter is available on linux build hosts;
    # no-op on other platforms.
    if ("linux" != host_platform()):
        return
    directory = __file__script__path__ + "/../tools/linux"
    directory_bin = __file__script__path__ + "/../tools/linux/python3/bin"
    # Unpack the bundled interpreter on first use and alias python -> python3.
    if not is_dir(directory + "/python3"):
        cmd("tar", ["xfz", directory + "/python3.tar.gz", "-C", directory])
        cmd("ln", ["-s", directory_bin + "/python3", directory_bin + "/python"])
    # Escape spaces before prepending the directory to PATH so shells
    # invoked later resolve the bundled interpreter first.
    directory_bin = directory_bin.replace(" ", "\\ ")
    os.environ["PATH"] = directory_bin + os.pathsep + os.environ["PATH"]
    return
def check_tools():
    """Build the arm64 Qt toolchain on linux hosts when cross-building for linux_arm64."""
    if "linux" != host_platform():
        return
    # Only needed when an x86 host targets linux_arm64.
    if is_os_arm() or not config.check_option("platform", "linux_arm64"):
        return
    directory = __file__script__path__ + "/../tools/linux"
    if not is_dir(directory + "/qt"):
        create_dir(directory + "/qt")
        cmd("python", [directory + "/arm/build_qt.py", "--arch", "arm64", directory + "/qt/arm64"])
    return
def apply_patch(file, patch):
    """Apply a conflict-style patch to `file`.

    `patch` contains a single "<<<<<<< old ======= new >>>>>>>" block;
    the old fragment found in `file` is replaced with the new one.
    A patch lacking any of the three markers is rejected with a message
    (previously a -1 from find() would silently produce garbage slices).
    """
    patch_content = readFile(patch)
    index1 = patch_content.find("<<<<<<<")
    index2 = patch_content.find("=======")
    index3 = patch_content.find(">>>>>>>")
    if (-1 == index1) or (-1 == index2) or (-1 == index3):
        print("[apply_patch] invalid patch file: " + patch)
        return
    file_content_old = patch_content[index1 + 7:index2].strip()
    file_content_new = patch_content[index2 + 7:index3].strip()
    replaceInFile(file, file_content_old, file_content_new)
    return
def get_autobuild_version(product, platform="", branch="", build=""):
    """Return the S3 download URL of an autobuilt product archive.

    platform: "" derives the host form (e.g. "win-64", "mac-arm64");
              explicit values may use "_" separators, normalized to "-".
    branch:   defaults to "develop".
    build:    defaults to "latest".
    """
    download_platform = platform
    if ("" == download_platform):
        osType = get_platform()
        # Direct bool expressions instead of `True if ... else False`.
        isArm = (-1 != osType.find("arm"))
        is64 = osType.endswith("64")
        if ("windows" == host_platform()):
            download_platform = "win-"
        elif ("linux" == host_platform()):
            download_platform = "linux-"
        else:
            download_platform = "mac-"
        download_platform += ("arm" if isArm else "")
        download_platform += ("64" if is64 else "32")
    else:
        download_platform = download_platform.replace("_", "-")
    download_build = build if ("" != build) else "latest"
    download_branch = branch if ("" != branch) else "develop"
    download_addon = download_branch + "/" + download_build + "/" + product + "-" + download_platform + ".7z"
    return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/" + download_addon
def create_x2t_js_cache(dir, product):
    """Pre-generate x2t JS caches and snapshots for builder/server packages.

    Skipped when libdoctrenderer.dylib is present but smaller than 5 MB —
    presumably a stub library without the embedded JS engine (TODO confirm).
    """
    renderer = dir + "/libdoctrenderer.dylib"
    if is_file(renderer) and (os.path.getsize(renderer) < 5 * 1024 * 1024):
        return
    if product not in ["builder", "server"]:
        return
    for flag in ["-create-js-cache", "-create-js-snapshots"]:
        cmd_in_dir(dir, "./x2t", [flag], True)
    return

View File

@ -1,118 +0,0 @@
#!/usr/bin/env python
import config
import base
import os
import multiprocessing
def make_pro_file(makefiles_dir, pro_file, qmake_config_addon=""):
platforms = config.option("platform").split()
for platform in platforms:
if not platform in config.platforms:
continue
print("------------------------------------------")
print("BUILD_PLATFORM: " + platform)
print("------------------------------------------")
old_env = dict(os.environ)
# if you need change output libraries path - set the env variable
# base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
isAndroid = False if (-1 == platform.find("android")) else True
if isAndroid:
toolchain_platform = "linux-x86_64"
if ("mac" == base.host_platform()):
toolchain_platform = "darwin-x86_64"
base.set_env("ANDROID_NDK_HOST", toolchain_platform)
old_path = base.get_env("PATH")
new_path = base.qt_setup(platform) + "/bin:"
new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
new_path += old_path
base.set_env("PATH", new_path)
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
if (-1 != platform.find("ios")):
base.hack_xcode_ios()
# makefile suffix
file_suff = platform
if (config.check_option("config", "debug")):
file_suff += "_debug_"
file_suff += config.option("branding")
# setup qt
qt_dir = base.qt_setup(platform)
base.set_env("OS_DEPLOY", platform)
# qmake CONFIG+=...
config_param = base.qt_config(platform)
if ("" != qmake_config_addon):
config_param += (" " + qmake_config_addon)
# qmake ADDON
qmake_addon = []
if ("" != config.option("qmake_addon")):
qmake_addon = config.option("qmake_addon").split()
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
print("THIS PLATFORM IS NOT SUPPORTED")
continue
# non windows platform
if not base.is_windows():
if base.is_file(makefiles_dir + "/build.makefile_" + file_suff):
base.delete_file(makefiles_dir + "/build.makefile_" + file_suff)
print("make file: " + makefiles_dir + "/build.makefile_" + file_suff)
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if ("1" == config.option("clean")):
base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if not base.is_file(pro_file):
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if ("0" != config.option("multiprocess")):
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff, "-j" + str(multiprocessing.cpu_count())])
else:
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
else:
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
qmake_addon_string = ""
if ("" != config.option("qmake_addon")):
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
if ("1" == config.option("clean")):
qmake_bat.append("call nmake clean -f " + makefiles_dir + "/build.makefile_" + file_suff)
qmake_bat.append("call nmake distclean -f " + makefiles_dir + "/build.makefile_" + file_suff)
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
if ("0" != config.option("multiprocess")):
qmake_bat.append("set CL=/MP")
qmake_bat.append("call nmake -f " + makefiles_dir + "/build.makefile_" + file_suff)
base.run_as_bat(qmake_bat)
os.environ.clear()
os.environ.update(old_env)
base.delete_file(".qmake.stash")
# make build.pro
def make():
make_pro_file("makefiles", "build.pro")
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
make_pro_file("makefiles", "build.pro", "xcframework_platform_ios_simulator")
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
# check replace
new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder.h")
if ("2019" == config.option("vs-version")):
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
if (True):
new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
base.restorePathForBuilder(new_path_net)
else:
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
base.restorePathForBuilder(new_replace_path)
return

View File

@ -74,20 +74,27 @@ def make():
base.create_dir(out_dir + "/mobile/sdkjs")
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts(prefix_js, out_dir + "/mobile/sdkjs/banners.js")
base.create_dir(out_dir + "/mobile/sdkjs/word")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/word/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/cell")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/cell/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/slide")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/slide/script.bin")
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
return
@ -139,15 +146,25 @@ def build_sdk_native(directory, minimize=True):
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
return
def build_sdkjs_develop(root_dir):
    # Build the sdkjs "develop" bundle, honoring an optional
    # --external-folder prefix under root_dir.
    external_folder = config.option("--external-folder")
    if (external_folder != ""):
        external_folder = "/" + external_folder
    _run_npm_ci(root_dir + external_folder + "/sdkjs/build")
    _run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
    _run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
def build_js_develop(root_dir):
#_run_npm_cli(root_dir + "/sdkjs/build")
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
build_sdkjs_develop(root_dir)
_run_npm(root_dir + external_folder + "/web-apps/build")
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])

View File

@ -14,6 +14,9 @@ parser.add_option("--output",
parser.add_option("--write-version",
action="store_true", dest="write_version", default=False,
help="Create version file of build")
parser.add_option("--minimize",
action="store", type="string", dest="minimize", default="0",
help="Is minimized version")
(options, args) = parser.parse_args(arguments)
def write_version_files(output_dir):
@ -32,7 +35,11 @@ def write_version_files(output_dir):
# parse configuration
config.parse()
config.parse_defaults()
config.extend_option("jsminimize", "0")
isMinimize = False
if ("1" == options.minimize or "true" == options.minimize):
isMinimize = True
config.set_option("jsminimize", "disable")
branding = config.option("branding-name")
if ("" == branding):
@ -46,23 +53,30 @@ if (options.output):
base.create_dir(out_dir)
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
build_js.build_sdk_native(base_dir + "/../sdkjs/build", isMinimize)
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners.js")
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts(prefix_js, out_dir + "/banners.js")
base.create_dir(out_dir + "/word")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/word/script.bin")
base.create_dir(out_dir + "/cell")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/cell/script.bin")
base.create_dir(out_dir + "/slide")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/slide/script.bin")
base.delete_file(out_dir + "/banners.js")

61
scripts/build_sln.py Normal file
View File

@ -0,0 +1,61 @@
#!/usr/bin/env python
import config
import base
import os
import sys
sys.path.append(os.path.dirname(__file__) + "/..")
import sln
import qmake
# make solution
def make(solution=""):
    # Build every qmake project listed in the solution json for each
    # configured platform; afterwards build the docbuilder COM/.NET
    # wrappers (windows) and the Java JNI wrapper.
    # NOTE(review): nesting reconstructed from a flattened diff — confirm.
    platforms = config.option("platform").split()
    for platform in platforms:
        if not platform in config.platforms:
            continue
        print("------------------------------------------")
        print("BUILD_PLATFORM: " + platform)
        print("------------------------------------------")
        if ("" == solution):
            solution = "./sln.json"
        projects = sln.get_projects(solution, platform)
        for pro in projects:
            qmake_main_addon = ""
            # On android, force-strip x2t even in debug builds unless
            # disable_x2t_debug_strip is set.
            if (0 == platform.find("android")) and (-1 != pro.find("X2tConverter.pro")):
                if config.check_option("config", "debug") and not config.check_option("config", "disable_x2t_debug_strip"):
                    print("[WARNING:] temporary enable strip for x2t library in debug")
                    qmake_main_addon += "build_strip_debug"
            qmake.make(platform, pro, qmake_main_addon)
        # Rebuild for the ios simulator xcframework variant.
        # NOTE(review): appears to reuse the last `pro` of the loop above; verify.
        if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
            qmake.make(platform, pro, "xcframework_platform_ios_simulator")
    if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
        # check replace
        new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder.h")
        if ("2019" == config.option("vs-version")):
            base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
            if (True):
                new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
                base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
                base.restorePathForBuilder(new_path_net)
        else:
            base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
        base.restorePathForBuilder(new_replace_path)
    # build Java docbuilder wrapper
    if config.check_option("module", "builder") and "onlyoffice" == config.branding():
        for platform in platforms:
            if not platform in config.platforms:
                continue
            # build JNI library
            qmake.make(platform, base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java/src/jni/docbuilder_jni.pro", "", True)
        # build Java code to JAR
        base.cmd_in_dir(base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java", "python", ["make.py"])
    return

View File

@ -182,6 +182,9 @@ def extend_option(name, value):
else:
options[name] = value
def set_option(name, value):
    # Overwrite (or create) a config option, replacing any existing value —
    # unlike extend_option, which appends to an existing one.
    options[name] = value
def branding():
branding = option("branding-name")
if ("" == branding):

View File

@ -13,7 +13,7 @@ import cef
import icu
import openssl
import curl
import websocket
import websocket_all
import v8
import html2
import hunspell
@ -54,5 +54,5 @@ def make():
if config.check_option("module", "mobile"):
if (config.check_option("platform", "android")):
curl.make()
websocket.make()
websocket_all.make()
return

View File

@ -0,0 +1,170 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import re
def get_android_ndk_version():
    """Return the NDK version: the last path component of ANDROID_NDK_ROOT."""
    ndk_root = base.get_env("ANDROID_NDK_ROOT")
    if ndk_root == "":
        # Default when the environment variable is not set.
        ndk_root = "21.1.6352462"
    return ndk_root.strip("/").split("/")[-1]
def get_android_ndk_version_major():
    """Return the numeric major component of the NDK version."""
    major = get_android_ndk_version().split(".")[0]
    # Strip any non-digit characters before converting.
    return int(re.sub("[^0-9]", "", major))
def get_sdk_api():
    """Pick the android API level: "21" for NDK majors below 23, else "23"."""
    return "21" if get_android_ndk_version_major() < 23 else "23"
# Supported android architectures, in build order.
global archs
archs = ["arm64", "arm", "x86_64", "x86"]
# Per-arch toolchain metadata:
#   abi    - ABI directory name (jniLibs layout)
#   target - clang target-triple prefix
#   dst    - output directory name used by the build scripts
#   api    - android API level (depends on the NDK major version)
#   old    - legacy triple used by pre-r23 NDK library layouts
# NOTE(review): the x86/x86_64 entries define an "arch" key where the arm
# entries define "abi" — confirm whether consumers expect "abi" here.
global platforms
platforms = {
    "arm64" : {
        "abi" : "arm64-v8a",
        "target" : "aarch64-linux-android",
        "dst" : "arm64_v8a",
        "api" : get_sdk_api(),
        "old" : "aarch64-linux-android"
    },
    "arm" : {
        "abi" : "armeabi-v7a",
        "target" : "armv7a-linux-androideabi",
        "dst" : "armv7",
        "api" : get_sdk_api(),
        "old" : "arm-linux-android"
    },
    "x86_64" : {
        "arch" : "x86_64",
        "target" : "x86_64-linux-android",
        "dst" : "x86_64",
        "api" : get_sdk_api(),
        "old" : "x86_64-linux-android"
    },
    "x86" : {
        "arch" : "x86",
        "target" : "i686-linux-android",
        "dst" : "x86",
        "api" : get_sdk_api(),
        "old" : "i686-linux-android"
    }
}
# Host toolchain directory names for the prebuilt NDK toolchains.
# todo: check arm host!
global host
if ("linux" == base.host_platform()):
    host = {
        "name" : "linux",
        "arch" : "linux-x86_64"
    }
else:
    host = {
        "name" : "darwin",
        "arch" : "darwin-x86_64"
    }
def get_android_ndk_version():
    # NOTE(review): this redefinition shadows the env-aware
    # get_android_ndk_version defined earlier in this module, pinning the
    # version to 21.1.6352462 regardless of ANDROID_NDK_ROOT — confirm
    # which definition is intended and remove the other.
    #return "26.2.11394342"
    return "21.1.6352462"
def get_android_ndk_version_major():
    # NOTE(review): likewise shadows the earlier regex-based variant.
    return int(get_android_ndk_version().split(".")[0])
def get_options_dict_as_array(opts):
    """Flatten an options dict into a ["key=value", ...] list (insertion order)."""
    return [key + "=" + val for key, val in opts.items()]
def get_options_array_as_string(opts):
    """Join option strings into one space-separated command-line fragment."""
    separator = " "
    return separator.join(opts)
def ndk_dir():
    # Path to the android NDK root, taken from ANDROID_NDK_ROOT.
    return base.get_env("ANDROID_NDK_ROOT")
def sdk_dir():
    """Derive the android SDK root from the NDK location.

    Side-by-side NDK installs live under <sdk>/ndk/<version> (two levels
    deep); otherwise the NDK is assumed to sit directly inside the SDK.
    """
    ndk_path = ndk_dir()
    levels_up = "/../.." if (-1 != ndk_path.find("/ndk/")) else "/.."
    return ndk_path + levels_up
def toolchain_dir():
    # Prebuilt LLVM toolchain directory inside the NDK for the current host.
    return ndk_dir() + "/toolchains/llvm/prebuilt/" + host["arch"]
def prepare_platform(arch, cpp_standard=11):
    # Export the cross-compilation environment (CC/CXX, AR/STRIP, flags,
    # PATH) for building autotools-style projects with the android NDK.
    # `arch` is a key of the module-level `platforms` table.
    target = platforms[arch]["target"]
    api = platforms[arch]["api"]
    ndk_directory = ndk_dir()  # NOTE: currently unused below
    toolchain = toolchain_dir()
    base.set_env("TARGET", target)
    base.set_env("TOOLCHAIN", toolchain)
    base.set_env("NDK_STANDARD_ROOT", toolchain)
    base.set_env("ANDROIDVER", api)
    base.set_env("ANDROID_API", api)
    base.set_env("AR", toolchain + "/bin/llvm-ar")
    base.set_env("AS", toolchain + "/bin/llvm-as")
    base.set_env("LD", toolchain + "/bin/ld")
    base.set_env("RANLIB", toolchain + "/bin/llvm-ranlib")
    base.set_env("STRIP", toolchain + "/bin/llvm-strip")
    # clang driver names embed the target triple and API level,
    # e.g. aarch64-linux-android23-clang.
    base.set_env("CC", target + api + "-clang")
    base.set_env("CXX", target + api + "-clang++")
    ld_flags = "-Wl,--gc-sections,-rpath-link=" + toolchain + "/sysroot/usr/lib/"
    if (23 > get_android_ndk_version_major()):
        # Older NDKs keep per-ABI libraries under the legacy triple layout.
        ld_flags += (" -L" + toolchain + "/" + platforms[arch]["old"] + "/lib")
        ld_flags += (" -L" + toolchain + "/sysroot/usr/lib/" + platforms[arch]["old"] + "/" + api)
    base.set_env("LDFLAGS", ld_flags)
    base.set_env("PATH", toolchain + "/bin" + os.pathsep + base.get_env("PATH"))
    cflags = [
        "-Os",
        "-ffunction-sections",
        "-fdata-sections",
        "-fvisibility=hidden",
        "-Wno-unused-function",
        "-fPIC",
        "-I" + toolchain + "/sysroot/usr/include",
        "-D__ANDROID_API__=" + api,
        "-DANDROID"
    ]
    cflags_string = " ".join(cflags)
    cppflags_string = cflags_string
    if (cpp_standard >= 11):
        # NOTE: always pins c++11 even for higher requested standards.
        cppflags_string += " -std=c++11"
    base.set_env("CFLAGS", cflags_string)
    base.set_env("CXXFLAGS", cppflags_string)
    # NOTE(review): "CPPPLAGS" looks like a typo for "CPPFLAGS" — as written
    # the preprocessor-flags variable is never exported here; confirm and fix.
    base.set_env("CPPPLAGS", cflags_string)
    return
def extend_cflags(params):
    # Append `params` to the exported CFLAGS, then mirror the updated
    # CFLAGS into CPPFLAGS (the second get_env reads back the new value).
    base.set_env("CFLAGS", base.get_env("CFLAGS") + " " + params)
    base.set_env("CPPFLAGS", base.get_env("CFLAGS"))
    return
def extend_cxxflags(params):
    """Append `params` to the exported CXXFLAGS."""
    current = base.get_env("CXXFLAGS")
    base.set_env("CXXFLAGS", current + " " + params)
    return
def extend_ldflags(params):
    """Append `params` to the exported LDFLAGS."""
    current = base.get_env("LDFLAGS")
    base.set_env("LDFLAGS", current + " " + params)
    return

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Absolute path (normalized, with trailing slash) of core's bundled curl area.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/curl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# Upstream release tag and the directory name the tarball unpacks to.
lib_version = "curl-7_68_0"
lib_name = "curl-7.68.0"
def fetch():
    """Download and unpack the curl source tarball (skipped when already present)."""
    if base.is_dir(current_dir + lib_name):
        return
    tarball = current_dir + lib_name + ".tar.gz"
    base.cmd("curl", ["-L", "-s", "-o", tarball,
        "https://github.com/curl/curl/releases/download/" + lib_version + "/" + lib_name + ".tar.gz"])
    base.cmd("tar", ["xfz", tarball, "-C", current_dir])
    return
def build_host():
    # Placeholder: no host-side build step is required for curl.
    return
def build_arch(arch):
    # Configure and build a static libcurl for one Android ABI.
    # The per-ABI destination directory doubles as a build stamp: delete
    # build/android/<abi> to force a rebuild.
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    # prepare_platform() exports the NDK compiler/flags environment for
    # this ABI; configure below picks them up.
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    # NOTE(review): `toolchain` is never used below — presumably kept for
    # parity with the other scripts; confirm before removing.
    toolchain = android_ndk.toolchain_dir()
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    # Temporary install prefix; the artifacts we keep are copied out below.
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    old_cur = os.getcwd()
    os.chdir(current_dir + lib_name)
    params = []
    # Map our arch keys onto autoconf host triples.
    if ("arm64" == arch):
        params.append("--host=aarch64-linux-android")
    elif ("arm" == arch):
        params.append("--host=arm-linux-androideabi")
    elif ("x86_64" == arch):
        params.append("--host=x86_64-linux-android")
    elif ("x86" == arch):
        params.append("--host=i686-linux-android")
    # Link against the OpenSSL built for the same ABI by the sibling
    # openssl script (must have run first).
    openssl_dir = os.path.abspath(current_dir + "../openssl/build/android/" + android_ndk.platforms[arch]["dst"])
    params.append("--enable-ipv6")
    params.append("--enable-static")
    params.append("--disable-shared")
    params.append("--prefix=" + arch_build_dir)
    params.append("--with-ssl=" + openssl_dir)
    base.cmd("./configure", params)
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(old_cur)
    # Publish: the static library goes per-ABI, headers are shared
    # (overwritten by each arch; they are expected to be identical).
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libcurl.a", dst_dir)
    base.copy_dir(arch_build_dir + "/include", current_dir + "build/android/include")
    base.delete_dir(arch_build_dir)
    return
def make():
    """Build static libcurl for every supported Android ABI.

    Snapshots os.environ first and restores it at the end, because the
    per-arch builds mutate the process environment.
    """
    saved_env = os.environ.copy()
    fetch()
    build_host()
    for target in android_ndk.archs:
        build_arch(target)
    os.environ.clear()
    os.environ.update(saved_env)
    return
if __name__ == "__main__":
make()

View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Root of the ICU-for-Android third-party tree; normalized to an absolute
# path with a trailing slash because paths below are built by concatenation.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# ICU release to build: cloned from the maint/maint-58 branch (58.x).
icu_major = "58"
icu_minor = "3"
# Shared ./configure switches for both the host cross-build and the
# per-ABI Android builds: static-only library, data packaged as a
# standalone archive (.dat), extras/samples/tests/icuio/layoutex disabled.
options = {
    "--enable-strict" : "no",
    "--enable-extras" : "no",
    "--enable-draft" : "yes",
    "--enable-samples" : "no",
    "--enable-tests" : "no",
    "--enable-renaming" : "yes",
    "--enable-icuio" : "no",
    "--enable-layoutex" : "no",
    "--with-library-bits" : "nochange",
    "--with-library-suffix" : "",
    "--enable-static" : "yes",
    "--enable-shared" : "no",
    "--with-data-packaging" : "archive"
}
# Size-oriented compiler flags used by every build of ICU in this script.
cpp_flags_base = [
    "-Os",
    "-ffunction-sections",
    "-fdata-sections",
    "-fvisibility=hidden",
    "-fPIC"
]
# Extra flags for the ICU compile: keep wchar_t/enums at full width and
# configure ICU feature macros (static implementation, no NL_LANGINFO, ...).
cpp_flags = [
    "-fno-short-wchar",
    "-fno-short-enums",
    "-DU_USING_ICU_NAMESPACE=0",
    "-DU_HAVE_NL_LANGINFO_CODESET=0",
    "-DU_TIMEZONE=0",
    "-DU_DISABLE_RENAMING=0",
    "-DUCONFIG_NO_COLLATION=0",
    "-DUCONFIG_NO_FORMATTING=0",
    "-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
    "-DUCONFIG_NO_TRANSLITERATION=0",
    "-DU_STATIC_IMPLEMENTATION"
]
def fetch_icu():
    """Fetch the ICU sources into current_dir/icu (no-op if already there).

    Shallow-clones the maint/maint-<icu_major> branch into a scratch
    directory, keeps only the icu4c subtree, then removes the clone.
    On Linux the sources are patched to include <locale.h> instead of
    <xlocale.h> (presumably because newer glibc dropped that header --
    confirm against the target build hosts).

    Note: a previously disabled (`if False and ...`) macOS pkgdata patch
    was removed here as unreachable dead code; behavior is unchanged.
    """
    if not base.is_dir(current_dir + "icu"):
        base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + icu_major, "https://github.com/unicode-org/icu.git", current_dir + "icu2"])
        base.copy_dir(current_dir + "icu2/icu4c", current_dir + "icu")
        base.delete_dir_with_access_error(current_dir + "icu2")
        if ("linux" == base.host_platform()):
            base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
    return
def build_host():
    # Build ICU for the host machine ("cross build").  The per-ABI Android
    # configure runs reference this tree via --with-cross-build, which ICU
    # requires during cross-compilation so it can execute host-built tools.
    # The cross_build directory itself acts as the build stamp.
    cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
    if not base.is_dir(cross_build_dir):
        base.create_dir(cross_build_dir)
        os.chdir(cross_build_dir)
        ld_flags = "-pthread"
        if ("linux" == base.host_platform()):
            ld_flags += " -Wl,--gc-sections"
        else:
            # the macOS linker does not support --gc-sections; -dead_strip
            # is the closest equivalent there
            ld_flags += " -Wl,-dead_strip"
        base.set_env("LDFLAGS", ld_flags)
        base.set_env("CPPFLAGS", android_ndk.get_options_array_as_string(cpp_flags_base + cpp_flags))
        host_type = "Linux"
        if ("mac" == base.host_platform()):
            host_type = "MacOSX/GCC"
        base.cmd("../source/runConfigureICU", [host_type, "--prefix=" + cross_build_dir] + android_ndk.get_options_dict_as_array(options))
        base.cmd("make", ["-j4"])
        # NOTE(review): the extra True argument appears to tolerate a
        # non-zero exit from "make install" -- confirm against scripts/base.
        base.cmd("make", ["install"], True)
        # Public headers are shared across ABIs; per-ABI libs are copied
        # out by build_arch().
        base.create_dir(current_dir + "build")
        base.copy_dir(cross_build_dir + "/include", current_dir + "build/include")
        os.chdir(current_dir)
    return
def build_arch(arch):
    # Cross-compile static ICU for one Android ABI, using the host build
    # from build_host() for the tools ICU runs during the build.
    # The per-ABI destination directory acts as a build stamp.
    dst_dir = current_dir + "build/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    # Export the NDK toolchain environment for this ABI and add the
    # ICU-specific defines on top of it.
    android_ndk.prepare_platform(arch)
    android_ndk.extend_cflags(" ".join(cpp_flags))
    # NOTE(review): ndk_dir and toolchain are unused below -- presumably
    # kept for parity with the other scripts; confirm before removing.
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()
    cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
    # Temporary out-of-tree build directory; deleted after the artifacts
    # are copied out.
    arch_build_dir = os.path.abspath(current_dir + "build/tmp")
    base.create_dir(arch_build_dir)
    os.chdir(arch_build_dir)
    base.cmd("./../../icu/source/configure", ["--with-cross-build=" + cross_build_dir] +
        android_ndk.get_options_dict_as_array(options) + ["--host=" + android_ndk.platforms[arch]["target"], "--prefix=" + arch_build_dir])
    base.cmd("make", ["-j4"])
    os.chdir(current_dir)
    # Keep the common library, the stub data library, and the packaged
    # data archive (icudtNNl.dat) for this ABI.
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libicuuc.a", dst_dir)
    base.copy_file(arch_build_dir + "/stubdata/libicudata.a", dst_dir)
    base.copy_file(arch_build_dir + "/data/out/icudt" + icu_major + "l.dat", dst_dir)
    base.delete_dir(arch_build_dir)
    return
def make():
    """Fetch ICU and build it for every supported Android ABI.

    Snapshots os.environ first and restores it at the end, since the
    per-arch builds mutate the process environment.
    """
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)
    saved_env = os.environ.copy()
    fetch_icu()
    build_host()
    for target in android_ndk.archs:
        build_arch(target)
    os.environ.clear()
    os.environ.update(saved_env)
    return
if __name__ == "__main__":
make()

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Root of the OpenSSL third-party tree; normalized to an absolute path
# with a trailing slash because paths below are built by concatenation.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# OpenSSL release to download and build.
lib_name="openssl-1.1.1t"
# Configure options: static-only build without tests; legacy SSLv3/MD2
# support enabled (presumably required by consumers of this build --
# confirm); assembler optimizations disabled, which simplifies
# cross-compilation at some performance cost.
options = [
    "no-shared",
    "no-tests",
    "enable-ssl3",
    "enable-ssl3-method",
    "enable-md2",
    "no-asm"
]
def fetch():
    """Download and unpack the OpenSSL release tarball (no-op if already present)."""
    src_dir = current_dir + lib_name
    if base.is_dir(src_dir):
        return
    archive = src_dir + ".tar.gz"
    url = "https://www.openssl.org/source/" + lib_name + ".tar.gz"
    base.cmd("curl", ["-L", "-s", "-o", archive, url])
    base.cmd("tar", ["xfz", archive, "-C", current_dir])
    return
def build_host():
    # Nothing to compile on the host; just make sure the output directory
    # tree exists for the per-ABI builds.
    for subdir in ("/build", "/build/android"):
        path = current_dir + subdir
        if not base.is_dir(path):
            base.create_dir(path)
    return
def build_arch(arch):
    # Configure and build static OpenSSL (libcrypto/libssl) for one
    # Android ABI.  The per-ABI destination directory acts as a build
    # stamp: delete build/android/<abi> to force a rebuild.
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    # Export the NDK toolchain environment; OpenSSL's Configure also
    # reads ANDROID_NDK_HOME / ANDROID_NDK.
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    # NOTE(review): `toolchain` is never used below -- presumably kept
    # for parity with the other scripts; confirm before removing.
    toolchain = android_ndk.toolchain_dir()
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    # Temporary install prefix; the needed artifacts are copied out below.
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    old_cur = os.getcwd()
    os.chdir(current_dir + lib_name)
    # "android-<arch>" selects OpenSSL's built-in Android target for this ABI.
    base.cmd("./Configure", ["android-" + arch, "--prefix=" + arch_build_dir, "-D__ANDROID_API__=" + android_ndk.platforms[arch]["api"]] + options)
    # Configure has no switch for symbol visibility, so patch the flags
    # into the generated Makefile to hide non-exported symbols.
    base.replaceInFile("./Makefile", "LIB_CFLAGS=", "LIB_CFLAGS=-fvisibility=hidden ")
    base.replaceInFile("./Makefile", "LIB_CXXFLAGS=", "LIB_CXXFLAGS=-fvisibility=hidden ")
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(old_cur)
    # Publish static libs and headers per-ABI.
    base.create_dir(dst_dir)
    base.create_dir(dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libcrypto.a", dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libssl.a", dst_dir + "/lib")
    base.copy_dir(arch_build_dir + "/include", dst_dir + "/include")
    base.delete_dir(arch_build_dir)
    return
def make():
    """Build static OpenSSL for every supported Android ABI.

    Snapshots os.environ first and restores it at the end, because the
    per-arch builds mutate the process environment.
    """
    saved_env = os.environ.copy()
    fetch()
    build_host()
    for target in android_ndk.archs:
        build_arch(target)
    os.environ.clear()
    os.environ.update(saved_env)
    return
if __name__ == "__main__":
make()

View File

@ -1,108 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
platforms = {
"arm64_v8a" : {
"name" : "arm64-v8a",
"toolset" : "arm64v8a",
"clang_triple" : "aarch64-linux-android21",
"tool_triple" : "aarch64-linux-android",
"abi" : "aapcs",
"arch" : "arm",
"address_model" : "64",
"compiler_flags" : "",
"linker_flags" : ""
},
"armv7" : {
"name" : "armeabi-v7a",
"toolset" : "armeabiv7a",
"clang_triple" : "armv7a-linux-androideabi16",
"tool_triple" : "arm-linux-androideabi",
"abi" : "aapcs",
"arch" : "arm",
"address_model" : "32",
"compiler_flags" : "-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp",
"linker_flags" : "-Wl,--fix-cortex-a8"
},
"x86" : {
"name" : "x86",
"toolset" : "x86",
"clang_triple" : "i686-linux-android16",
"tool_triple" : "i686-linux-android",
"abi" : "sysv",
"arch" : "x86",
"address_model" : "32",
"compiler_flags" : "",
"linker_flags" : ""
},
"x86_64" : {
"name" : "x86_64",
"toolset" : "x8664",
"clang_triple" : "x86_64-linux-android21",
"tool_triple" : "x86_64-linux-android",
"abi" : "sysv",
"arch" : "x86",
"address_model" : "64",
"compiler_flags" : "",
"linker_flags" : ""
}
}
base_dir = base.get_script_dir()
def make(platform):
tmp_build_dir = base_dir + "/core_common/modules/boost"
if (base.is_dir(tmp_build_dir)):
base.delete_dir(tmp_build_dir)
base.copy_dir(base_dir + "/../tools/android/boost", tmp_build_dir)
current_platform = platforms[platform]
if (base.host_platform() == "mac"):
source = "prebuilt/linux-x86_64"
dest = "prebuilt/darwin-x86_64"
base.replaceInFile(tmp_build_dir + "/user-config.jam", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/hide/as", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/hide/strip", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/ar", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/clang++", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/ranlib", source, dest)
build_dir_tmp = tmp_build_dir + "/tmp"
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex", "--prefix=../build/android_" + platform])
base.cmd("./b2", ["headers"])
base.cmd("./b2", ["--clean"])
old_path = base.get_env("PATH")
base.set_env("PATH", tmp_build_dir + "/bin:" + old_path)
base.set_env("NDK_DIR", base.get_env("ANDROID_NDK_ROOT"))
base.set_env("BFA_CLANG_TRIPLE_FOR_ABI", current_platform["clang_triple"])
base.set_env("BFA_TOOL_TRIPLE_FOR_ABI", current_platform["tool_triple"])
base.set_env("BFA_COMPILER_FLAGS_FOR_ABI", current_platform["compiler_flags"])
base.set_env("BFA_LINKER_FLAGS_FOR_ABI", current_platform["linker_flags"])
print(current_platform)
base.cmd("./b2", ["-q", "-j4",
"toolset=clang-" + current_platform["toolset"],
"binary-format=elf",
"address-model=" + current_platform["address_model"],
"architecture=" + current_platform["arch"],
"abi=" + current_platform["abi"],
"link=static",
"threading=multi",
"target-os=android",
"--user-config=" + tmp_build_dir + "/user-config.jam",
"--ignore-site-config",
"--layout=system",
"install"], True)
base.set_env("PATH", old_path)
base.delete_dir(tmp_build_dir)
return

View File

@ -5,7 +5,7 @@ sys.path.append('../..')
import config
import base
import os
import build
import qmake
def make(src_dir, modules, build_platform="android", qmake_addon=""):
old_cur = os.getcwd()
@ -23,17 +23,13 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
pro_file_content.append("TARGET = boost_" + module)
pro_file_content.append("TEMPLATE = lib")
pro_file_content.append("CONFIG += staticlib")
if (build_platform == "android"):
pro_file_content.append("DEFINES += \"_HAS_AUTO_PTR_ETC=0\"")
pro_file_content.append("")
pro_file_content.append("CORE_ROOT_DIR = $$PWD/../../../../../..")
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
pro_file_content.append("include($$PWD/../../../../../base.pri)")
pro_file_content.append("")
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
pro_file_content.append("!isEmpty(OO_BRANDING_SUFFIX):MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"$$OO_BRANDING_SUFFIX\")")
pro_file_content.append("")
pro_file_content.append("BOOST_SOURCES=$$PWD/../..")
pro_file_content.append("INCLUDEPATH += $$BOOST_SOURCES")
pro_file_content.append("INCLUDEPATH += $$PWD/include")
@ -43,7 +39,7 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
os.chdir(module_dir)
build.make_pro_file("./", module + ".pro", qmake_addon)
qmake.make_all_platforms(module_dir + "/" + module + ".pro", qmake_addon)
os.chdir(old_cur)
return

View File

@ -2,21 +2,19 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import subprocess
import os
import base
import curl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
old_cur = os.getcwd()
os.chdir(path)
if (-1 != config.option("platform").find("android")):
if base.is_dir(path + "/build/android"):
os.chdir(old_cur)
return
subprocess.call(["./build-android-curl.sh"])
curl_android.make()
elif (-1 != config.option("platform").find("ios")):
if base.is_dir(path + "/build/ios"):
os.chdir(old_cur)

View File

@ -2,6 +2,7 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import base
import os
@ -38,6 +39,8 @@ def make():
if (-1 != config.option("platform").find("android")):
icu_android.make()
os.chdir(base_dir)
icu_major = "58"
icu_minor = "3"

View File

@ -1,179 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
def fetch_icu(major, minor):
base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + major, "https://github.com/unicode-org/icu.git", "./icu2"])
base.copy_dir("./icu2/icu4c", "./icu")
base.delete_dir_with_access_error("icu2")
#base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
return
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
toolshains_dir = current_dir + "/toolchains"
icu_major = "58"
icu_minor = "3"
icu_is_shared = False
current_path = base.get_env("PATH")
platforms = {
"arm64" : {
"arch" : "aarch64-linux-android",
"bin" : "aarch64-linux-android"
},
"arm" : {
"arch" : "arm-linux-androideabi",
"bin" : "arm-linux-androideabi"
},
"x86_64" : {
"arch" : "x86_64-linux-android",
"bin" : "x86_64-linux-android"
},
"x86" : {
"arch" : "x86-linux-android",
"bin" : "i686-linux-android"
}
}
def build_arch(arch, api_version):
print("icu build: " + arch + " ----------------------------------------")
if base.is_dir(current_dir + "/icu/" + arch):
base.delete_dir(current_dir + "/icu/" + arch)
base.create_dir(current_dir + "/icu/" + arch)
os.chdir(current_dir + "/icu/" + arch)
base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
"--platform=android-" + api_version,
"--install-dir=" + current_dir + "/toolchain/" + arch,
"--toolchain=" + platforms[arch]["arch"],
"--force"
])
base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)
command_args = "--prefix=" + current_dir + "/build_tmp/" + arch + " --host=!!!MASK!!! --with-cross-build=" + current_dir + "/icu/cross_build CFLAGS=-Os CXXFLAGS=--std=c++11 CC=!!!MASK!!!-clang CXX=!!!MASK!!!-clang++ AR=!!!MASK!!!-ar RANLIB=!!!MASK!!!-ranlib"
if not icu_is_shared:
command_args += " --enable-static --enable-shared=no --with-data-packaging=archive CFLAGS=-fPIC CXXFLAGS=-fPIC"
command_args = command_args.replace("!!!MASK!!!", platforms[arch]["bin"])
base.cmd("../source/configure", command_args.split())
base.cmd("make", ["-j4"])
base.cmd("make", ["install"])
base.set_env("PATH", current_path)
os.chdir(current_dir)
return
def make():
if not base.is_dir(current_dir):
base.create_dir(current_dir)
if base.is_dir(current_dir + "/build"):
return
current_dir_old = os.getcwd()
print("[fetch & build]: icu_android")
os.chdir(current_dir)
if not base.is_dir("icu"):
fetch_icu(icu_major, icu_minor)
if ("linux" == base.host_platform()):
base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
if ("mac" == base.host_platform()):
base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
if not base.is_dir(current_dir + "/icu/cross_build"):
base.create_dir(current_dir + "/icu/cross_build")
os.chdir(current_dir + "/icu/cross_build")
base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
"--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
os.chdir(current_dir)
build_arch("arm64", "21")
build_arch("arm", "16")
build_arch("x86_64","21")
build_arch("x86", "16")
os.chdir(current_dir)
base.create_dir(current_dir + "/build")
base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")
if icu_is_shared:
base.create_dir(current_dir + "/build/arm64_v8a")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")
base.create_dir(current_dir + "/build/armv7")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")
base.create_dir(current_dir + "/build/x86_64")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")
base.create_dir(current_dir + "/build/x86")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")
# patch elf information
os.chdir(current_dir + "/build")
base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
os.chdir("./patchelf")
base.cmd("./bootstrap.sh")
base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
base.cmd("make")
base.cmd("make", ["install"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../arm64_v8a/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../arm64_v8a/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../arm64_v8a/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../armv7/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../armv7/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../armv7/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86_64/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86_64/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86_64/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86/libicuuc.so"])
base.delete_dir(current_dir + "/build/patchelf")
if not icu_is_shared:
base.create_dir(current_dir + "/build/arm64_v8a")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.a", current_dir + "/build/arm64_v8a/libicudata.a")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.a", current_dir + "/build/arm64_v8a/libicuuc.a")
base.copy_file(current_dir + "/icu/arm64/data/out/icudt58l.dat", current_dir + "/build/arm64_v8a/icudt58l.dat")
base.create_dir(current_dir + "/build/armv7")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.a", current_dir + "/build/armv7/libicudata.a")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.a", current_dir + "/build/armv7/libicuuc.a")
base.copy_file(current_dir + "/icu/arm/data/out/icudt58l.dat", current_dir + "/build/armv7/icudt58l.dat")
base.create_dir(current_dir + "/build/x86_64")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.a", current_dir + "/build/x86_64/libicudata.a")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.a", current_dir + "/build/x86_64/libicuuc.a")
base.copy_file(current_dir + "/icu/x86_64/data/out/icudt58l.dat", current_dir + "/build/x86_64/icudt58l.dat")
base.create_dir(current_dir + "/build/x86")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.a", current_dir + "/build/x86/libicudata.a")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.a", current_dir + "/build/x86/libicuuc.a")
base.copy_file(current_dir + "/icu/x86/data/out/icudt58l.dat", current_dir + "/build/x86/icudt58l.dat")
os.chdir(current_dir_old)
return

View File

@ -62,6 +62,7 @@ def make():
vlc_dir = base_dir + "/vlc"
vlc_version = "3.0.18"
tools_dir = base.get_script_dir() + "/../tools"
old_cur = os.getcwd()
os.chdir(base_dir)
@ -73,7 +74,7 @@ def make():
base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
if "windows" == base.host_platform():
base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
base.create_dir("build")
base.copy_file("tools/ignore-cache-time.patch", "vlc")
@ -83,7 +84,7 @@ def make():
base.copy_file("tools/win_64/build.patch", "vlc")
docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
if config.check_option("platform", "win_32"):
base.copy_file("tools/win_32/build.patch", "vlc")
docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
@ -91,9 +92,11 @@ def make():
# linux
if config.check_option("platform", "linux_64"):
base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
# mac
if "mac" == base.host_platform():
os.chdir(vlc_dir)

View File

@ -1,18 +1,19 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
sys.path.append('android')
import base
import config
import os
import subprocess
import openssl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
old_cur = os.getcwd()
os.chdir(path)
base.set_env("ANDROID_HOME", base.get_android_sdk_home())
if (-1 != config.option("platform").find("android") and not base.is_dir("./build/android")):
subprocess.call(["./build-android-openssl.sh"])
if (-1 != config.option("platform").find("android")):
openssl_android.make()
if (-1 != config.option("platform").find("ios") and not base.is_dir("./build/ios")):
subprocess.call(["./build-ios-openssl.sh"])

View File

@ -27,8 +27,16 @@ def make():
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
if not base.is_dir(base_dir + "/socket.io-client-cpp"):
base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
# patches
base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
# no TLS implementation (remove once socket.io fixes this)
dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"

View File

@ -93,6 +93,7 @@ def make():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
@ -225,6 +226,7 @@ def make_xp():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
@ -232,7 +234,7 @@ def make_xp():
# old variant
#path_to_python2 = "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin"
path_to_python2 = "/depot_tools/bootstrap-2@3_8_10_chromium_26_bin/python/bin"
path_to_python2 = "/depot_tools/bootstrap-2@3_8_10_chromium_23_bin/python/bin"
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
base_dir + path_to_python2,
config.option("vs-path") + "/../Common7/IDE",
@ -269,6 +271,13 @@ def make_xp():
" replaceInFile(file, '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>')",
]);
programFilesDir = base.get_env("ProgramFiles")
if ("" != base.get_env("ProgramFiles(x86)")):
programFilesDir = base.get_env("ProgramFiles(x86)")
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
if (base.is_dir(dev_path)):
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
# add "SET CL=\"/D_ITERATOR_DEBUG_LEVEL=0\"" before devenv for disable _ITERATOR_DEBUG_LEVEL in debug
if config.check_option("platform", "win_64_xp"):
if not base.is_dir("win_64/release"):

View File

@ -7,6 +7,23 @@ import base
import os
import subprocess
def change_bootstrap():
    # Pin depot_tools' bootstrap packages (python2, python3, git) to fixed
    # CIPD versions by rewriting bootstrap/manifest.txt, so the v8 build
    # does not break when upstream bumps the defaults.  The stock manifest
    # is preserved as manifest.txt.bak.
    base.move_file("./depot_tools/bootstrap/manifest.txt", "./depot_tools/bootstrap/manifest.txt.bak")
    content = "# changed by build_tools\n\n"
    content += "$VerifiedPlatform windows-amd64 windows-arm64 linux-amd64 mac-amd64 mac-arm64\n\n"
    content += "@Subdir python\n"
    content += "infra/3pp/tools/cpython/${platform} version:2@2.7.18.chromium.39\n\n"
    content += "@Subdir python3\n"
    content += "infra/3pp/tools/cpython3/${platform} version:2@3.8.10.chromium.23\n\n"
    content += "@Subdir git\n"
    content += "infra/3pp/tools/git/${platform} version:2@2.41.0.chromium.11\n"
    base.writeFile("./depot_tools/bootstrap/manifest.txt", content)
    return
def make_args(args, platform, is_64=True, is_debug=False):
args_copy = args[:]
if is_64:
@ -46,6 +63,12 @@ def ninja_windows_make(args, is_64=True, is_debug=False):
base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")
win_toolset_wrapper_file = "build/toolchain/win/tool_wrapper.py"
win_toolset_wrapper_file_content = base.readFile("build/toolchain/win/tool_wrapper.py")
if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")):
base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n")
base.cmd("ninja", ["-C", directory_out, "v8_wrappers"])
base.cmd("ninja", ["-C", directory_out])
base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja")
@ -85,6 +108,7 @@ def make():
os.chdir(base_dir)
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
change_bootstrap()
os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]

View File

@ -4,12 +4,10 @@ import sys
sys.path.append('../..')
import config
import base
import ixwebsocket
import socketrocket
#import ixwebsocket
#import socketrocket
import socket_io
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
def make():
#ixwebsocket.make()
#socketrocket.make()

View File

@ -76,6 +76,12 @@ def make():
if (0 == platform.find("win")):
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
# python wrapper
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
# java wrapper
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.jni")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.java/build/libs/docbuilder.jar", root_dir + "/docbuilder.jar")
# app
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
@ -111,9 +117,14 @@ def make():
if ("ios" == platform):
base.generate_plist(root_dir)
if (0 == platform.find("linux")):
base.linux_correct_rpath_docbuilder(root_dir)
if (0 == platform.find("mac")):
base.mac_correct_rpath_x2t(root_dir)
base.mac_correct_rpath_docbuilder(root_dir)
base.create_x2t_js_cache(root_dir, "builder")
return

View File

@ -39,6 +39,7 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")
@ -61,13 +62,14 @@ def make():
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester")
# js cache
base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.create_x2t_js_cache(archive_dir, "core")
base.delete_file(archive_dir + "/DoctRenderer.config")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)
if base.is_file(archive_dir + ".7z"):
base.delete_file(archive_dir + ".7z")
base.archive_folder(archive_dir + "/*", archive_dir + ".7z")
return

View File

@ -213,12 +213,17 @@ def make():
base.create_dir(root_dir + "/editors")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
if len(os.listdir(root_dir + "/editors/sdkjs")) == 0:
base.delete_dir(root_dir + "/editors/sdkjs") # delete empty folder. for bug 62528
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
base.delete_file(file)
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
base.create_dir(root_dir + "/editors/sdkjs-plugins")
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
if not isWindowsXP:
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True, isWindowsXP)
# remove some default plugins
if base.is_dir(root_dir + "/editors/sdkjs-plugins/speech"):
base.delete_dir(root_dir + "/editors/sdkjs-plugins/speech")
@ -255,6 +260,8 @@ def make():
if isUseJSC:
base.delete_file(root_dir + "/converter/icudtl.dat")
base.create_x2t_js_cache(root_dir + "/converter", "desktop")
if (0 == platform.find("win")):
base.delete_file(root_dir + "/cef_sandbox.lib")
base.delete_file(root_dir + "/libcef.lib")

View File

@ -5,6 +5,7 @@ import base
import re
import shutil
import glob
from tempfile import mkstemp
def make():
@ -113,6 +114,12 @@ def make():
js_dir = root_dir
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
base.copy_file(js_dir + "/web-apps/apps/api/documents/api.js", js_dir + "/web-apps/apps/api/documents/api.js.tpl")
for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
+ glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
base.delete_file(file)
base.create_x2t_js_cache(converter_dir, "server")
# add embed worker code
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])

View File

@ -5,9 +5,6 @@ import base
import os
import json
def get_core_url(arch, branch):
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
def make():
git_dir = base.get_script_dir() + "/../.."
old_cur = os.getcwd()
@ -18,16 +15,10 @@ def make():
os.chdir(work_dir)
arch = "x64"
arch2 = "_64"
if ("windows" == base.host_platform()) and not base.host_platform_is64():
arch = "x86"
arch2 = "_32"
url = get_core_url(arch, config.option("branch"))
url = base.get_autobuild_version("core", "", config.option("branch"))
data_url = base.get_file_last_modified_url(url)
if (data_url == "" and config.option("branch") != "develop"):
url = get_core_url(arch, "develop")
url = base.get_autobuild_version("core", "", "develop")
data_url = base.get_file_last_modified_url(url)
old_data_url = base.readFile("./core.7z.data")
@ -49,12 +40,6 @@ def make():
base.extract("./core.7z", "./")
base.writeFile("./core.7z.data", data_url)
platform = ""
if ("windows" == base.host_platform()):
platform = "win" + arch2
else:
platform = base.host_platform() + arch2
base.copy_files("./core/*", "./")
else:
print("-----------------------------------------------------------")
@ -66,6 +51,12 @@ def make():
if not base.is_dir(git_dir + "/sdkjs-plugins"):
base.create_dir(git_dir + "/sdkjs-plugins")
if not base.is_dir(git_dir + "/sdkjs-plugins/v1"):
base.create_dir(git_dir + "/sdkjs-plugins/v1")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", git_dir + "/sdkjs-plugins/v1/plugins.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", git_dir + "/sdkjs-plugins/v1/plugins-ui.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", git_dir + "/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
base.copy_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False)
@ -101,7 +92,8 @@ def make():
server_addons = []
if (config.option("server-addons") != ""):
server_addons = config.option("server-addons").rsplit(", ")
if ("server-lockstorage" in server_addons):
#server-lockstorage is private
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
server_config["editorDataStorage"] = "editorDataRedis"
sdkjs_addons = []
@ -125,6 +117,8 @@ def make():
sql["type"] = config.option("sql-type")
if (config.option("db-port") != ""):
sql["dbPort"] = config.option("db-port")
if (config.option("db-name") != ""):
sql["dbName"] = config.option("db-name")
if (config.option("db-user") != ""):
sql["dbUser"] = config.option("db-user")
if (config.option("db-pass") != ""):

View File

@ -483,8 +483,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
mysqlPath += 'bin'
return mysqlPath
def get_mysqlLoginSrting():
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
def get_mysqlLoginString():
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
def get_mysqlServersInfo():
arrInfo = []
@ -511,14 +511,14 @@ def get_mysqlServersInfo():
def check_mysqlServer():
base.print_info('Check MySQL Server')
dependence = CDependencies()
mysqlLoginSrt = get_mysqlLoginSrting()
mysqlLoginSrt = get_mysqlLoginString()
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
if (host_platform != 'windows'):
result = os.system(mysqlLoginSrt + ' -e "exit"')
if (result == 0):
connectionResult = base.run_command(connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
print('MySQL configuration is valid')
dependence.sqlPath = 'mysql'
return dependence
@ -535,7 +535,7 @@ def check_mysqlServer():
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
print(mysql_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -559,23 +559,43 @@ def check_mysqlServer():
return dependence
def check_MySQLConfig(mysqlPath = ''):
result = True
mysqlLoginSrt = get_mysqlLoginSrting()
mysqlLoginSrt = get_mysqlLoginString()
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
print('Database onlyoffice not found')
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
print('Database "' + config.option("db-name") + '" not found')
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
if (not result):
return False
print('Creating ' + config.option("db-name") + ' tables ...')
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
print('Password encryption is not valid')
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
return result
def execMySQLScript(mysql_path_to_bin, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginSrting()
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
mysqlLoginSrt = get_mysqlLoginString()
print('CREATE DATABASE ' + dbName + ';')
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
print('failed CREATE DATABASE ' + dbName + ';')
return False
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# return False
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# return False
return True
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginString()
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
if (code != 0):
print('Execution failed!')
return False
@ -584,7 +604,7 @@ def execMySQLScript(mysql_path_to_bin, scriptPath):
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
print('Setting MySQL password encrypting...')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
if (code != 0):
print('Setting password encryption failed!')
return False
@ -611,7 +631,7 @@ def get_postrgre_path_to_bin(postgrePath = ''):
def get_postgreLoginSrting(userName):
if (host_platform == 'windows'):
return 'psql -U' + userName + ' '
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U' + userName + ' -hlocalhost '
def get_postgreSQLInfoByFlag(flag):
arrInfo = []
@ -647,7 +667,7 @@ def check_postgreSQL():
result = os.system(postgreLoginSrt + ' -c "\q"')
connectionResult = base.run_command(connectionString)['stdout']
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
if (result != 0 or connectionResult.find(config.option("db-port")) == -1):
print('Valid PostgreSQL not found!')
dependence.append_install('PostgreSQL')
dependence.append_uninstall('PostgreSQL')
@ -657,7 +677,7 @@ def check_postgreSQL():
return dependence
arrInfo = get_postgreSQLInfo()
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
base.set_env('PGPASSWORD', config.option("db-pass"))
for info in arrInfo:
if (base.is_dir(info['Location']) == False):
continue
@ -665,7 +685,7 @@ def check_postgreSQL():
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
if (connectionResult.find(config.option("db-port")) != -1):
print(postgre_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -683,12 +703,12 @@ def check_postgreSQL():
def check_postgreConfig(postgrePath = ''):
result = True
if (host_platform == 'windows'):
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
base.set_env('PGPASSWORD', config.option("db-pass"))
rootUser = install_params['PostgreSQL']['root']
dbUser = install_params['PostgreSQL']['dbUser']
dbName = install_params['PostgreSQL']['dbName']
dbPass = install_params['PostgreSQL']['dbPass']
dbUser = config.option("db-user")
dbName = config.option("db-name")
dbPass = config.option("db-pass")
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
postgreLoginRoot = get_postgreLoginSrting(rootUser)
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
@ -705,7 +725,7 @@ def check_postgreConfig(postgrePath = ''):
base.print_info('Creating ' + dbName + ' user...')
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find(config.option("db-name")) == -1):
print('Database ' + dbName + ' not found')
base.print_info('Creating ' + dbName + ' database...')
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
@ -884,13 +904,22 @@ def install_gruntcli():
def install_mysqlserver():
if (host_platform == 'windows'):
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
program_files = os.environ["ProgramFiles(x86)"]
mysql_installer_path = f"\"{program_files}\MySQL\MySQL Installer for Windows\MySQLInstallerConsole.exe\""
mysql_installer_params = " community install server;"
mysql_installer_params += "8.0.21" + ";"
mysql_installer_params += "x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid="
mysql_installer_params += "3306" + ';'
mysql_installer_params += "enable_tcpip=true;port=" + "3306" + ";"
mysql_installer_params += "rootpasswd=" + "onlyoffice"
mysql_installer_params += " -silent"
base.cmd2(mysql_installer_path, [mysql_installer_params])
elif (host_platform == 'linux'):
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
code = os.system('sudo apt-get -y install zsh htop') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
return os.system('yes | sudo apt install mysql-server') and code
return 1
@ -912,7 +941,7 @@ def install_postgresql():
file_name = "install.exe"
base.download(download_url, file_name)
base.print_info("Install PostgreSQL...")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
else:
base.print_info("Install PostgreSQL...")
install_command = 'sudo apt install postgresql -y'
@ -923,7 +952,7 @@ def install_postgresql():
if (host_platform == 'windows'):
base.delete_file(file_name)
else:
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
return code
@ -974,18 +1003,11 @@ install_params = {
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
'Git': '/VERYSILENT /NORESTART',
'MySQLServer': {
'port': '3306',
'user': 'root',
'pass': 'onlyoffice',
'version': '8.0.21'
},
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
'PostgreSQL': {
'root': 'postgres',
'dbPort': '5432',
'dbName': 'onlyoffice',
'dbUser': 'onlyoffice',
'dbPass': 'onlyoffice'
'root': 'postgres'
}
}
uninstall_params = {

View File

@ -49,7 +49,18 @@ def run_integration_example():
def start_linux_services():
base.print_info('Restart MySQL Server')
def update_config(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
def make_start():
base.configure_common_apps()
@ -64,15 +75,8 @@ def make_start():
start_linux_services()
def make_configure(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
base.print_info('Build modules')
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
update_config(args)
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
def make_install():
platform = base.host_platform()

View File

@ -56,13 +56,6 @@ save text files with reports.
"reportFolder": "build_tools/scripts/license_checker/reports"
```
* `licensePath` specifies the path to the license template.
**For example:**
```json
"licensePath": "build_tools/scripts/license_checker/license_template.txt"
```
* `printChecking` specifies whether to output
information about which file is
being checked to the console.
@ -110,36 +103,14 @@ Possible array values:
```json
"fileExtensions": [".js"]
```
* `licensePath` specifies the path to the license template.
**For example:**
```json
"licensePath": "header.license"
```
* `startMultiComm` the line that starts the multiline comment.
**For example:**
```json
"startMultiComm": "/*"
```
* `endMultiComm` the line that ends the multiline comment.
You should carefully consider the formatting
of the string, all spaces are taken into account.
This affects how the license check works.
**For example:**
```json
"endMultiComm": " */"
```
Space at the beginning for a prettier comment.
* `prefix` the line on which each comment
line will begin, except for the
beginning and end.
**For example:**
```json
"prefix": " *"
```
Space at the beginning for a prettier comment.
* `ignoreListDir` folder paths to ignore.
**For example:**
@ -172,6 +143,18 @@ Possible array values:
]
```
* `allowListFile` file paths to allow. It is needed if you ignore the directory, but there is a file in it that needs to be checked.
**For example:**
```json
"ignoreListDir": [
"sdkjs/develop"
],
"allowListFile": [
"sdkjs/develop/awesomeFileToAllow.js",
]
```
Any number of configurations can be
specified, they can overlap
if we need to check

View File

@ -1,7 +1,6 @@
{
"basePath": "../../../",
"reportFolder": "build_tools/scripts/license_checker/reports",
"licensePath": "build_tools/scripts/license_checker/header.license",
"printChecking": false,
"printReports": false,
"fix": ["OUTDATED"],
@ -9,9 +8,7 @@
{
"dir": "core",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDir": [
"core/build",
"core/Common/cfcpp/test",
@ -59,9 +56,7 @@
{
"dir": "core-ext",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDir": [
"core-ext/AutoTester",
"core-ext/cell_android",
@ -83,9 +78,7 @@
{
"dir": "sdkjs",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
@ -106,9 +99,7 @@
{
"dir": "sdkjs-forms",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
@ -117,9 +108,7 @@
{
"dir": "sdkjs-ooxml",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
@ -128,9 +117,7 @@
{
"dir": "web-apps",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor",
@ -138,6 +125,7 @@
],
"ignoreListDir": [
"web-apps/apps/common/mobile",
"web-apps/apps/common/main/lib/mods",
"web-apps/apps/documenteditor/mobile",
"web-apps/apps/spreadsheeteditor/mobile",
"web-apps/apps/presentationeditor/mobile",
@ -145,7 +133,6 @@
],
"ignoreListFile": [
"web-apps/apps/api/documents/api.js",
"web-apps/apps/common/main/lib/mods/perfect-scrollbar.js",
"web-apps/apps/common/main/lib/core/application.js",
"web-apps/apps/common/main/lib/core/keymaster.js",
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
@ -154,9 +141,7 @@
{
"dir": "web-apps-mobile",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
@ -165,9 +150,7 @@
{
"dir": "server",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDir": [
"server/FileConverter/bin"
],
@ -178,9 +161,7 @@
{
"dir": "server-lockstorage",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
@ -188,9 +169,7 @@
{
"dir": "server-license",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
@ -198,9 +177,7 @@
{
"dir": "server-license-key",
"fileExtensions": [".js"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
@ -208,9 +185,7 @@
{
"dir": "editors-ios",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"startMultiComm": "/*",
"endMultiComm": " */",
"prefix": " *",
"licensePath": "header.license",
"ignoreListDirName": [
"vendor",
"Vendor",

View File

@ -1,28 +1,31 @@
(c) Copyright Ascensio System SIA 2010-2023
This program is a free software product. You can redistribute it and/or
modify it under the terms of the GNU Affero General Public License (AGPL)
version 3 as published by the Free Software Foundation. In accordance with
Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
that Ascensio System SIA expressly excludes the warranty of non-infringement
of any third-party rights.
This program is distributed WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
street, Riga, Latvia, EU, LV-1050.
The interactive user interfaces in modified source and object code versions
of the Program must display Appropriate Legal Notices, as required under
Section 5 of the GNU AGPL version 3.
Pursuant to Section 7(b) of the License you must retain the original Product
logo when distributing the program. Pursuant to Section 7(e) we decline to
grant you any rights under trademark law for use of our trademarks.
All the Product's GUI elements, including illustrations and icon sets, as
well as technical writing content are licensed under the terms of the
Creative Commons Attribution-ShareAlike 4.0 International. See the License
terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/

View File

@ -25,9 +25,6 @@ class Config(object):
Attributes:
dir: Directory to check.
fileExtensions: file extensions to check.
startMultiComm: characters to start a multi-line comment.
endMultiComm: characters to end a multi-line comment.
prefix: prefix for multiline comments
ignoreListDir: Ignored folder paths.
ignoreListDirName: Ignored folder names.
ignoreListFile: Ignored file paths.
@ -36,9 +33,7 @@ class Config(object):
def __init__(self,
dir: str,
fileExtensions: list[str],
startMultiComm: str,
endMultiComm: str,
prefix: str = '',
licensePath: str = 'header.license',
allowListFile: list[str] = [],
ignoreListDir: list[str] = [],
ignoreListDirName: list[str] = [],
@ -46,13 +41,19 @@ class Config(object):
self._dir = dir
self._fileExtensions = fileExtensions
self._startMultiComm = startMultiComm
self._endMultiComm = endMultiComm
self._prefix = prefix
self._allowListFile = allowListFile
self._ignoreListDir = ignoreListDir
self._ignoreListDirName = ignoreListDirName
self._ignoreListFile = ignoreListFile
"""Read license template."""
with open(licensePath, 'r', encoding="utf8") as file:
lines = file.readlines()
if not lines:
raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is not it empty?')
non_empty_lines = [s for s in lines if not s.isspace()]
self._startMultiComm = non_empty_lines[0]
self._endMultiComm = non_empty_lines[-1]
self._license_lines = lines
def getDir(self) -> str:
return self._dir
@ -62,8 +63,8 @@ class Config(object):
return self._startMultiComm
def getEndMultiComm(self) -> str:
return self._endMultiComm
def getPrefix(self) -> str:
return self._prefix
def getLicense(self) -> list[str]:
return self._license_lines
def getAllowListFile(self) -> list[str]:
return self._allowListFile
def getIgnoreListDir(self) -> list[str]:
@ -77,7 +78,6 @@ with open(CONFIG_PATH, 'r') as j:
_json: dict = json.load(j)
BASE_PATH: str = _json.get('basePath') or '../../../'
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'
LICENSE_TEMPLATE_PATH: str = _json.get('licensePath') or 'build_tools/scripts/license_checker/header.license'
if (_json.get('fix')):
try:
FIX: list[ErrorType] = list(map(lambda x: FIX_TYPES[x], _json.get('fix')))
@ -93,23 +93,6 @@ with open(CONFIG_PATH, 'r') as j:
os.chdir(BASE_PATH)
with open(LICENSE_TEMPLATE_PATH, 'r') as f:
LICENSE: list[str] = f.readlines()
if not LICENSE:
raise Exception(f'Error getting license template. Cannot read {LICENSE_TEMPLATE_PATH} file. Is not it empty?')
def getLicense(start: str, prefix: str, end: str) -> list[str]:
"""Returns a valid license for any kind of comment prefix."""
result = [start]
for i in LICENSE:
if i == '\n':
result.append(prefix)
else:
result.append(f'{" ".join([prefix, i.strip()])}')
result.append(prefix)
result.append(end)
return result
class Error(object):
def __init__(self, errorType: ErrorType) -> None:
self._errorType = errorType
@ -144,8 +127,6 @@ class Checker(object):
self._reports: list[Report] = []
def getReports(self):
return self._reports
def getLicense(self):
return getLicense(start=self._config.getStartMultiComm(), prefix=self._config.getPrefix(), end=self._config.getEndMultiComm())
def _checkLine(self, line: str, prefix: str) -> bool:
"""Checks if a line has a prefix."""
"""Trim to catch invalid license without leading spaces"""
@ -172,7 +153,7 @@ class Checker(object):
break
return result
def _checkLicense(self, test: list[str], pathToFile: str) -> Report:
license = self.getLicense()
license = self._config.getLicense()
if len(license) != len(test):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.LEN_MISMATCH),
@ -180,32 +161,29 @@ class Checker(object):
invalidLinesCount = 0
lastWrongLine = 0
for i in range(len(license)):
if (license[i] != test[i].strip('\n')):
if (license[i] != test[i]):
invalidLinesCount += 1
lastWrongLine = i
if (invalidLinesCount == 1):
r = r'\d\d\d\d\-\d\d\d\d'
testDate = re.search(r, test[lastWrongLine])
licenseDate = re.search(r, license[lastWrongLine])
r = r'\d\d\d\d'
testDate = re.findall(r, test[lastWrongLine])
licenseDate = re.findall(r, license[lastWrongLine])
if testDate and licenseDate:
testDate = testDate.group()
licenseDate = licenseDate.group()
else:
if not (testDate and licenseDate):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.INVALID_LICENSE),
message=f'Something wrong...')
testLastYear = testDate.split('-')[1]
licenseLastYear = licenseDate.split('-')[1]
if (int(testLastYear) < int(licenseLastYear)):
testLastYear = int(testDate[-1])
licenseLastYear = int(licenseDate[-1])
if (testLastYear < licenseLastYear):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.OUTDATED),
message=f'Found date {testDate}, expected {licenseDate}')
message=f'Found date {testLastYear}, expected {licenseLastYear}')
else:
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.INVALID_LICENSE),
message=f"Found something similar to the date: {testDate}, but it's not correct. Expected: {licenseDate}")
message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
elif (invalidLinesCount > 0):
return Report(pathToFile=pathToFile,
error=Error(errorType=ErrorType.INVALID_LICENSE),
@ -286,8 +264,9 @@ class Fixer(object):
with open(pathToFile, 'r', encoding="utf8") as file:
buffer = file.readlines()
with open(pathToFile, 'w', encoding="utf8") as file:
license = self._checker.getLicense()
file.writelines(map(lambda x: "".join([x, '\n']), license))
license = self._config.getLicense()
file.writelines(license)
file.write('\n')
file.writelines(buffer)
return
def _fixLicense(self, pathToFile: str):
@ -301,8 +280,8 @@ class Fixer(object):
for i in oldLicense:
buffer.remove(i)
with open(pathToFile, 'w', encoding=writeEncoding) as file:
license = self._checker.getLicense()
file.writelines(map(lambda x: "".join([x, '\n']), license))
license = self._config.getLicense()
file.writelines(license)
file.writelines(buffer)
return
@ -328,7 +307,7 @@ def writeReports(reports: list[Report]) -> None:
for i in reports:
files[i.getError().getErrorType().name].append(i)
for i in ErrorType:
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w') as f:
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w', encoding="utf8") as f:
f.writelines(map(lambda x: "".join([x.report(), '\n']), files.get(i.name)))
for config in CONFIGS:

View File

@ -17,10 +17,7 @@ if utils.is_windows():
desktop_product_name = "Desktop Editors"
desktop_product_name_s = desktop_product_name.replace(" ","")
desktop_package_name = company_name + "-" + desktop_product_name_s
desktop_vcredist_list = ["2022"]
desktop_changes_dir = "desktop-apps/win-linux/package/windows/update/changes"
desktop_changes_url = "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/changes"
desktop_updates_url = "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/updates"
if utils.is_macos():
desktop_package_name = "ONLYOFFICE"
@ -33,6 +30,59 @@ if utils.is_macos():
builder_product_name = "Document Builder"
if utils.is_linux():
desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"]
builder_make_targets = ["deb", "rpm"] # tar
server_make_targets = ["deb", "rpm", "tar"]
builder_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "builder/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "builder/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "builder/linux/rhel/"
}
]
desktop_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "desktop/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "desktop/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "desktop/linux/rhel/"
},
{
"make": "rpm-suse",
"src": "rpm-suse/build/RPMS/*/*.rpm",
"dst": "desktop/linux/suse/"
}
]
server_make_targets = [
{
"make": "deb",
"src": "deb/*.deb",
"dst": "server/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "server/linux/rhel/"
},
{
"make": "tar",
"src": "*.tar*",
"dst": "server/linux/snap/"
}
]

View File

@ -7,14 +7,17 @@ import package_branding as branding
def make():
utils.log_h1("BUILDER")
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
utils.log("Unsupported host OS")
return
if common.deploy:
make_archive()
if utils.is_windows():
make_windows()
elif utils.is_macos():
make_macos()
elif utils.is_linux():
make_linux()
else:
utils.log("Unsupported host OS")
return
def s3_upload(files, dst):
@ -29,6 +32,37 @@ def s3_upload(files, dst):
ret &= upload
return ret
def make_archive():
utils.set_cwd(utils.get_path(
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
utils.log_h2("builder archive build")
utils.delete_file("builder.7z")
args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"]
if utils.is_windows():
ret = utils.cmd(*args, verbose=True)
else:
ret = utils.sh(" ".join(args), verbose=True)
utils.set_summary("builder archive build", ret)
utils.log_h2("builder archive deploy")
dest = "builder-" + common.prefix.replace("_","-") + ".7z"
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
ret = utils.s3_upload(
"builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
utils.set_summary("builder archive deploy", ret)
if ret:
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
utils.add_deploy_data(dest_version)
utils.s3_copy(
"s3://" + branding.s3_bucket + "/" + dest_version,
"s3://" + branding.s3_bucket + "/" + dest_latest)
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
utils.set_cwd(common.workspace_dir)
return
def make_windows():
global inno_file, zip_file, suffix, key_prefix
utils.set_cwd("document-builder-package")
@ -127,7 +161,7 @@ def make_macos():
if common.deploy and ret:
utils.log_h2("builder deploy")
ret = s3_upload([builder_tar], "builder/mac/")
ret = s3_upload([builder_tar], "builder/mac/generic/")
utils.set_summary("builder deploy", ret)
utils.set_cwd(common.workspace_dir)
@ -137,7 +171,7 @@ def make_linux():
utils.set_cwd("document-builder-package")
utils.log_h2("builder build")
make_args = branding.builder_make_targets
make_args = [t["make"] for t in branding.builder_make_targets]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -146,32 +180,10 @@ def make_linux():
utils.set_summary("builder build", ret)
if common.deploy:
if ret:
if "tar" in branding.builder_make_targets:
utils.log_h2("builder tar deploy")
ret = s3_upload(
utils.glob_path("tar/*.tar.gz"),
"builder/linux/generic/")
utils.set_summary("builder tar deploy", ret)
if "deb" in branding.builder_make_targets:
utils.log_h2("builder deb deploy")
ret = s3_upload(
utils.glob_path("deb/*.deb"),
"builder/linux/debian/")
utils.set_summary("builder deb deploy", ret)
if "rpm" in branding.builder_make_targets:
utils.log_h2("builder rpm deploy")
ret = s3_upload(
utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
"builder/linux/rhel/")
utils.set_summary("builder rpm deploy", ret)
else:
if "tar" in branding.builder_make_targets:
utils.set_summary("builder tar deploy", False)
if "deb" in branding.builder_make_targets:
utils.set_summary("builder deb deploy", False)
if "rpm" in branding.builder_make_targets:
utils.set_summary("builder rpm deploy", False)
for t in branding.builder_make_targets:
utils.log_h2("builder " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("builder " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
return

View File

@ -10,70 +10,87 @@ def make():
utils.log("Unsupported host OS")
return
if common.deploy:
make_core()
make_archive()
return
def make_core():
prefix = common.platformPrefixes[common.platform]
company = branding.company_name.lower()
repos = {
"windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." + common.build },
"windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." + common.build },
"darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build },
"darwin_arm64": { "repo": "mac", "arch": "arm", "version": common.version + "-" + common.build },
"linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build },
}
repo = repos[common.platform]
branch = utils.get_env("BRANCH_NAME")
core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company))
dest_version = "%s/core/%s/%s/%s" % (repo["repo"], branch, repo["version"], repo["arch"])
dest_latest = "%s/core/%s/%s/%s" % (repo["repo"], branch, "latest", repo["arch"])
def make_archive():
utils.set_cwd(utils.get_path(
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
if branch is None:
utils.log_err("BRANCH_NAME variable is undefined")
utils.set_summary("core deploy", False)
return
if not utils.is_file(core_7z):
utils.log_err("file not exist: " + core_7z)
utils.set_summary("core deploy", False)
return
utils.log_h2("core archive build")
utils.delete_file("core.7z")
args = ["7z", "a", "-y", "core.7z", "./core/*"]
if utils.is_windows():
ret = utils.cmd(*args, verbose=True)
else:
ret = utils.sh(" ".join(args), verbose=True)
utils.set_summary("core archive build", ret)
utils.log_h2("core deploy")
utils.log_h2("core archive deploy")
dest = "core-" + common.prefix.replace("_","-") + ".7z"
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
ret = utils.s3_upload(
core_7z,
"s3://" + branding.s3_bucket + "/" + dest_version + "/core.7z")
"core.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
utils.set_summary("core archive deploy", ret)
if ret:
utils.log("URL: " + branding.s3_base_url + "/" + dest_version + "/core.7z")
utils.add_deploy_data(dest_version + "/core.7z")
ret = utils.s3_sync(
"s3://" + branding.s3_bucket + "/" + dest_version + "/",
"s3://" + branding.s3_bucket + "/" + dest_latest + "/",
delete=True)
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest + "/core.7z")
utils.set_summary("core deploy", ret)
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
utils.add_deploy_data(dest_version)
utils.s3_copy(
"s3://" + branding.s3_bucket + "/" + dest_version,
"s3://" + branding.s3_bucket + "/" + dest_latest)
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
utils.set_cwd(common.workspace_dir)
return
def deploy_closuremaps(license):
def deploy_closuremaps_sdkjs(license):
if not common.deploy: return
utils.log_h1("CLOSURE MAPS")
utils.set_cwd(utils.get_path("sdkjs/build/maps"))
utils.log_h1("SDKJS CLOSURE MAPS")
maps = utils.glob_path("*.js.map")
if not maps:
maps = utils.glob_path("sdkjs/build/maps/*.js.map")
if maps:
for m in maps: utils.log("- " + m)
else:
utils.log_err("files do not exist")
utils.set_summary("closure maps " + license + " deploy", False)
utils.set_summary("sdkjs closure maps %s deploy" % license, False)
return
utils.log_h2("closure maps " + license + " deploy")
utils.log_h2("sdkjs closure maps %s deploy" % license)
ret = True
for f in maps:
key = "closure-maps/%s/%s/%s/%s" % (license, common.version, common.build, f)
base = utils.get_basename(f)
key = "closure-maps/sdkjs/%s/%s/%s/%s" % (license, common.version, common.build, base)
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
ret &= upload
if upload:
utils.log("URL: " + branding.s3_base_url + "/" + key)
utils.add_deploy_data(key)
utils.set_summary("closure maps " + license + " deploy", ret)
utils.set_cwd(common.workspace_dir)
utils.set_summary("sdkjs closure maps %s deploy" % license, ret)
return
def deploy_closuremaps_webapps(license):
if not common.deploy: return
utils.log_h1("WEB-APPS CLOSURE MAPS")
maps = utils.glob_path("web-apps/deploy/web-apps/apps/*/*/*.js.map") \
+ utils.glob_path("web-apps/deploy/web-apps/apps/*/mobile/dist/js/*.js.map")
if maps:
for m in maps: utils.log("- " + m)
else:
utils.log_err("files do not exist")
utils.set_summary("web-apps closure maps %s deploy" % license, False)
return
utils.log_h2("web-apps closure maps %s deploy" % license)
ret = True
for f in maps:
base = utils.get_relpath(f, "web-apps/deploy/web-apps/apps").replace("/", "_")
key = "closure-maps/web-apps/%s/%s/%s/%s" % (license, common.version, common.build, base)
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
ret &= upload
if upload:
utils.log("URL: " + branding.s3_base_url + "/" + key)
utils.add_deploy_data(key)
utils.set_summary("web-apps closure maps %s deploy" % license, ret)
return

View File

@ -36,171 +36,104 @@ def s3_upload(files, dst):
#
def make_windows():
global package_version, arch_list, source_dir, branding_dir, desktop_dir, viewer_dir, \
inno_file, inno_sa_file, inno_update_file, inno_update_file_new, advinst_file
global package_name, package_version, arch, xp, suffix
utils.set_cwd("desktop-apps\\win-linux\\package\\windows")
package_name = branding.desktop_package_name
package_version = common.version + "." + common.build
arch_list = {
arch = {
"windows_x64": "x64",
"windows_x64_xp": "x64",
"windows_x86": "x86",
"windows_x86_xp": "x86"
}
suffix = arch_list[common.platform]
if common.platform.endswith("_xp"): suffix += "-xp"
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
inno_update_file = "update\\editors_update_%s.exe" % suffix.replace("-","_")
inno_update_file_new = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix)
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
if branding.onlyoffice:
branding_dir = "."
else:
branding_dir = common.workspace_dir + "\\" + common.branding + "\\desktop-apps\\win-linux\\package\\windows"
}[common.platform]
xp = common.platform.endswith("_xp")
suffix = arch + ("-xp" if xp else "")
if common.clean:
utils.log_h2("desktop clean")
utils.delete_dir("build")
# utils.delete_dir("data\\vcredist")
utils.delete_dir("DesktopEditors-cache")
utils.delete_files("*.exe")
utils.delete_files("*.msi")
utils.delete_files("*.aic")
utils.delete_files("*.tmp")
utils.delete_files("*.zip")
utils.delete_files("update\\*.exe")
utils.delete_files("update\\*.xml")
utils.delete_files("update\\*.html")
utils.log_h2("copy arifacts")
source_dir = "%s\\build_tools\\out\\%s\\%s" \
% (common.workspace_dir, common.prefix, branding.company_name)
utils.create_dir("build")
desktop_dir = "build\\" + branding.desktop_product_name_s
utils.copy_dir(source_dir + "\\" + branding.desktop_product_name_s, desktop_dir)
if not branding.onlyoffice:
viewer_dir = "build\\" + branding.viewer_product_name_s
utils.copy_dir(source_dir + "\\" + branding.viewer_product_name_s, viewer_dir)
utils.delete_files("data\\*.exe")
make_prepare()
make_zip()
if not download_vcredist():
utils.set_summary("desktop inno build", False)
utils.set_summary("desktop inno standalone build", False)
utils.set_summary("desktop inno update build", False)
utils.set_summary("desktop advinst build", False)
utils.set_cwd(common.workspace_dir)
return
make_inno()
if common.platform == "windows_x64":
make_update_files()
if common.platform in ["windows_x64", "windows_x86"]:
make_advinst()
make_advinst()
utils.set_cwd(common.workspace_dir)
return
def make_zip():
utils.log_h2("desktop zip build")
def make_prepare():
args = [
"-Target", common.platform,
"-BuildDir", "build",
"-DesktopDir", branding.desktop_product_name_s
"-Version", package_version,
"-Arch", arch
]
if not branding.onlyoffice:
args += ["-MultimediaDir", branding.viewer_product_name_s]
args += ["-BrandingDir", branding_dir]
if branding.onlyoffice and not common.platform.endswith("_xp"):
args += ["-ExcludeHelp"]
if xp:
args += ["-Target", "xp"]
if common.sign:
args += ["-Sign", "-CertName", branding.cert_name]
args += ["-Sign"]
utils.log_h2("desktop prepare")
ret = utils.ps1("make.ps1", args, verbose=True)
utils.set_summary("desktop prepare", ret)
return
def make_zip():
zip_file = "%s-%s-%s.zip" % (package_name, package_version, suffix)
args = [
"-Version", package_version,
"-Arch", arch
]
if xp:
args += ["-Target", "xp"]
# if common.sign:
# args += ["-Sign"]
utils.log_h2("desktop zip build")
ret = utils.ps1("make_zip.ps1", args, verbose=True)
utils.set_summary("desktop zip build", ret)
if common.deploy and ret:
utils.log_h2("desktop zip deploy")
ret = s3_upload(utils.glob_path("*.zip"), "desktop/win/generic/")
ret = s3_upload([zip_file], "desktop/win/generic/")
utils.set_summary("desktop zip deploy", ret)
return
def download_vcredist():
vcredist = {
# Microsoft Visual C++ 2015-2022 Redistributable - 14.38.33130
"windows_x64": {
"url": "https://aka.ms/vs/17/release/vc_redist.x64.exe",
"md5": "101b0b9f74cdc6cdbd2570bfe92e302c"
},
"windows_x86": {
"url": "https://aka.ms/vs/17/release/vc_redist.x86.exe",
"md5": "0d762264d9765e21c15a58edc43f4706"
},
# Microsoft Visual C++ 2015-2019 Redistributable - 14.27.29114
"windows_x64_xp": {
"url": "https://download.visualstudio.microsoft.com/download/pr/722d59e4-0671-477e-b9b1-b8da7d4bd60b/591CBE3A269AFBCC025681B968A29CD191DF3C6204712CBDC9BA1CB632BA6068/VC_redist.x64.exe",
"md5": "bc8e3e714b727b3bb18614bd6a51a3d3"
},
"windows_x86_xp": {
"url": "https://download.visualstudio.microsoft.com/download/pr/c168313d-1754-40d4-8928-18632c2e2a71/D305BAA965C9CD1B44EBCD53635EE9ECC6D85B54210E2764C8836F4E9DEFA345/VC_redist.x86.exe",
"md5": "ec3bee79a85ae8e3581a8c181b336d1e"
}
}
vcredist_file = "data\\vcredist_%s.exe" % arch_list[common.platform]
utils.log_h2("vcredist download " + vcredist_file)
ret = utils.download_file(
vcredist[common.platform]["url"],
vcredist_file,
vcredist[common.platform]["md5"],
verbose=True)
utils.set_summary("vcredist download", ret)
return ret
def make_inno():
utils.log_h2("desktop inno build")
inno_arch_list = {
"windows_x64": "64",
"windows_x86": "32",
"windows_x64_xp": "64",
"windows_x86_xp": "32"
}
iscc_args = [
"/Qp",
"/DVERSION=" + package_version,
"/DsAppVersion=" + package_version,
"/DDEPLOY_PATH=" + desktop_dir,
"/DARCH=" + arch_list[common.platform],
"/D_ARCH=" + inno_arch_list[common.platform],
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
inno_update_file = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix)
update_wrapper = not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper)
args = [
"-Version", package_version,
"-Arch", arch
]
if branding.onlyoffice:
iscc_args.append("/D_ONLYOFFICE=1")
else:
iscc_args.append("/DsBrandingFolder=" + \
utils.get_abspath(common.workspace_dir + "\\" + common.branding + "\\desktop-apps"))
if common.platform.endswith("_xp"):
iscc_args.append("/D_WIN_XP=1")
if common.sign:
iscc_args.append("/DENABLE_SIGNING=1")
iscc_args.append("/Sbyparam=signtool.exe sign /a /v /n $q" + \
branding.cert_name + "$q /t " + common.tsa_server + " $f")
args = ["iscc"] + iscc_args + ["common.iss"]
ret = utils.cmd(*args, creates=inno_file, verbose=True)
args += ["-Sign"]
utils.log_h2("desktop inno build")
if xp:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp"], verbose=True)
else:
ret = utils.ps1("make_inno.ps1", args, verbose=True)
utils.set_summary("desktop inno build", ret)
if branding.onlyoffice and not common.platform.endswith("_xp"):
args = ["iscc"] + iscc_args + ["/DEMBED_HELP", "/DsPackageEdition=Standalone", "common.iss"]
ret = utils.cmd(*args, creates=inno_sa_file, verbose=True)
if branding.onlyoffice and not xp:
utils.log_h2("desktop inno standalone")
ret = utils.ps1("make_inno.ps1", args + ["-Target", "standalone"], verbose=True)
utils.set_summary("desktop inno standalone build", ret)
if not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper):
args = ["iscc"] + iscc_args + ["/DTARGET_NAME=" + inno_file, "update_common.iss"]
ret = utils.cmd(*args, creates=inno_update_file, verbose=True)
if update_wrapper:
utils.log_h2("desktop inno update build")
if xp:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp_update"], verbose=True)
else:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "update"], verbose=True)
utils.set_summary("desktop inno update build", ret)
if common.deploy:
@ -208,127 +141,45 @@ def make_inno():
ret = s3_upload([inno_file], "desktop/win/inno/")
utils.set_summary("desktop inno deploy", ret)
if branding.onlyoffice and not common.platform.endswith("_xp"):
if branding.onlyoffice and not xp:
utils.log_h2("desktop inno standalone deploy")
ret = s3_upload([inno_sa_file], "desktop/win/inno/")
utils.set_summary("desktop inno standalone deploy", ret)
utils.log_h2("desktop inno update deploy")
if utils.is_file(inno_update_file):
ret = s3_upload(
[inno_update_file], "desktop/win/inno/" + inno_update_file_new)
ret = s3_upload([inno_update_file], "desktop/win/inno/")
elif utils.is_file(inno_file):
ret = s3_upload(
[inno_file], "desktop/win/inno/" + inno_update_file_new)
ret = s3_upload([inno_file], "desktop/win/inno/" + inno_update_file)
else:
ret = False
utils.set_summary("desktop inno update deploy", ret)
return
def make_update_files():
utils.log_h2("desktop update files build")
changes_dir = common.workspace_dir + "\\" + utils.get_path(branding.desktop_changes_dir) + "\\" + common.version
if common.deploy and utils.glob_path(changes_dir + "\\*.html"):
utils.log_h2("desktop update files deploy")
changes_dir = common.workspace_dir + "\\" \
+ utils.get_path(branding.desktop_changes_dir) + "\\" + common.version
if common.platform == "windows_x64" and \
common.deploy and \
utils.glob_path(changes_dir + "\\*.html"):
utils.log_h2("desktop changelog deploy")
ret = s3_upload(
utils.glob_path(changes_dir + "\\*.html"),
"desktop/win/update/%s/%s/" % (common.version, common.build))
utils.set_summary("desktop update files deploy", ret)
utils.set_summary("desktop changelog deploy", ret)
return
def make_advinst():
utils.log_h2("desktop advinst build")
msi_build = {
"windows_x64": "MsiBuild64",
"windows_x86": "MsiBuild32"
}[common.platform]
if not branding.onlyoffice:
multimedia_dir = common.workspace_dir + "\\" + common.branding + "\\multimedia"
utils.copy_file(branding_dir + "\\dictionary.ail", "dictionary.ail")
utils.copy_dir_content(branding_dir + "\\data", "data", ".bmp")
utils.copy_dir_content(branding_dir + "\\data", "data", ".png")
utils.copy_dir_content(
branding_dir + "\\..\\..\\extras\\projicons\\res",
"..\\..\\extras\\projicons\\res",
".ico")
utils.copy_file(
branding_dir + "\\..\\..\\..\\common\\package\\license\\eula_" + common.branding + ".rtf",
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf")
utils.copy_file(
multimedia_dir + "\\imageviewer\\icons\\ico\\" + common.branding + ".ico",
"..\\..\\extras\\projicons\\res\\icons\\gallery.ico")
utils.copy_file(
multimedia_dir + "\\videoplayer\\icons\\" + common.branding + ".ico",
"..\\..\\extras\\projicons\\res\\icons\\media.ico")
utils.write_file(desktop_dir + "\\converter\\package.config", "package=msi")
aic_content = [";aic"]
if not common.sign:
aic_content += [
"ResetSig"
]
if branding.onlyoffice:
for path in utils.glob_path(desktop_dir + "\\editors\\web-apps\\apps\\*\\main\\resources\\help"):
utils.delete_dir(path)
aic_content += [
"DelFolder CUSTOM_PATH"
]
else:
aic_content += [
"SetProperty UpgradeCode=\"" + branding.desktop_upgrade_code + "\"",
"AddUpgradeCode {47EEF706-B0E4-4C43-944B-E5F914B92B79} \
-min_ver 7.1.1 -include_min_ver \
-max_ver 7.2.2 -include_max_ver \
-include_lang 1049 \
-property_name UPGRADE_2 -enable_migrate",
"DelLanguage 1029 -buildname " + msi_build,
"DelLanguage 1031 -buildname " + msi_build,
"DelLanguage 1041 -buildname " + msi_build,
"DelLanguage 1046 -buildname " + msi_build,
"DelLanguage 2070 -buildname " + msi_build,
"DelLanguage 1060 -buildname " + msi_build,
"DelLanguage 1036 -buildname " + msi_build,
"DelLanguage 3082 -buildname " + msi_build,
"DelLanguage 1033 -buildname " + msi_build,
"SetCurrentFeature ExtendedFeature",
"NewSync CUSTOM_PATH " + viewer_dir,
"UpdateFile CUSTOM_PATH\\ImageViewer.exe " + viewer_dir + "\\ImageViewer.exe",
"UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + viewer_dir + "\\VideoPlayer.exe",
"SetProperty ProductName=\"" + branding.desktop_product_name_full + "\"",
"SetProperty ASCC_REG_PREFIX=" + branding.ascc_reg_prefix
]
if common.platform == "windows_x86":
aic_content += [
"SetComponentAttribute -feature_name ExtendedFeature -unset -64bit_component"
]
if common.platform == "windows_x86":
aic_content += [
"SetComponentAttribute -feature_name MainFeature -unset -64bit_component",
"SetComponentAttribute -feature_name FileProgIds -unset -64bit_component",
"SetComponentAttribute -feature_name FileOpenWith -unset -64bit_component",
"SetComponentAttribute -feature_name FileProgramCapatibilities -unset -64bit_component",
"SetComponentAttribute -feature_name FileTypeAssociations -unset -64bit_component",
"SetComponentAttribute -feature_name FileNewTemplates -unset -64bit_component"
]
aic_content += [
"SetCurrentFeature MainFeature",
"NewSync APPDIR " + desktop_dir,
"UpdateFile APPDIR\\DesktopEditors.exe " + desktop_dir + "\\DesktopEditors.exe",
"UpdateFile APPDIR\\updatesvc.exe " + desktop_dir + "\\updatesvc.exe",
"SetProperty VERSION=\"" + package_version + "\"",
"SetProperty VERSION_SHORT=\"" + re.sub(r"^(\d+\.\d+).+", "\\1", package_version) + "\"",
"SetVersion " + package_version,
"SetPackageName " + advinst_file + " -buildname " + msi_build,
"Rebuild -buildslist " + msi_build
if not common.platform in ["windows_x64", "windows_x86"]:
return
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
args = [
"-Version", package_version,
"-Arch", arch
]
utils.write_file("DesktopEditors.aic", "\r\n".join(aic_content), "utf-8-sig")
ret = utils.cmd("AdvancedInstaller.com", "/execute", \
"DesktopEditors.aip", "DesktopEditors.aic", verbose=True)
if common.sign:
args += ["-Sign"]
utils.log_h2("desktop advinst build")
ret = utils.ps1("make_advinst.ps1", args, verbose=True)
utils.set_summary("desktop advinst build", ret)
if common.deploy and ret:
@ -460,17 +311,6 @@ def make_sparkle_updates():
)
utils.set_summary("desktop sparkle files build", ret)
utils.log("")
utils.log_h3("generate checksums")
utils.sh(
"md5 *.zip *.delta > md5sums.txt",
chdir="build/update", verbose=True
)
utils.sh(
"shasum -a 256 *.zip *.delta > sha256sums.txt",
chdir="build/update", verbose=True
)
if common.deploy:
utils.log_h2("desktop sparkle files deploy")
ret = s3_upload(
@ -489,7 +329,7 @@ def make_linux():
utils.set_cwd("desktop-apps/win-linux/package/linux")
utils.log_h2("desktop build")
make_args = branding.desktop_make_targets
make_args = [t["make"] for t in branding.desktop_make_targets]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -497,68 +337,11 @@ def make_linux():
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("desktop build", ret)
rpm_arch = "*"
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
if common.deploy:
if ret:
utils.log_h2("desktop tar deploy")
if "tar" in branding.desktop_make_targets:
ret = s3_upload(
utils.glob_path("tar/*.tar*"),
"desktop/linux/generic/")
utils.set_summary("desktop tar deploy", ret)
if "deb" in branding.desktop_make_targets:
utils.log_h2("desktop deb deploy")
ret = s3_upload(
utils.glob_path("deb/*.deb"),
"desktop/linux/debian/")
utils.set_summary("desktop deb deploy", ret)
if "deb-astra" in branding.desktop_make_targets:
utils.log_h2("desktop deb-astra deploy")
ret = s3_upload(
utils.glob_path("deb-astra/*.deb"),
"desktop/linux/astra/")
utils.set_summary("desktop deb-astra deploy", ret)
if "rpm" in branding.desktop_make_targets:
utils.log_h2("desktop rpm deploy")
ret = s3_upload(
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/rhel/")
utils.set_summary("desktop rpm deploy", ret)
if "suse-rpm" in branding.desktop_make_targets:
utils.log_h2("desktop suse-rpm deploy")
ret = s3_upload(
utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/suse/")
utils.set_summary("desktop suse-rpm deploy", ret)
if "apt-rpm" in branding.desktop_make_targets:
utils.log_h2("desktop apt-rpm deploy")
ret = s3_upload(
utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/altlinux/")
utils.set_summary("desktop apt-rpm deploy", ret)
if "urpmi" in branding.desktop_make_targets:
utils.log_h2("desktop urpmi deploy")
ret = s3_upload(
utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"desktop/linux/rosa/")
utils.set_summary("desktop urpmi deploy", ret)
else:
if "tar" in branding.desktop_make_targets:
utils.set_summary("desktop tar deploy", False)
if "deb" in branding.desktop_make_targets:
utils.set_summary("desktop deb deploy", False)
if "deb-astra" in branding.desktop_make_targets:
utils.set_summary("desktop deb-astra deploy", False)
if "rpm" in branding.desktop_make_targets:
utils.set_summary("desktop rpm deploy", False)
if "suse-rpm" in branding.desktop_make_targets:
utils.set_summary("desktop suse-rpm deploy", False)
if "apt-rpm" in branding.desktop_make_targets:
utils.set_summary("desktop apt-rpm deploy", False)
if "urpmi" in branding.desktop_make_targets:
utils.set_summary("desktop urpmi deploy", False)
for t in branding.desktop_make_targets:
utils.log_h2("desktop " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("desktop " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
return

View File

@ -22,7 +22,7 @@ def make_mobile():
utils.sh("rm -rfv *.zip", verbose=True)
utils.log_h2("mobile build")
ret = utils.sh("zip -r " + zip_file + " ./android* ./js", verbose=True)
ret = utils.sh("zip -r " + zip_file + " ./android ./ios", verbose=True)
utils.set_summary("mobile build", ret)
if common.deploy:

View File

@ -61,7 +61,8 @@ def make_linux(edition):
utils.set_cwd("document-server-package")
utils.log_h2("server " + edition + " build")
make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name]
make_args = [t["make"] for t in branding.server_make_targets]
make_args += ["-e", "PRODUCT_NAME=" + product_name]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -70,40 +71,10 @@ def make_linux(edition):
utils.set_summary("server " + edition + " build", ret)
if common.deploy:
if ret:
if "deb" in branding.server_make_targets:
utils.log_h2("server " + edition + " deb deploy")
ret = s3_upload(
utils.glob_path("deb/*.deb"),
"server/linux/debian/")
utils.set_summary("server " + edition + " deb deploy", ret)
if "rpm" in branding.server_make_targets:
utils.log_h2("server " + edition + " rpm deploy")
ret = s3_upload(
utils.glob_path("rpm/builddir/RPMS/*/*.rpm"),
"server/linux/rhel/")
utils.set_summary("server " + edition + " rpm deploy", ret)
if "apt-rpm" in branding.server_make_targets:
utils.log_h2("server " + edition + " apt-rpm deploy")
ret = s3_upload(
utils.glob_path("apt-rpm/builddir/RPMS/*/*.rpm"),
"server/linux/altlinux/")
utils.set_summary("server " + edition + " apt-rpm deploy", ret)
if "tar" in branding.server_make_targets:
utils.log_h2("server " + edition + " snap deploy")
ret = s3_upload(
utils.glob_path("*.tar.gz"),
"server/linux/snap/")
utils.set_summary("server " + edition + " snap deploy", ret)
else:
if "deb" in branding.server_make_targets:
utils.set_summary("server " + edition + " deb deploy", False)
if "rpm" in branding.server_make_targets:
utils.set_summary("server " + edition + " rpm deploy", False)
if "apt-rpm" in branding.server_make_targets:
utils.set_summary("server " + edition + " apt-rpm deploy", False)
if "tar" in branding.server_make_targets:
utils.set_summary("server " + edition + " snap deploy", False)
for t in branding.server_make_targets:
utils.log_h2("server " + edition + " " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
return

View File

@ -12,7 +12,6 @@ import subprocess
import sys
import time
import package_common as common
import base
def host_platform():
return platform.system().lower()
@ -73,6 +72,9 @@ def get_path(path):
return path.replace("/", "\\")
return path
def get_relpath(path, rel_path):
return os.path.relpath(get_path(path), get_path(rel_path))
def get_abspath(path):
return os.path.abspath(get_path(path))
@ -199,13 +201,12 @@ def copy_files(src, dst, override=True, verbose=True):
copy_files(file + "/*", dst + "/" + file_name, override)
return
def copy_dir(src, dst, override=True, verbose=True):
def copy_dir(src, dst, verbose=True):
if verbose:
log("- copy_dir:")
log(" src: " + src)
log(" dst: " + dst)
log(" override: " + str(override))
base.copy_dir(src, dst)
shutil.copytree(src, dst)
return
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
@ -215,20 +216,18 @@ def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose
log(" dst: " + dst)
log(" include: " + filter_include)
log(" exclude: " + filter_exclude)
src_folder = src
if ("/" != src[-1:]):
src_folder += "/"
src_folder += "*"
for file in glob.glob(src_folder):
basename = os.path.basename(file)
if ("" != filter_include) and (-1 == basename.find(filter_include)):
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if ("" != filter_include) and (-1 == item.find(filter_include)):
continue
if ("" != filter_exclude) and (-1 != basename.find(filter_exclude)):
if ("" != filter_exclude) and (-1 != item.find(filter_exclude)):
continue
if is_file(file):
copy_file(file, dst, verbose=False)
elif is_dir(file):
copy_dir(file, dst + "/" + basename, verbose=False)
if os.path.isdir(s):
shutil.copytree(s, d)
else:
shutil.copy2(s, d)
log(item)
return
def delete_file(path, verbose=True):
@ -323,33 +322,11 @@ def ps1(file, args=[], **kwargs):
if kwargs.get("creates") and is_exist(kwargs["creates"]):
return True
ret = subprocess.call(
["powershell", "-File", file] + args, stderr=subprocess.STDOUT, shell=True
["powershell", "-ExecutionPolicy", "ByPass", "-File", file] + args,
stderr=subprocess.STDOUT, shell=True
) == 0
return ret
def download_file(url, path, md5, verbose=False):
    """Download url into path and verify its MD5 checksum.

    Returns True if the file already exists with a matching checksum or the
    download completes and verifies; False on checksum mismatch or when the
    download produced no file.
    """
    if verbose:
        log("- download_file:")
        # fixed: url and path were printed under each other's labels
        log("  url: " + url)
        log("  path: " + path)
        log("  md5: " + md5)
    if is_file(path):
        if get_hash_md5(path) == md5:
            # a matching cached file is the success path, not an error
            log("file already exists (checksum matches)")
            return True
        log_err("wrong checksum (%s), delete" % md5)
        os.remove(path)
    ret = powershell(
        "(New-Object System.Net.WebClient).DownloadFile('%s','%s')" % (url, path),
        verbose=True
    )
    # guard: hashing a missing file would raise if the download failed outright
    if not is_file(path):
        log_err("download failed: " + url)
        return False
    md5_new = get_hash_md5(path)
    if md5 != md5_new:
        log_err("checksum didn't match (%s != %s)" % (md5, md5_new))
        return False
    return ret
def sh(command, **kwargs):
if kwargs.get("verbose"):
log("- sh:")
@ -408,15 +385,13 @@ def s3_upload(src, dst, **kwargs):
ret = sh(" ".join(args), verbose=True)
return ret
def s3_sync(src, dst, **kwargs):
def s3_copy(src, dst, **kwargs):
args = ["aws"]
if kwargs.get("endpoint_url"):
args += ["--endpoint-url", kwargs["endpoint_url"]]
args += ["s3", "sync", "--no-progress"]
args += ["s3", "cp", "--no-progress"]
if kwargs.get("acl"):
args += ["--acl", kwargs["acl"]]
if kwargs.get("delete") and kwargs["delete"]:
args += ["--delete"]
args += [src, dst]
if is_windows():
ret = cmd(*args, verbose=True)

147
scripts/qmake.py Normal file
View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
import os
import sys
__dir__name__ = os.path.dirname(__file__)
sys.path.append(__dir__name__ + '/core_common/modules/android')
import base
import config
import android_ndk
import multiprocessing
def get_make_file_suffix(platform):
    """Build the per-platform Makefile name suffix (platform id + optional
    debug marker + branding name), used as "Makefile.<suffix>"."""
    suffix = platform
    if config.check_option("config", "debug"):
        # trailing "_" separates the marker from the branding appended below
        suffix += "_debug_"
    # NOTE(review): in non-debug builds the branding is appended with no
    # separator ("<platform><branding>") — confirm this matches the makefile
    # names the rest of the build expects.
    suffix += config.option("branding")
    return suffix
def get_j_num():
    """Return make's parallel-jobs flag, or [] when multiprocess is disabled."""
    if config.option("multiprocess") == "0":
        return []
    return ["-j%d" % multiprocessing.cpu_count()]
def check_support_platform(platform):
    """Return True when a qmake binary exists in the Qt dir set up for platform."""
    qmake_path = base.qt_setup(platform) + "/bin/qmake"
    if base.is_file(qmake_path) or base.is_file(qmake_path + ".exe"):
        return True
    return False
def make(platform, project, qmake_config_addon="", is_no_errors=False):
    """Run qmake + make/nmake for one .pro file on one platform.

    platform           -- target id (e.g. "win_64", "linux_64", "android_arm").
    project            -- path to the .pro file to build.
    qmake_config_addon -- extra values appended to the qt config string.
    is_no_errors       -- when True, build errors do not abort the script.

    Restores the working directory and the whole process environment on exit.
    """
    # check platform
    if not check_support_platform(platform):
        print("THIS PLATFORM IS NOT SUPPORTED")
        return
    # snapshot the environment: the qt/android/ios setup below mutates it
    old_env = dict(os.environ)
    # qt
    qt_dir = base.qt_setup(platform)
    base.set_env("OS_DEPLOY", platform)
    # pro & makefile
    file_pro = os.path.abspath(project)
    pro_dir = os.path.dirname(file_pro)
    if (pro_dir.endswith("/.")):
        pro_dir = pro_dir[:-2]
    if (pro_dir.endswith("/")):
        pro_dir = pro_dir[:-1]
    makefile_name = "Makefile." + get_make_file_suffix(platform)
    makefile = pro_dir + "/" + makefile_name
    stash_file = pro_dir + "/.qmake.stash"
    old_cur = os.getcwd()
    os.chdir(pro_dir)
    # start from a clean qmake state
    if (base.is_file(stash_file)):
        base.delete_file(stash_file)
    if (base.is_file(makefile)):
        base.delete_file(makefile)
    base.set_env("DEST_MAKEFILE_NAME", "./" + makefile_name)
    # setup android env
    if (-1 != platform.find("android")):
        base.set_env("ANDROID_NDK_HOST", android_ndk.host["arch"])
        base.set_env("ANDROID_NDK_PLATFORM", "android-" + android_ndk.get_sdk_api())
        base.set_env("PATH", qt_dir + "/bin:" + android_ndk.toolchain_dir() + "/bin:" + base.get_env("PATH"))
    # setup ios env
    if (-1 != platform.find("ios")):
        base.hack_xcode_ios()
    # NOTE(review): the makefile was already deleted above — this repeat is redundant
    if base.is_file(makefile):
        base.delete_file(makefile)
    config_param = base.qt_config(platform)
    if ("" != qmake_config_addon):
        config_param += (" " + qmake_config_addon)
    # qmake ADDON
    qmake_addon = []
    if ("" != config.option("qmake_addon")):
        qmake_addon = config.option("qmake_addon").split()
    clean_params = ["clean", "-f", makefile]
    distclean_params = ["distclean", "-f", makefile]
    build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon
    qmake_app = qt_dir + "/bin/qmake"
    # non windows platform
    if not base.is_windows():
        base.cmd(qmake_app, build_params)
        base.correct_makefile_after_qmake(platform, makefile)
        if ("1" == config.option("clean")):
            base.cmd_and_return_cwd("make", clean_params, True)
            base.cmd_and_return_cwd("make", distclean_params, True)
            # distclean removed the makefile — regenerate it before building
            base.cmd(qmake_app, build_params)
            base.correct_makefile_after_qmake(platform, makefile)
        base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num(), is_no_errors)
    else:
        # on windows everything runs inside one generated .bat so that the
        # MSVC environment (vcvarsall) applies to qmake and nmake alike
        config_params_array = base.qt_config_as_param(config_param)
        config_params_string = ""
        for item in config_params_array:
            config_params_string += (" \"" + item + "\"")
        qmake_addon_string = " ".join(qmake_addon)
        if ("" != qmake_addon_string):
            qmake_addon_string = " " + qmake_addon_string
        qmake_bat = []
        qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
        # NOTE(review): qmake_addon_string is recomputed here, discarding the
        # value built a few lines above — the first computation is dead code
        qmake_addon_string = ""
        if ("" != config.option("qmake_addon")):
            qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
        qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
        if ("1" == config.option("clean")):
            qmake_bat.append("call nmake " + " ".join(clean_params))
            qmake_bat.append("call nmake " + " ".join(distclean_params))
            # regenerate the makefile removed by distclean
            qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
        if ("0" != config.option("multiprocess")):
            qmake_bat.append("set CL=/MP")
        qmake_bat.append("call nmake -f " + makefile)
        base.run_as_bat(qmake_bat, is_no_errors)
    if (base.is_file(stash_file)):
        base.delete_file(stash_file)
    os.chdir(old_cur)
    # restore the environment snapshot taken at entry
    os.environ.clear()
    os.environ.update(old_env)
    return
def make_all_platforms(project, qmake_config_addon=""):
    """Build the project for every requested platform known to config."""
    for target in config.option("platform").split():
        if target not in config.platforms:
            continue
        banner = "------------------------------------------"
        print(banner)
        print("BUILD_PLATFORM: " + target)
        print(banner)
        make(target, project, qmake_config_addon)
    return

View File

@ -2,6 +2,8 @@
import os
import shutil
import re
import argparse
def readFile(path):
with open(path, "r", errors='replace') as file:
filedata = file.read()
@ -112,6 +114,8 @@ class EditorApi(object):
line = line.replace("}", "")
lineWithoutSpaces = line.replace(" ", "")
if not is_found_function and 0 == line.find("function "):
if -1 == decoration.find("@constructor"):
return
codeCorrect += (line + addon_for_func + "\n")
is_found_function = True
if not is_found_function and -1 != line.find(".prototype."):
@ -177,7 +181,7 @@ class EditorApi(object):
def generate(self):
for file in self.files:
file_content = readFile(file)
file_content = readFile(f'{sdkjs_dir}/{file}')
arrRecords = file_content.split("/**")
arrRecords = arrRecords[1:-1]
for record in arrRecords:
@ -185,8 +189,8 @@ class EditorApi(object):
self.numfile += 1
correctContent = ''.join(self.records)
correctContent += "\n"
os.mkdir('deploy/api_builder/' + self.folder)
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
os.mkdir(args.destination + self.folder)
writeFile(args.destination + self.folder + "/api.js", correctContent)
return
def convert_to_interface(arrFiles, sEditorType):
@ -195,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType):
editor.generate()
return
old_cur = os.getcwd()
os.chdir("../../../sdkjs")
if True == os.path.isdir('deploy/api_builder'):
shutil.rmtree('deploy/api_builder', ignore_errors=True)
os.mkdir('deploy/api_builder')
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)
sdkjs_dir = "../../../sdkjs"

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # the argument is optional
        # Use forward slashes throughout: the previous default mixed "\" and
        # "/", so on Linux the backslashes became part of one directory name.
        # The trailing "/" matters — the generator concatenates
        # `args.destination + folder` without inserting a separator.
        default="../../../onlyoffice.github.io/sdkjs-plugins/content/macros/libs/"
    )
    args = parser.parse_args()
    # Recreate the destination from scratch; makedirs also creates any
    # missing parent directories (os.mkdir would fail on them).
    if os.path.isdir(args.destination):
        shutil.rmtree(args.destination, ignore_errors=True)
    os.makedirs(args.destination)
    convert_to_interface(["word/apiBuilder.js"], "word")
    convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
    convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")

View File

@ -0,0 +1,80 @@
# Documentation Generation Guide
This guide explains how to generate documentation for the ONLYOFFICE Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, and `generate_docs_md.py`. These scripts create JSON and Markdown documentation for the `apiBuilder.js` files of the word, cell, and slide editors.
## Requirements
```bash
Node.js v20 and above
Python v3.10 and above
```
## Installation
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
cd build_tools/scripts/sdkjs_common/jsdoc
npm install
```
## Scripts Overview
### `generate_docs_json.py`
This script generates JSON documentation based on the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api`.
### `generate_docs_plugins_json.py`
This script generates JSON documentation based on the `api_plugins.js` files.
- **Usage**:
```bash
python generate_docs_plugins_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api-plugins`.
### `generate_docs_md.py`
This script generates Markdown documentation from the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `../../../../office-js-api/`.
## Example
To generate builder JSON documentation with a custom output path:
```bash
python generate_docs_json.py /path/to/save/json
```
To generate plugins JSON documentation with a custom output path:
```bash
python generate_docs_plugins_json.py /path/to/save/json
```
To generate Markdown documentation and specify a custom output path:
```bash
python generate_docs_md.py /path/to/save/markdown
```
## Notes
- Make sure to have all necessary permissions to run these scripts and write to the specified directories.
- The output directories will be created if they do not exist.

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,216 @@
// JSDoc plugin: once parsing completes, keep only the doclets that apply to
// the editor named in the EDITOR environment variable (matched against
// @typeofeditors tags) and strip each doclet down to the fields the
// documentation generators consume.
exports.handlers = {
    processingComplete: function(e) {
        // array for filtered doclets
        let filteredDoclets = [];
        // strip the "<anonymous>~" prefix and quote characters JSDoc adds to names
        const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
        const classesDocletsMap = {}; // doclets for classes, written at the end
        let passedClasses = []; // classes that passed the filter for the current editor

        // Remove duplicate doclets: the last doclet seen for a longname wins
        const latestDoclets = {};
        e.doclets.forEach(doclet => {
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;
            if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
                latestDoclets[doclet.longname] = doclet;
            }
        });
        e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));

        // collect the classes that are referenced by doclets of the current editor
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAdd =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' &&
                (!isMethod || hasTypeofEditorsTag);
            if (shouldAdd) {
                if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
                    passedClasses.push(cleanName(doclet.memberof));
                }
            }
            else if (doclet.kind == 'class') {
                classesDocletsMap[cleanName(doclet.name)] = doclet;
            }
        }

        // remove classes that are unavailable in the current editor
        passedClasses = passedClasses.filter(className => {
            const doclet = classesDocletsMap[className];
            if (!doclet) {
                return true;
            }
            const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
            // a class passes if there is no editor tag or the current editor is among the tag values
            const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
            return isPassed;
        });

        // second pass: build the stripped-down doclets
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;
            if (shouldAddMethod) {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.memberof))) {
                    continue;
                }
                // We leave only the necessary fields
                doclet.memberof = cleanName(doclet.memberof);
                doclet.longname = cleanName(doclet.longname);
                doclet.name = cleanName(doclet.name);
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    memberof: cleanName(doclet.memberof),
                    params: doclet.params ? doclet.params.map(param => ({
                        type: param.type ? {
                            names: param.type.names,
                            parsedType: param.type.parsedType
                        } : param.type,
                        name: param.name,
                        description: param.description,
                        optional: param.optional,
                        defaultvalue: param.defaultvalue
                    })) : doclet.params,
                    returns: doclet.returns ? doclet.returns.map(returnObj => ({
                        type: {
                            names: returnObj.type.names,
                            parsedType: returnObj.type.parsedType
                        }
                    })) : doclet.returns,
                    name: doclet.name,
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: doclet.scope,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    see: doclet.see
                };
                // Add the filtered doclet to the array
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'class') {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.name))) {
                    continue;
                }
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    augments: doclet.augments || undefined,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see || undefined
                };
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'typedef') {
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type
                };
                filteredDoclets.push(filteredDoclet);
            }
        }
        // Replace doclets with the filtered array
        e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
    }
};

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/cell/api_plugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../sdkjs/common/apiBase_plugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,85 @@
// JSDoc plugin for the plugins API: after parsing, drop undocumented and
// package doclets and strip the rest down to the fields the generators use.
exports.handlers = {
    processingComplete: function(e) {
        const filteredDoclets = [];
        // recursively delete null/undefined properties from a doclet
        // (walks nested objects and arrays as well)
        function checkNullProps(oDoclet) {
            for (let key of Object.keys(oDoclet)) {
                if (oDoclet[key] == null) {
                    delete oDoclet[key];
                }
                if (typeof(oDoclet[key]) == "object") {
                    checkNullProps(oDoclet[key]);
                }
            }
        }
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            // skip JSDoc's internal bookkeeping doclets
            if (true == doclet.undocumented || doclet.kind == 'package') {
                continue;
            }
            // keep only the fields the documentation generators consume
            const filteredDoclet = {
                comment: doclet.comment,
                meta: doclet.meta ? {
                    lineno: doclet.meta.lineno,
                    columnno: doclet.meta.columnno
                } : doclet.meta,
                kind: doclet.kind,
                since: doclet.since,
                name: doclet.name,
                type: doclet.type ? {
                    names: doclet.type.names,
                    parsedType: doclet.type.parsedType
                } : doclet.type,
                description: doclet.description,
                memberof: doclet.memberof,
                properties: doclet.properties ? doclet.properties.map(property => ({
                    type: property.type ? {
                        names: property.type.names,
                        parsedType: property.type.parsedType
                    } : property.type,
                    name: property.name,
                    description: property.description,
                    optional: property.optional,
                    defaultvalue: property.defaultvalue
                })) : doclet.properties,
                longname: doclet.longname,
                scope: doclet.scope,
                alias: doclet.alias,
                params: doclet.params ? doclet.params.map(param => ({
                    type: param.type ? {
                        names: param.type.names,
                        parsedType: param.type.parsedType
                    } : param.type,
                    name: param.name,
                    description: param.description,
                    optional: param.optional,
                    defaultvalue: param.defaultvalue
                })) : doclet.params,
                returns: doclet.returns ? doclet.returns.map(returnObj => ({
                    type: {
                        names: returnObj.type.names,
                        parsedType: returnObj.type.parsedType
                    }
                })) : doclet.returns,
                see: doclet.see
            };
            checkNullProps(filteredDoclet)
            filteredDoclets.push(filteredDoclet);
        }
        // replace the doclet list with the filtered array
        e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
    }
};

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs-forms/apiPlugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/slide/api_plugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,110 @@
import os
import subprocess
import json
import argparse
import re
import platform
root = '../../../..'
# Configuration files
configs = [
"./config/builder/word.json",
"./config/builder/cell.json",
"./config/builder/slide.json",
"./config/builder/forms.json"
]
editors_maps = {
"word": "CDE",
"cell": "CSE",
"slide": "CPE",
"forms": "CFE"
}
def generate(output_dir, md=False):
    """Generate JSON documentation for every editor config.

    Runs jsdoc (via npx) once per config file, writing <editor>.json into
    output_dir, then appends the example files referenced by @see tags.

    output_dir -- directory that receives the generated *.json files.
    md         -- True: store each example in doclet['example'] as Markdown;
                  False: append a "Try it" block to doclet['description'].
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Pass EDITOR through the child environment instead of prefixing the
        # command with "set X=... &&" / "export X=... &&": on Windows
        # `set EDITOR=CDE && ...` stores a trailing space in the value, which
        # breaks the @typeofeditors matching in correct_doclets.js.
        env = dict(os.environ)
        env["EDITOR"] = editors_maps[editor_name]
        command = f"npx jsdoc -c {config} -X"
        print(f"Generating {editor_name}.json: {command}")
        # capture jsdoc's stdout directly instead of a shell ">" redirect
        with open(output_file, "w", encoding="utf-8") as out:
            subprocess.run(command, shell=True, env=env, stdout=out)

    # Append examples to JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Modify JSON data
        for doclet in data:
            if 'see' in doclet and doclet['see'] is not None:
                # @see holds a path template containing "{Editor}"
                editor_title = 'Word' if editor_name == 'forms' else editor_name.title()
                doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_title)
                file_path = f'{root}/' + doclet['see'][0]
                if os.path.exists(file_path):
                    with open(file_path, 'r', encoding='utf-8') as see_file:
                        example_content = see_file.read()
                    # Extract the first line as a comment if it exists
                    lines = example_content.split('\n')
                    if lines[0].startswith('//'):
                        comment = lines[0] + '\n'
                        code_content = '\n'.join(lines[1:])
                    else:
                        comment = ''
                        code_content = example_content
                    if md:
                        doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
                    else:
                        # forms examples run in the pdf document type
                        document_type = "pdf" if editor_name == "forms" else editor_name
                        doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'
        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)

    print("Documentation generation for builder completed.")
def remove_builder_lines(text):
    """Drop every line that starts (after leading whitespace) with 'builder.'."""
    kept = [line for line in text.splitlines()
            if not line.strip().startswith("builder.")]
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JS comment markers (// and /* */), keeping the comment text."""
    without_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    without_block_markers = re.sub(r'/\*\s*|\s*\*/', '', without_line_markers, flags=re.DOTALL)
    return without_block_markers.strip()
if __name__ == "__main__":
    # CLI entry point: optional positional destination, defaulting to the
    # office-js-api declarations checkout next to this repository.
    cli = argparse.ArgumentParser(description="Generate documentation")
    cli.add_argument(
        "destination",
        type=str,
        nargs='?',  # positional argument may be omitted
        default=f"{root}/office-js-api-declarations/office-js-api",
        help="Destination directory for the generated documentation",
    )
    cli_args = cli.parse_args()
    generate(cli_args.destination)

View File

@ -0,0 +1,277 @@
import os
import json
import re
import shutil
import argparse
import generate_docs_json
# Configuration files
editors = [
"word",
"cell",
"slide",
"forms"
]
missing_examples = []
def load_json(file_path):
    """Read a UTF-8 encoded JSON file and return the parsed object."""
    with open(file_path, encoding='utf-8') as handle:
        return json.load(handle)
def write_markdown_file(file_path, content):
    """Write content to file_path as UTF-8 text."""
    with open(file_path, mode='w', encoding='utf-8') as out:
        out.write(content)
def remove_js_comments(text):
    """Strip JS comment markers (// and /* */), keeping the comment text."""
    no_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    no_block_markers = re.sub(r'/\*\s*|\s*\*/', '', no_line_markers, flags=re.DOTALL)
    return no_block_markers.strip()
def correct_description(string):
    """Normalize a JSDoc description for Markdown: <b> -> **, <note> -> 💡."""
    if string is None:
        return 'No description provided.'
    # bold tags become Markdown emphasis markers
    string = string.replace('<b>', '**').replace('</b>', '**')
    # notes are rendered inline with a lightbulb prefix
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL)
def correct_default_value(value, enumerations, classes):
    """Render a doclet default value as linked Markdown.

    Booleans become the literals "true"/"false"; everything else is
    stringified and passed through the type-linking renderer.
    """
    if value is None:
        return ''
    # isinstance check, not `== True`: in Python `1 == True` and `0 == False`,
    # so the original comparison rendered numeric defaults as booleans.
    if isinstance(value, bool):
        value = "true" if value else "false"
    else:
        value = str(value)
    return generate_data_types_markdown([value], enumerations, classes)
def remove_line_breaks(string):
    """Replace every CR and LF character with a single space."""
    return string.replace("\r", " ").replace("\n", " ")
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """Join type names with a Markdown pipe and link known enums/classes.

    Bare enum/class names become links; generic parameters inside <...> are
    either linked (known types) or HTML-escaped.
    """
    rendered = ' &#124; '.join(types)
    for enum in enumerations:
        enum_name = enum['name']
        if enum_name in types:
            rendered = rendered.replace(enum_name, f"[{enum_name}]({root}Enumeration/{enum_name}.md)")
    for class_name in classes:
        if class_name in types:
            rendered = rendered.replace(class_name, f"[{class_name}]({root}{class_name}/{class_name}.md)")

    def link_bracketed(match):
        element = match.group(1).strip()
        # only the part before the first dot identifies the type
        base_type = element.split('.')[0]
        if any(e['name'] == base_type for e in enumerations):
            return f"<[{element}]({root}Enumeration/{base_type}.md)>"
        if base_type in classes:
            return f"<[{element}]({root}{base_type}/{base_type}.md)>"
        return f"&lt;{element}&gt;"

    return re.sub(r'<([^<>]+)>', link_bracketed, rendered)
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Render the landing page for a class: summary, properties, method index."""
    header = f"# {class_name}\n\nRepresents the {class_name} class.\n\n"
    properties_section = generate_properties_markdown(properties, enumerations, classes, '../')
    method_index = "".join(
        f"- [{entry['name']}](./Methods/{entry['name']}.md)\n" for entry in methods
    )
    return header + properties_section + "## Methods\n\n" + method_index
def generate_method_markdown(method, enumerations, classes):
    """Render one method doclet as a Markdown page.

    Sections: title/description, Syntax, Parameters table, Returns, and an
    optional Example split from a ```js fenced block in doclet['example'].
    """
    method_name = method['name']
    description = method.get('description', 'No description provided.')
    description = correct_description(description)
    params = method.get('params', [])
    returns = method.get('returns', [])
    example = method.get('example', '')
    memberof = method.get('memberof', '')
    content = f"# {method_name}\n\n{description}\n\n"
    # Syntax section
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\nexpression.{method_name}({param_list});\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"
    content += "## Parameters\n\n"
    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            # 'type' may be present but None — guard before .get chaining
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)
            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"
    content += "\n## Returns\n\n"
    if returns:
        return_type = ', '.join(returns[0].get('type', {}).get('names', [])) if returns[0].get('type') else 'Unknown'
        # Check for enumerations and classes in return type and add links if they exist
        return_type_md = generate_data_types_markdown([return_type], enumerations, classes)
        content += return_type_md
    else:
        content += "This method doesn't return any data."
    if example:
        # Separate comment and code, and remove comment symbols
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
    return content
def generate_properties_markdown(properties, enumerations, classes, root='../../'):
    """Render a '## Properties' Markdown table; '' when there are no properties."""
    if properties is None:
        return ''
    rows = [
        "## Properties\n",
        "| Name | Type | Description |",
        "| ---- | ---- | ----------- |",
    ]
    for entry in properties:
        desc = remove_line_breaks(correct_description(entry.get('description', 'No description provided.')))
        types_md = generate_data_types_markdown(entry['type']['names'], enumerations, classes, root)
        rows.append(f"| {entry['name']} | {types_md} | {desc} |")
    return "\n".join(rows) + "\n\n"
def generate_enumeration_markdown(enumeration, enumerations, classes):
    """Build the markdown page for a typedef (enumeration) doclet.

    Renders the title and description, then one of three "Type" sections:
    - a value list for union types (linking known enumerations/classes),
    - a properties table for object typedefs,
    - the raw type names otherwise.
    An optional ```js example is appended with its comment markers stripped.
    """
    enum_name = enumeration['name']
    description = correct_description(enumeration.get('description', 'No description provided.'))
    example = enumeration.get('example', '')
    content = f"# {enum_name}\n\n{description}\n\n"
    # Fix: use .get() chains — jsdoc doclets do not always carry
    # type/parsedType/properties keys, and direct indexing raised KeyError.
    parsed_type = enumeration.get('type', {}).get('parsedType', {})
    if parsed_type.get('type') == 'TypeUnion':
        content += "## Type\n\nEnumeration\n\n"
        content += "## Values\n\n"
        for element in parsed_type.get('elements', []):
            element_name = element['name'] if element['type'] != 'NullLiteral' else 'null'
            # Check if element is in enumerations or classes before adding link
            if any(enum['name'] == element_name for enum in enumerations):
                content += f"- [{element_name}](../../Enumeration/{element_name}.md)\n"
            elif element_name in classes:
                content += f"- [{element_name}](../../{element_name}/{element_name}.md)\n"
            else:
                content += f"- {element_name}\n"
    elif enumeration.get('properties') is not None:
        content += "## Type\n\nObject\n\n"
        content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
    else:
        content += "## Type\n\n"
        for type_name in enumeration.get('type', {}).get('names', []):
            content += generate_data_types_markdown([type_name], enumerations, classes) + "\n\n"
    if example:
        # Separate comment and code, and remove comment symbols
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
    return content
def process_doclets(data, output_dir, editor_name):
    """Split jsdoc doclets into per-class and per-enumeration markdown files.

    Writes <output_dir>/<editor_name>/<Class>/<Class>.md, one
    Methods/<name>.md per method and Enumeration/<name>.md per typedef.
    Methods and typedefs without an example are recorded in the module-level
    ``missing_examples`` list (paths relative to ``output_dir``).
    """
    classes = {}
    classes_props = {}
    enumerations = []
    editor_dir = os.path.join(output_dir, editor_name)
    for doclet in data:
        kind = doclet['kind']
        if kind == 'class':
            class_name = doclet['name']
            classes[class_name] = []
            classes_props[class_name] = doclet.get('properties', None)
        elif kind == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                # A method may appear before (or without) its class doclet.
                classes.setdefault(class_name, []).append(doclet)
        elif kind == 'typedef':
            enumerations.append(doclet)
    # Process classes
    for class_name, methods in classes.items():
        class_dir = os.path.join(editor_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)
        # Write class file. Fix: use .get() — a class seen only through its
        # methods has no entry in classes_props, and direct indexing raised
        # KeyError here. None is already the "no properties" value.
        class_content = generate_class_markdown(
            class_name, methods, classes_props.get(class_name), enumerations, classes)
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)
        # Write method files
        for method in methods:
            method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
            method_content = generate_method_markdown(method, enumerations, classes)
            write_markdown_file(method_file_path, method_content)
            if not method.get('example', ''):
                missing_examples.append(os.path.relpath(method_file_path, output_dir))
    # Process enumerations
    enum_dir = os.path.join(editor_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)
    for enum in enumerations:
        enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
        enum_content = generate_enumeration_markdown(enum, enumerations, classes)
        write_markdown_file(enum_file_path, enum_content)
        if not enum.get('example', ''):
            missing_examples.append(os.path.relpath(enum_file_path, output_dir))
def generate(output_dir):
    """Generate the markdown documentation tree under ``output_dir``.

    A temporary per-editor JSON dump is produced first (via
    generate_docs_json), consumed into markdown, then removed.
    """
    print('Generating Markdown documentation...')
    # Fix: build paths with os.path.join — plain string concatenation
    # produced a wrong sibling path ("...outputtmp_json") whenever
    # output_dir lacked a trailing separator.
    tmp_json_dir = os.path.join(output_dir, 'tmp_json')
    generate_docs_json.generate(tmp_json_dir, md=True)
    for editor_name in editors:
        input_file = os.path.join(tmp_json_dir, editor_name + ".json")
        os.makedirs(os.path.join(output_dir, editor_name.title()), exist_ok=True)
        data = load_json(input_file)
        process_doclets(data, output_dir, editor_name.title())
    shutil.rmtree(tmp_json_dir)
    print('Done')
if __name__ == "__main__":
    # CLI entry point: generate the markdown docs into the given (or default)
    # destination directory.
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default="../../../../office-js-api/"  # Default value
    )
    args = parser.parse_args()
    generate(args.destination)
    # Emit the collected list between sentinel lines so a wrapper (e.g. CI)
    # can parse which generated pages still lack a usage example.
    print("START_MISSING_EXAMPLES")
    print(",".join(missing_examples))
    print("END_MISSING_EXAMPLES")

View File

@ -0,0 +1,99 @@
import os
import subprocess
import json
import argparse
import re
# Configuration files
# One jsdoc config per editor flavour; the basename (common, word, cell, ...)
# becomes the name of the generated <editor>.json output file.
configs = [
    "./config/plugins/common.json",
    "./config/plugins/word.json",
    "./config/plugins/cell.json",
    "./config/plugins/slide.json",
    "./config/plugins/forms.json"
]
# Repository root relative to this script's working directory; used to
# resolve the example files referenced by doclet @see entries.
root = '../../../..'
def generate(output_dir, md=False):
    """Generate plugin API JSON docs into ``output_dir``.

    Runs jsdoc (via npx) once per editor config, then post-processes each
    resulting JSON file: for every doclet whose @see entry points at an
    existing example file, the example source is embedded into
    doclet['examples'] and, unless ``md`` is True, a "Try it"
    document-builder snippet is appended to the description.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # shell=True is required for the `>` redirection; the command is
        # built only from the fixed `configs` constants, not user input.
        command = f"npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)
    # Append examples to JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Modify JSON data
        for doclet in data:
            if doclet.get('see') is None:
                continue
            # The forms editor reuses the Word example files.
            editor_title = 'Word' if editor_name == 'forms' else editor_name.title()
            doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_title)
            file_path = f'{root}/' + doclet['see'][0]
            if not os.path.exists(file_path):
                continue
            with open(file_path, 'r', encoding='utf-8') as see_file:
                example_content = see_file.read()
            # Extract the first line as a comment if it exists
            lines = example_content.split('\n')
            if lines[0].startswith('//'):
                comment = lines[0] + '\n'
                code_content = '\n'.join(lines[1:])
            else:
                comment = ''
                code_content = example_content
            doclet['examples'] = [remove_js_comments(comment) + code_content]
            if not md:
                # "forms" examples run under the pdf document type.
                document_type = "pdf" if editor_name == "forms" else editor_name
                doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'
        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)
    print("Documentation generation for builder completed.")
def remove_builder_lines(text):
    """Drop every line whose stripped content starts with "builder."."""
    kept = (ln for ln in text.splitlines() if not ln.strip().startswith("builder."))
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JS comment markers while keeping the comment text itself.

    `// ` prefixes are removed per line, and `/*` / `*/` delimiters are
    removed; the result is whitespace-trimmed.
    """
    # Remove single-line comment markers, keeping the text after //
    without_single = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Remove multi-line comment delimiters, keeping the enclosed text
    without_block = re.sub(r'/\*\s*|\s*\*/', '', without_single, flags=re.DOTALL)
    return without_block.strip()
if __name__ == "__main__":
    # CLI entry point: generate the plugins JSON docs into the given
    # (or default, repo-relative) destination directory.
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # Indicates the argument is optional
        default=f"{root}/office-js-api-declarations/office-js-api-plugins"
    )
    args = parser.parse_args()
    generate(args.destination)

View File

@ -0,0 +1,7 @@
{
"dependencies": {
"jsdoc-to-markdown": "7.1.1",
"dmd": "6.1.0",
"handlebars": "4.7.7"
}
}

177
scripts/sln.py Normal file
View File

@ -0,0 +1,177 @@
#!/usr/bin/env python
import sys
sys.path.append('scripts')
import config
import json
import os
is_log = False
def is_exist_in_array(projects, proj):
    """Return True if ``proj`` is an element of ``projects``.

    Thin readability wrapper kept for the many call sites; delegates to the
    built-in membership test instead of the previous hand-rolled loop.
    """
    return proj in projects
def get_full_projects_list(json_data, names):
    """Recursively expand alias entries to a flat list of project records.

    Each entry in ``names`` that is itself a key of ``json_data`` is treated
    as an alias and expanded depth-first; anything else is kept as a concrete
    project record. (The second parameter used to be called ``list``,
    shadowing the builtin; all call sites are positional.)
    """
    result = []
    for rec in names:
        if rec in json_data:
            result += get_full_projects_list(json_data, json_data[rec])
        else:
            result.append(rec)
    return result
def adjust_project_params(params):
    """Expand platform alias markers in a project's parameter list.

    Aliases like "win" / "!win" are expanded to every matching concrete
    platform from ``config.platforms``, with the "!" prefix preserved on
    negated forms. Returns a new list with the expansions appended.
    """
    # Fix: copy instead of aliasing — the old ``ret_params = params`` plus
    # ``+=`` mutated the caller's list in place.
    ret_params = list(params)
    # Bucket the configured concrete platforms by alias group.
    groups = {"win": [], "win_xp": [], "linux": [], "mac": [], "android": []}
    for platform in config.platforms:
        if platform.startswith("win"):
            groups["win"].append(platform)
            if "xp" in platform:
                groups["win_xp"].append(platform)
        elif platform.startswith("linux"):
            groups["linux"].append(platform)
        elif platform.startswith("mac"):
            groups["mac"].append(platform)
        elif platform.startswith("android"):
            groups["android"].append(platform)
    # Append expansions in the same alias order as before:
    # win, win_xp, linux, mac, android (positive form, then negated).
    for alias in ("win", "win_xp", "linux", "mac", "android"):
        if is_exist_in_array(params, alias):
            ret_params += groups[alias]
        if is_exist_in_array(params, "!" + alias):
            ret_params += ["!" + x for x in groups[alias]]
    return ret_params
def get_projects(pro_json_path, platform):
    """Resolve the list of .pro project files to build for ``platform``.

    Reads the sln-style JSON at ``pro_json_path``, expands the module aliases
    selected by the "module" config option, and filters each record by the
    platform/config tags in its leading "[tag1,tag2]" header. Returns the
    records prefixed with the root directory declared by the JSON.
    """
    json_path = os.path.abspath(pro_json_path)
    # Fix: close the file deterministically (was json.load(open(...))).
    with open(json_path) as json_file:
        data = json.load(json_file)
    # Projects root: defaults to "../" relative to the JSON file, can be
    # overridden by a "root" key inside the JSON itself.
    root_dir_json = "../"
    if ("root" in data):
        root_dir_json = data["root"]
    root_dir = os.path.dirname(json_path)
    if ("/" != root_dir[-1] and "\\" != root_dir[-1]):
        root_dir += "/"
    root_dir += root_dir_json
    result = []
    modules = config.option("module").split(" ")
    for module in modules:
        if (module == ""):
            continue
        if not module in data:
            continue
        # check aliases to modules
        records_src = data[module]
        records = get_full_projects_list(data, records_src)
        # Fix: gate the debug dump behind is_log like the other diagnostics.
        if is_log:
            print(records)
        for rec in records:
            params = []
            record = rec
            if (0 == rec.find("[")):
                # Split "[tag1,tag2]path" into its tag list and the path.
                pos = rec.find("]")
                if (-1 == pos):
                    continue
                record = rec[pos+1:]
                params_tmp = rec[1:pos].split(",")
                for par in params_tmp:
                    if (par != ""):
                        params.append(par)
                params = adjust_project_params(params)
            if is_exist_in_array(result, record):
                continue
            if is_log:
                print("params: " + ",".join(params))
                print("file: " + record)
            # Explicitly excluded for this platform.
            if is_exist_in_array(params, "!" + platform):
                continue
            platform_records = []
            platform_records += config.platforms
            platform_records += ["win", "win_xp", "linux", "mac", "android"]
            # if one platform exists => all needed must exists
            is_needed_platform_exist = False
            for pl in platform_records:
                if is_exist_in_array(params, pl):
                    is_needed_platform_exist = True
                    break
            # if one config exists => all needed must exists
            is_needed_config_exist = False
            for item in params:
                if (0 == item.find("!")):
                    continue
                if is_exist_in_array(platform_records, item):
                    continue
                is_needed_config_exist = True
                break
            if is_needed_platform_exist:
                if not is_exist_in_array(params, platform):
                    continue
            config_params = config.option("config").split(" ") + config.option("features").split(" ")
            config_params = [x for x in config_params if x]
            is_append = True
            for conf in config_params:
                if is_exist_in_array(params, "!" + conf):
                    is_append = False
                    break
                if is_needed_config_exist and not is_exist_in_array(params, conf):
                    is_append = False
                    break
            if is_append:
                result.append(root_dir + record)
    if is_log:
        print(result)
    return result
# test example
if __name__ == '__main__':
    # Manual smoke test: parse the build configuration, enable verbose
    # logging and resolve the win_64 project list from the repo's sln.json.
    config.parse()
    is_log = True
    projects = get_projects("./../sln.json", "win_64")

103
sln.json Normal file
View File

@ -0,0 +1,103 @@
{
"root" : "../",
"spell" : [
"[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
],
"core" : [
"core/Common/3dParty/cryptopp/project/cryptopp.pro",
"core/Common/cfcpp/cfcpp.pro",
"core/UnicodeConverter/UnicodeConverter.pro",
"core/Common/kernel.pro",
"core/Common/Network/network.pro",
"core/DesktopEditor/graphics/pro/graphics.pro",
"core/PdfFile/PdfFile.pro",
"core/DjVuFile/DjVuFile.pro",
"core/XpsFile/XpsFile.pro",
"core/HtmlFile2/HtmlFile2.pro",
"core/Fb2File/Fb2File.pro",
"core/EpubFile/CEpubFile.pro",
"core/HtmlRenderer/htmlrenderer.pro",
"core/DocxRenderer/DocxRenderer.pro",
"core/DesktopEditor/doctrenderer/doctrenderer.pro",
"[!no_x2t]core/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro",
"[!no_x2t]core/TxtFile/Projects/Linux/TxtXmlFormatLib.pro",
"[!no_x2t]core/RtfFile/Projects/Linux/RtfFormatLib.pro",
"[!no_x2t]core/OdfFile/Projects/Linux/OdfFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/BinDocument/BinDocument.pro",
"[!no_x2t]core/X2tConverter/build/Qt/X2tConverter.pro",
"[win,linux,mac]core/DesktopEditor/AllFontsGen/AllFontsGen.pro",
"[win,linux,mac]core/DesktopEditor/allthemesgen/allthemesgen.pro",
"[win,linux,mac]core/DesktopEditor/doctrenderer/app_builder/docbuilder.pro",
"[win,linux,mac]core/DesktopEditor/pluginsmanager/pluginsmanager.pro",
"[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro",
"spell",
"[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro",
"[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro"
],
"builder" : [
"core",
"core/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder_func_lib.pro"
],
"server" : [
"core"
],
"multimedia" : [
"[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro"
],
"desktop" : [
"core",
"multimedia",
"core/DesktopEditor/xmlsec/src/ooxmlsignature.pro",
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro",
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro",
"[win,linux]desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro",
"[win,linux]desktop-apps/win-linux/ASCDocumentEditor.pro",
"[win]desktop-apps/win-linux/extras/projicons/ProjIcons.pro",
"[win,!win_xp]desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro"
],
"mobile" : [
"core"
],
"osign" : [
"[win,linux,mac]core/DesktopEditor/xmlsec/src/osign/lib/osign.pro"
]
}

View File

@ -7,6 +7,9 @@ import os
import glob
import shutil
sys.stdin.reconfigure(encoding='utf-8')
sys.stdout.reconfigure(encoding='utf-8')
params = sys.argv[1:]
if (3 > len(params)):
@ -20,8 +23,14 @@ directory_input = params[0].replace("\\", "/")
directory_output = params[1].replace("\\", "/")
author_name = params[2]
if not os.path.exists(directory_output):
os.mkdir(directory_output)
input_files = []
count = 1
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
print(count, file)
count += 1
input_files.append(file.replace("\\", "/"))
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
@ -30,7 +39,7 @@ def change_author_name(file_dist, output_file, author_name):
app = "7za" if ("mac" == base.host_platform()) else "7z"
base.cmd_exe(app, ["x", "-y", file_dist, "-o" + temp_dir, "docProps\\core.xml", "-r"])
with open(temp_dir + "/docProps/core.xml", 'r') as file:
with open(temp_dir + "/docProps/core.xml", 'r', encoding='utf-8') as file:
data = file.read()
creator_open = "<dc:creator>"
@ -67,7 +76,7 @@ def change_author_name(file_dist, output_file, author_name):
else:
data = data[:last_tag_pos] + lastModified_open + author_name + lastModified_close + data[last_tag_pos:]
with open(temp_dir + "/docProps/core.xml", 'w') as file:
with open(temp_dir + "/docProps/core.xml", 'w', encoding='utf-8') as file:
file.write(data)
shutil.copyfile(file_dist, output_file)
@ -80,7 +89,12 @@ for input_file in input_files:
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + input_file.split(".")[-1]
change_author_name(input_file, output_file, author_name)
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]).replace(' ', '_') + u"." + input_file.split(".")[-1]
try:
change_author_name(input_file, output_file, author_name)
except:
print("Error in converting document: ", input_file)
continue
base.delete_dir(temp_dir)
output_cur += 1

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append('../../scripts')
@ -51,19 +52,43 @@ if base.is_file(directory_fonts_local + "/AllFonts.js"):
directory_fonts = directory_fonts_local
# ---------------------------------------------------
json_params = "{'spreadsheetLayout':{'fitToWidth':1,'fitToHeight':1},"
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}}"
json_params = "{"
json_params += "'spreadsheetLayout':{"
# True for fit, False for 100%
isScaleSheetToPage = False
json_fit_text = "0"
if isScaleSheetToPage:
json_fit_text = "1"
json_params += "'fitToWidth':" + json_fit_text + ",'fitToHeight':" + json_fit_text + ","
if True:
json_params += "'orientation':'landscape',"
page_margins = "'pageMargins':{'bottom':10,'footer':5,'header':5,'left':5,'right':5,'top':10}"
page_setup = "'pageSetup':{'orientation':1,'width':210,'height':297,'paperUnits':0,'scale':100,'printArea':false,'horizontalDpi':600,'verticalDpi':600,'usePrinterDefaults':true,'fitToHeight':0,'fitToWidth':0}"
json_params += "'sheetsProps':{'0':{'headings':false,'printTitlesWidth':null,'printTitlesHeight':null," + page_margins + "," + page_setup + "}}},"
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}"
json_params += "}"
json_params = json_params.replace("'", "&quot;")
output_len = len(input_files)
output_cur = 1
for input_file in input_files:
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0])
output_file_tmp = os.path.join(output_dir, "temp")
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0].strip())
xml_convert = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
xml_convert += u"<TaskQueueDataConvert>"
xml_convert += (u"<m_sFileFrom>" + input_file + u"</m_sFileFrom>")
xml_convert += (u"<m_sFileTo>" + output_file + u".zip</m_sFileTo>")
xml_convert += (u"<m_sFileTo>" + output_file_tmp + u".zip</m_sFileTo>")
xml_convert += u"<m_nFormatTo>1029</m_nFormatTo>"
xml_convert += (u"<m_sAllFontsPath>" + directory_fonts + u"/AllFonts.js</m_sAllFontsPath>")
xml_convert += (u"<m_sFontDir>" + directory_fonts + u"</m_sFontDir>")
@ -83,8 +108,9 @@ for input_file in input_files:
base.cmd_in_dir(directory_x2t, "x2t", [temp_dir + "/to.xml"], True)
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
base.extract_unicode(output_file + u".zip", output_file)
base.delete_file(output_dir + "/" + os.path.splitext(os.path.basename(input_file))[0] + ".zip")
base.extract_unicode(output_file_tmp + u".zip", output_file_tmp)
base.move_dir(str(output_file_tmp), str(output_file))
base.delete_file(output_file_tmp + u".zip")
output_cur += 1
base.delete_dir(temp_dir)

View File

@ -0,0 +1,38 @@
# Build environment for compiling Qt 5.9.9 from source for 32-bit ARM.
# The build is installed with prefix /build, which is expected to be
# bind-mounted from the host when the container is run.
FROM arm32v7/ubuntu:16.04
# basic dependencies
RUN apt-get -y update && \
    apt-get -y install wget xz-utils
# qt source
RUN mkdir /source && cd /source && \
    wget -q https://download.qt.io/new_archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz && \
    tar -xf qt-everywhere-opensource-src-5.9.9.tar.xz
# build dependencies
RUN apt-get -y install \
    build-essential \
    glib-2.0-dev \
    libglu1-mesa-dev \
    libgtk-3-dev \
    libpulse-dev \
    libasound2-dev \
    libatspi2.0-dev \
    libcups2-dev \
    libdbus-1-dev \
    libicu-dev \
    libgstreamer1.0-dev \
    libgstreamer-plugins-base1.0-dev \
    libx11-xcb-dev \
    libxcb* \
    libxi-dev \
    libxrender-dev \
    libxss-dev
# increase or decrease CORES value to change the number of parallel jobs while building qt
ENV CORES=4
# Configure, build and install Qt at container run time (not image build
# time) so the /build output lands on the mounted host volume.
CMD cd /source/qt-everywhere-opensource-src-5.9.9 && \
    ./configure -opensource -confirm-license -release -shared -accessibility -prefix /build -qt-zlib -qt-libpng -qt-libjpeg -qt-xcb -qt-pcre -no-sql-sqlite -no-qml-debug -gstreamer 1.0 -nomake examples -nomake tests -skip qtenginio -skip qtlocation -skip qtserialport -skip qtsensors -skip qtxmlpatterns -skip qt3d -skip qtwebview -skip qtwebengine && \
    make -j$CORES && \
    make install

View File

@ -0,0 +1,38 @@
FROM arm64v8/ubuntu:16.04
# basic dependencies
RUN apt-get -y update && \
apt-get -y install wget xz-utils
# qt source
RUN mkdir /source && cd /source && \
wget -q https://download.qt.io/new_archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz && \
tar -xf qt-everywhere-opensource-src-5.9.9.tar.xz
# build dependencies
RUN apt-get -y install \
build-essential \
glib-2.0-dev \
libglu1-mesa-dev \
libgtk-3-dev \
libpulse-dev \
libasound2-dev \
libatspi2.0-dev \
libcups2-dev \
libdbus-1-dev \
libicu-dev \
libgstreamer1.0-dev \
libgstreamer-plugins-base1.0-dev \
libx11-xcb-dev \
libxcb* \
libxi-dev \
libxrender-dev \
libxss-dev
# increase or decrease CORES value to change the number of parallel jobs while building qt
ENV CORES=4
CMD cd /source/qt-everywhere-opensource-src-5.9.9 && \
./configure -opensource -confirm-license -release -shared -accessibility -prefix /build -qt-zlib -qt-libpng -qt-libjpeg -qt-xcb -qt-pcre -no-sql-sqlite -no-qml-debug -gstreamer 1.0 -nomake examples -nomake tests -skip qtenginio -skip qtlocation -skip qtserialport -skip qtsensors -skip qtxmlpatterns -skip qt3d -skip qtwebview -skip qtwebengine && \
make -j$CORES && \
make install

30
tools/linux/arm/build_qt.py Executable file
View File

@ -0,0 +1,30 @@
#!/usr/bin/env python
import sys
import os
import argparse
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__name__ + '/../../../scripts')
import base
def docker_build(image_name, dockerfile_dir, build_dir):
    """Build the image, run it with ``build_dir`` mounted at /build,
    then remove the image again."""
    for argv in (
        ["build", "-t", image_name, dockerfile_dir],
        ["run", "--rm", "-v", build_dir + ":/build", image_name],
        ["image", "rm", image_name],
    ):
        base.cmd("docker", argv)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Build qt for linux arm architecture')
    parser.add_argument('build_dir', help='the path to build directory (directory may not exist)')
    parser.add_argument('-a', '--arch', action='store', help='target architecture (arm32 or arm64)', choices=['arm32', 'arm64'], required=True)
    args = parser.parse_args()
    build_dir = args.build_dir
    # Start from an empty build directory so stale artifacts never leak in.
    if base.is_dir(build_dir):
        base.delete_dir(build_dir)
    base.create_dir(build_dir)
    abs_build_path = os.path.abspath(build_dir)
    arch = args.arch
    # The Dockerfile is looked up in ./<arch>/ next to this script; the
    # image is tagged qt-<arch> and removed again after the build.
    docker_build('qt-' + arch, __dir__name__ + "/" + arch, abs_build_path)

View File

@ -54,17 +54,10 @@ def install_deps():
print("Installed Node.js version: " + str(nodejs_cur_version_major) + "." + str(nodejs_cur_version_minor))
except:
nodejs_cur = 1
if (nodejs_cur < 14000):
print("Node.js version cannot be less 14")
if (nodejs_cur < 16000):
print("Node.js version cannot be less 16")
print("Reinstall")
if (base.is_dir("./node_js_setup_14.x")):
base.delete_dir("./node_js_setup_14.x")
base.cmd("sudo", ["apt-get", "remove", "--purge", "-y", "nodejs"])
base.download("https://deb.nodesource.com/setup_14.x", "./node_js_setup_14.x")
base.cmd('curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -')
base.cmd("sudo", ["bash", "./node_js_setup_14.x"])
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
base.cmd("sudo", ["npm", "install", "-g", "npm@6"])
base.run_as_bat(["curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - &&sudo apt-get install -y nodejs"])
else:
print("OK")
base.cmd("sudo", ["apt-get", "-y", "install", "npm", "yarn"], True)

BIN
tools/linux/python3.tar.gz Normal file

Binary file not shown.

View File

@ -1 +1 @@
8.0.0
8.1.1