Compare commits

...

331 Commits

Author SHA1 Message Date
6736f84d54 Fixed jsdoc md generation 2024-08-02 21:26:44 +07:00
0983e67f21 [jsdoc] Fixed api docs generation 2024-08-02 20:31:11 +07:00
8e7db87554 Refactoring linux packages deploy (#853)
* Fix deploy desktop editors linux packages (#842)

* Small fix

* Fix make targets

* Small fix

* Refactoring linux packages deploy
2024-07-31 14:18:27 +03:00
9d000b2284 Merge pull request #852 from ONLYOFFICE/fix/generation-path
Added branch name to dist path for jsdoc json generation
2024-07-29 04:07:03 -07:00
e29fd0ca09 Added branch name to dist path for jsdoc json generation 2024-07-29 18:06:17 +07:00
dcfde5b5e7 Refactoring 2024-07-29 13:57:17 +03:00
871750d6ae Merge pull request #851 from ONLYOFFICE/fix/jsdoc
Fixed comments
2024-07-29 01:40:16 -07:00
d6b5dc0830 Fixed comments 2024-07-29 15:35:26 +07:00
e99a3e8978 Merge pull request #850 from ONLYOFFICE/fix/jsdoc
Fixed path in jsdoc generation script
2024-07-29 00:17:11 -07:00
13db6d3155 Fixed path in jsdoc generation script 2024-07-29 14:15:46 +07:00
f8845d4fc5 Merge pull request #849 from ONLYOFFICE/fix/jsdoc
Fixed docs generation scripts
2024-07-28 23:28:44 -07:00
efcfb00239 Fixed docs generation scripts 2024-07-29 13:27:41 +07:00
1727313e54 Merge pull request #848 from ONLYOFFICE/fix/jsdoc
Fixed getting doclets for docs generation
2024-07-26 08:55:03 -07:00
f6d55d07c1 Fixed getting doclets for docs generation 2024-07-26 22:51:35 +07:00
331bbadaad Merge pull request #847 from ONLYOFFICE/fix/documentation
Build json docs fixes
2024-07-26 06:47:02 -07:00
f012c604b8 Build json docs fixes 2024-07-26 20:42:09 +07:00
a8f6b0c599 Merge pull request #846 from ONLYOFFICE/feature/documentation
Added documentation generation scripts
2024-07-26 06:24:06 -07:00
e46d73869c Added documentation generation scripts 2024-07-26 20:20:55 +07:00
6bf413a008 Merge branch hotfix/v8.1.1 into release/v8.2.0 2024-07-26 08:02:47 +00:00
963c3bf212 Merge pull request #801 from ONLYOFFICE/feature/split_functions
Split functions
2024-07-25 04:05:41 +01:00
f7071569d9 Merge remote-tracking branch 'remotes/origin/release/v8.2.0' into feature/split_functions
# Conflicts:
#	scripts/develop/run_server.py
2024-07-25 03:56:14 +01:00
4e5eadbf82 For bug 68924 2024-07-22 00:07:45 +03:00
113e2e7821 Fixed builder interface script generation 2024-07-19 15:00:50 +07:00
21c8c699dd [desktop] for bug 62528 2024-07-18 14:15:51 +03:00
db36b7dc40 [develop] Fix mysql check. "SHOW DATABASES" returns lowercase result 2024-07-17 14:56:25 +03:00
38522989d3 [develop] Add db-name config option 2024-07-16 19:26:19 +03:00
3af65bf276 Version up 2024-07-12 11:12:32 +03:00
0a51c3bdea Fix bug 46933 2024-07-11 19:32:44 +03:00
ba6c3a8f38 Fix bug 68571 2024-07-07 23:43:03 +03:00
66e196b5ec [develop] Remove confusion with working dir in readme 2024-07-03 17:37:59 +03:00
d4a49d7137 Update github actions (#832) 2024-07-02 16:57:51 +03:00
1cca8af54f Merge branch 'develop' of https://github.com/ONLYOFFICE/build_tools into feature/split_functions 2024-07-02 12:15:59 +03:00
7e925fd931 Merge pull request #830 from ONLYOFFICE/feature/libvlc-linux
Correct RPATHs for libvlc build
2024-06-29 11:32:38 -07:00
45448171d4 Correct rpaths for libvlc build 2024-06-28 20:25:21 +04:00
64ae3d9029 Merge branch release/v8.1.0 into develop 2024-06-26 10:39:48 +00:00
edccac17f6 Merge pull request #827 from ONLYOFFICE/fix/readme
Fix/readme
2024-06-24 12:46:54 +03:00
1d36cad17e [develop] Clarify cwd for docker run command 2024-06-19 11:54:16 +03:00
08e6d5ba53 [docs] Fix PostgreSQL database creation commands in readme 2024-06-19 11:26:02 +03:00
6505ee1b35 Merge branch release/v8.1.0 into master 2024-06-19 08:19:22 +00:00
709612090a Refactoring 2024-06-14 17:17:00 +03:00
1af5c373e4 Refactoring desktop packages build (#824) 2024-06-14 17:10:49 +03:00
8181d187dd Fix previous commit 2024-06-14 17:04:27 +03:00
4b448e3305 Add new options for spreadsheets conversion 2024-06-14 16:40:18 +03:00
fd579511ae Update hard-coded version to v8.1.0 2024-06-11 13:21:19 +00:00
e166237e5d Fix previous commit 2024-06-04 23:29:20 +03:00
b934429e41 Rename modules for standard libs correctly work 2024-06-04 23:27:09 +03:00
d61c1da666 Resolve conflicts when importing modules 2024-06-04 09:21:34 +03:00
8f633771d9 Merge pull request #821 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-06-03 17:17:02 +03:00
684f478c54 Fix mac builder upload path (#820) 2024-06-03 16:29:31 +03:00
cb0099d746 Refactoring 2024-05-27 13:08:05 +03:00
a72ead91dc [develop] Add no-cache option to readme 2024-05-23 01:25:48 +03:00
fd7c3c6cf3 Merge pull request #815 from ONLYOFFICE/release/v8.1.0
Merge release/v8.1.0 into develop
2024-05-22 12:03:32 +03:00
5ef8abacfa Update vcredist checksums (#813) 2024-05-22 11:52:04 +03:00
a01221ffc6 Merge pull request #811 from ONLYOFFICE/fix/docbuilder-python-deploy
Fix docbuilder python deploy on linux and mac
2024-05-21 18:12:17 +03:00
cbd4ab2e15 Fix for linux 2024-05-21 19:00:19 +04:00
e70152b85b Correct deploy rpath for linux 2024-05-21 18:30:58 +04:00
8a9c9a587e Fix rpath for mac 2024-05-21 17:38:31 +04:00
29c15d9acd Fix typo 2024-05-19 12:03:08 +03:00
bf6773f666 Add python wrapper for builder 2024-05-19 11:23:05 +03:00
bba0ff87da Add script for qt arm builds 2024-05-16 13:31:32 +03:00
c9de5278ea Fix build 2024-05-16 12:10:50 +03:00
6f5a791a1f Add hack for android debug builds 2024-05-06 15:18:54 +03:00
1e7a720e74 Disable unused modules 2024-05-03 12:31:04 +03:00
10a7080928 Merge pull request #806 from ONLYOFFICE/feature/build-qt-arm
Qt build for linux arm
2024-04-26 19:09:57 +03:00
7349c64253 Add arm32 build 2024-04-26 19:18:48 +04:00
88649507c7 Fix problem with cores 2024-04-26 14:08:27 +04:00
cc503473f9 Add dockerfile and python script 2024-04-25 22:07:46 +04:00
10fcec1dd8 [license_checker] Update web-apps config 2024-04-24 17:15:47 +03:00
0679c0f6d7 [license_checker] Allow different license templates 2024-04-24 14:23:51 +03:00
a1a69bdbab Add build-only-branding param 2024-04-15 14:41:05 +03:00
da02b358e2 [develop] Check private repo existence(server-lockstorage) 2024-04-10 01:47:31 +03:00
60dcea6ff4 Fix creation xcframeworks if destination exist 2024-04-09 23:34:37 +03:00
b5796d5e6c Add v1 plugins engine to local server 2024-04-02 21:57:00 +03:00
6338fd58c3 Split functions 2024-03-19 17:39:28 +03:00
39b6841557 Fix build 2024-03-18 12:05:26 +03:00
f3a20e8e59 Merge pull request #800 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-15 10:45:45 +03:00
830df65573 Add mobile module support 2024-03-15 10:44:58 +03:00
2aeb9e1315 Fix build on linux 2024-03-14 21:55:49 +03:00
696c48c251 Merge pull request #799 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-14 20:08:54 +03:00
dcf02e7e93 Fix mobile package 2024-03-14 20:03:40 +03:00
581091591b Fix typo 2024-03-14 19:39:49 +03:00
0e6f1a064d Change project type to json 2024-03-14 16:45:40 +03:00
70975098e2 Add web-apps js maps deploy (#796)
* Exclude js maps

* Add web-apps js maps deploy
2024-03-14 14:10:56 +03:00
5b27f9843f test commit 2024-03-14 09:38:37 +03:00
71e29a6599 Fix windows build 2024-03-14 09:38:17 +03:00
6fd43a4b18 Add support short names for ndk 2024-03-14 09:19:09 +03:00
11f207fbe2 Fix typo 2024-03-14 09:12:38 +03:00
6559d589dd Merge pull request #798 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-13 18:33:44 +03:00
b7e9acc242 Merge pull request #797 from ONLYOFFICE/fix/8.0.2
Fix/8.0.2
2024-03-13 18:33:13 +03:00
bfd1cd0555 Fix build with old ndk 2024-03-13 17:14:01 +03:00
590dffdb78 Revert ndk version 2024-03-13 11:24:49 +03:00
0205dd6853 Refactoring 2024-03-10 21:53:45 +03:00
cd03a42c1b Fix packages build (#793) 2024-03-05 11:16:34 +03:00
c1a8d181d2 Fix desktop package build (#792) 2024-03-01 19:04:24 +03:00
a17d5e04bb Remove unused dependency 2024-02-27 14:40:42 +03:00
e719ae24f0 Merge branch hotfix/v8.0.1 into master 2024-02-26 07:32:18 +00:00
b4922e6899 Merge pull request #788 from ONLYOFFICE/hotfix/v8.0.1
Hotfix/v8.0.1
2024-02-07 12:46:01 +03:00
d8c2505fb8 Fix xp build without path env 2024-02-07 12:44:24 +03:00
02426e413f Switch python2 version to version from bootstrap 2024-02-07 12:17:40 +03:00
bd05971ebb Patch python script on windows 2024-02-06 23:16:45 +03:00
4e12692325 Fix build 2024-02-06 20:02:22 +03:00
f7ea69acc9 Update VCRedist (#787) 2024-02-06 17:44:15 +03:00
3640cea64d Update hard-coded version to v8.0.1 2024-02-06 14:27:51 +00:00
f5ac8ac39d Merge branch release/v8.0.0 into develop 2024-02-05 08:38:40 +00:00
f801e77208 Merge branch release/v8.0.0 into master 2024-01-30 11:23:27 +00:00
2a8c5ea9eb Disable drawio by default 2024-01-26 14:17:16 +03:00
181a42e344 Fix xp plugins (desktop) 2024-01-22 12:14:58 +03:00
a0511ca3ac Fix build js for native 2024-01-20 21:24:02 +03:00
0b48f3a67f Refactoring build native scripts 2024-01-18 17:05:17 +03:00
15727e83cc [desktop] add noconnect.html to package 2023-12-26 19:17:58 +03:00
7d06432a76 Fix vcredist download (#777) 2023-12-20 18:46:46 +03:00
761c47e26d Add fonts to desktop package 2023-12-11 21:25:53 +03:00
edc6a38dfb Fix typo 2023-12-11 21:25:20 +03:00
2b79e127c4 Fix native build 2023-12-09 23:43:22 +03:00
449875d5b8 Fix msi icon paths (#775) 2023-12-08 15:18:30 +03:00
bbdb9e0107 Merge pull request #774 from ONLYOFFICE/release/v8.0.0
Release/v8.0.0
2023-12-07 22:28:19 +03:00
0a613734f7 Fix build 2023-12-07 22:15:12 +03:00
ff2aa0434a Fix android build 2023-12-07 14:51:19 +03:00
2fa22ca2b3 Fix build 2023-12-07 13:31:08 +03:00
25473c1b5c Merge branch 'release/v8.0.0' of https://github.com/ONLYOFFICE/build_tools into release/v8.0.0 2023-12-07 12:08:33 +03:00
7c087e20b7 Fix build 2023-12-07 12:08:17 +03:00
7250b59f19 Update hard-coded version to v8.0.0 2023-12-07 07:29:35 +00:00
e54e7ad6ec Merge pull request #773 from ONLYOFFICE/release/v7.6.0
Change fetching icu (github deprecated svn)
2023-12-06 18:52:21 +03:00
4a2fd9fb72 Fix aws s3 artifacts upload (#772) 2023-12-06 18:33:28 +03:00
afd5f2b3be Change fetching icu (github deprecated svn) 2023-12-06 12:55:07 +03:00
d468b93e9f Merge pull request #769 from ONLYOFFICE/fix/license-checker-readme
[license_checker] update Readme for allowListFile
2023-11-28 16:37:33 +03:00
188ad0057f Merge pull request #770 from ONLYOFFICE/release/v7.6.0
Release/v7.6.0
2023-11-28 10:58:33 +03:00
bde91e3dbf [license_checker] update Readme for allowListFile 2023-11-25 22:08:51 +03:00
3e9b233ecb [license] For new repo server-license-key 2023-11-21 00:08:52 +03:00
1f6a3010b5 Add method for auto-check modules 2023-11-17 15:08:47 +03:00
fa15db70c9 Merge branch release/v7.6.0 into develop (#765)
* Update hard-coded version to v7.6.0

* Fix vcredist [2] (#763)

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2023-11-16 17:32:35 +03:00
b74c359523 Fix vcredist [2] (#763) 2023-11-16 16:08:28 +03:00
6d7e67820a Update hard-coded version to v7.6.0 2023-11-15 14:52:29 +00:00
3d884963a7 Merge pull request #762 from ONLYOFFICE/release/v7.6.0
Merge branch release/v7.6.0 into develop
2023-11-15 17:08:32 +03:00
265cac6474 Fix vcredist download (#761) 2023-11-15 17:03:30 +03:00
96ff18b45c Merge branch hotfix/v7.5.1 into release/v7.6.0 2023-11-15 12:46:13 +00:00
8eb2d689fd Merge branch hotfix/v7.5.1 into develop 2023-11-15 12:46:10 +00:00
d2888db960 [desktop] for bug 65074 2023-11-14 23:07:15 +03:00
5d7de5a7ba [deploy] Build and deploy server without copying and grunt module 2023-11-14 18:42:21 +03:00
4d3e9c39b1 Fix msi build (#758) 2023-11-12 21:11:59 +03:00
a2639afd7a Merge branch hotfix/v7.5.1 into master 2023-10-31 14:39:47 +00:00
6d8f89deba Fix typo 2023-10-27 10:37:52 +03:00
b3a2493767 Update hard-coded version to v7.5.1 2023-10-26 07:59:57 +00:00
5d3cbbe194 Merge pull request #752 from ONLYOFFICE/fix/develop-instruction
[develop] Fix develop readme instruction
2023-10-25 23:05:04 +03:00
fd2e480e17 Fix build v8 for xp 2023-10-25 22:25:59 +03:00
342556b763 Fix windows build 2023-10-24 22:49:15 +03:00
1dd67ac7a9 [desktop] "connection error" page moved to "start" page 2023-10-24 18:17:53 +03:00
2311c55319 [develop] Fix develop readme instruction 2023-10-19 23:32:25 +03:00
eb80d0d6c1 [build] Revert nodejs version to 16 due to endless error messages when building on node18-linux-arm64(pkg-fetch@3.5 node@v18.15.0) 2023-10-19 12:43:47 +03:00
c629596198 Merge branch release/v7.5.0 into master 2023-10-19 08:18:08 +00:00
3fe86f753f Merge pull request #747 from ONLYOFFICE/release/v7.5.0
Release/v7.5.0
2023-10-18 16:57:28 +03:00
031e5a74d7 Fix build with old python 2023-10-18 16:55:34 +03:00
b26baed61e Merge branch release/v7.5.0 into master 2023-10-17 12:04:35 +00:00
a311f41f0c Fix deploy plugin store in desktop 2023-10-17 00:03:32 +03:00
07c3fb05a8 Move speechrecognition to server only 2023-10-16 20:46:53 +03:00
0625ad2652 Fix del dir with long filenames 2023-10-15 06:35:01 +03:00
ed3e4082a1 Fix copy directory with long names 2023-10-15 00:17:29 +03:00
b187130c34 Fix typo 2023-10-14 21:54:09 +03:00
3c56477f3a Deploy marketplace plugin from base repo 2023-10-14 19:19:44 +03:00
8c15ed7887 Merge pull request #744 from ONLYOFFICE/feature/new-default-plugins
Add some plugins by default.
2023-10-13 11:25:43 +03:00
1f46c647f9 Add some plugins by default.
drawio, zotero, speech input.
2023-10-13 11:11:57 +03:00
8791ddf547 [desktop] fix build 2023-10-11 11:32:28 +03:00
04679efe76 Merge pull request #743 from ONLYOFFICE/feature/connection-error-path 2023-10-09 23:18:50 +03:00
9197d31552 [desktop] added connection error page 2023-10-09 23:12:48 +03:00
47977e3b37 Update windows desktop build (#742) 2023-10-09 19:01:02 +03:00
e358689181 [develop] Fix markdown errors 2023-10-05 13:30:18 +03:00
4cda4793bc [develop] Fix Dockerfile. Allow branding command line params
# Conflicts:
#	develop/run_build_js.py
2023-10-05 13:30:18 +03:00
7b470fa1f8 Fix build icu for android on mac 2023-10-05 12:46:35 +03:00
33b14d8848 Fix vlc-cache-gen paths (#740) 2023-09-27 15:09:50 +03:00
01f6464a71 Fix typo 2023-09-27 11:55:26 +03:00
c0c0755505 Fix check config option 2023-09-27 11:50:25 +03:00
5bccf567fd Fix update DesktopEditors vlc plugin cache (#739) 2023-09-27 10:32:12 +03:00
670235480b Merge pull request #738 from ONLYOFFICE/feature/win-vlc-cache
Update windows package DesktopEditors vlc plugin cache
2023-09-26 16:26:23 +03:00
bfab104961 Small fix 2023-09-26 16:23:47 +03:00
658ce63a04 Update windows package DesktopEditors vlc plugin cache 2023-09-26 16:15:16 +03:00
208b602c97 Add vlc-cache-gen to deploy folder 2023-09-26 15:09:00 +03:00
49c65d9f64 Merge pull request #737 from ONLYOFFICE/release/v7.5.0
Release/v7.5.0
2023-09-26 12:16:00 +03:00
afdd241116 [desktop] use fonts from core-fonts repo 2023-09-21 23:46:17 +03:00
241748308a [build] Bump nodejs version to 18; npm ci instead of npm i 2023-09-21 12:59:23 +03:00
fc0e0adbc7 Add catch error on file copying 2023-09-21 10:10:30 +03:00
ce648a2649 Add patch for xcode 15.0 2023-09-20 22:46:17 +03:00
92602510a5 Merge pull request #736 from ONLYOFFICE/feature/MetafileTester
Added console metafile tester
2023-09-20 12:45:51 +03:00
2429745cc0 Added console metafile tester 2023-09-20 12:19:09 +03:00
98f70179ef Fix dictionaries deploy 2023-09-17 15:01:38 +03:00
bd167f6258 Add dictionaries path to doctrenderer.config 2023-09-16 18:42:06 +03:00
88423908f2 Fix build 2023-09-16 17:24:43 +03:00
23a4c4b0b5 Add support config addon for each platform 2023-09-16 16:36:58 +03:00
879b6b2810 Merge pull request #732 from ONLYOFFICE/fix/videoplayer
Updated libvlc builds
2023-09-13 10:39:31 +03:00
3a3652e753 Fix closure maps deploy path (#733) 2023-09-12 18:28:50 +03:00
e79079f4d4 [libvlc] Fixes for linux and win builds 2023-09-11 18:06:31 +04:00
3660eb62ec [libvlc] Small fix for mac builds 2023-09-11 17:59:15 +04:00
c6d41ba35d [libvlc] Remove dependency on qtmultimedia. 2023-09-04 16:48:26 +03:00
c9fb306823 Merge pull request #730 from ONLYOFFICE/feature/libvlc
Feature/libvlc
2023-08-31 21:23:01 +03:00
6d9a9032b2 Merge pull request #729 from ONLYOFFICE/release/v7.5.0
Release/v7.5.0
2023-08-31 21:19:58 +03:00
fab8edef79 [test] Use os.walk 2023-08-31 15:22:36 +03:00
df60f1c273 [test] Add convert_directory_test.py to convert_directory with test exe(without doctrender) 2023-08-31 15:01:45 +03:00
fc0e3972a3 Merge branch 'hotfix/v7.4.2' into release/v7.5.0 2023-08-25 17:14:55 +03:00
3ff6c327f9 Add method for deploy dictionaries 2023-08-25 17:10:51 +03:00
c22b067e30 Fix remove repo method
(cherry picked from commit 4e760a2a38)
2023-08-25 15:20:50 +03:00
6d6eff662d Fix build
(cherry picked from commit 8a1a2b93c6)
2023-08-25 15:20:41 +03:00
48ecc3915e Increased minimum required version of Node.js 2023-08-22 19:35:32 +03:00
8c87ead486 Merge pull request #727 from ONLYOFFICE/fix/nodeV
Increased minimum required version of Node.js
2023-08-22 19:34:31 +03:00
66cffd6722 Increased minimum required version of Node.js 2023-08-22 15:38:48 +08:00
eadad135e2 Merge pull request #726 from ONLYOFFICE/feature/docker-instruction-2
Feature/docker instruction 2
2023-08-22 01:23:06 +03:00
9a44988707 [develop] Add note; remove branch from Dockerfile to prepare for master 2023-08-22 01:19:42 +03:00
d9b2f92e64 [develop] Fix markdown errors 2023-08-22 01:17:08 +03:00
a8cb907b71 [develop] Fix markdown errors 2023-08-22 01:17:07 +03:00
633c176e50 [develop] Add ALLOW_PRIVATE_IP_ADDRESS (to fix http://localhost/example/images/logo.png insertion) 2023-08-22 01:17:07 +03:00
91e8c60036 [develop] Fix typo 2023-08-22 01:17:06 +03:00
9c1398814d [develop] Add check__docker_dependencies call for external sdkjs and web-apps 2023-08-22 01:17:06 +03:00
672d1dc800 [develop] Fix missing platform 2023-08-22 01:17:06 +03:00
fca666825a [develop] Fix linux path 2023-08-22 01:17:05 +03:00
dac76abd74 [develop] Fix Dockerfile 2023-08-22 01:17:05 +03:00
e8ad53b990 [develop] Fix Dockerfile 2023-08-22 01:17:05 +03:00
002504fca3 [develop] Fix new supervisor config path 2023-08-22 01:17:04 +03:00
16c36d346e [develop] Edit readme for linux 2023-08-22 01:17:04 +03:00
18142fc257 [develop] Update Dockerfile and instruction 2023-08-22 01:17:04 +03:00
72d3244dcb [develop] Fix readme 2023-08-22 01:17:03 +03:00
34ee246673 [develop] Fix readme 2023-08-22 01:17:03 +03:00
3ea1cff8de [develop] Update readme 2023-08-22 01:17:03 +03:00
a9de3f6f0e [linter] Fix gitflow 2023-08-22 01:17:02 +03:00
a54bf745ae [linter] Fix gitflow 2023-08-22 01:16:03 +03:00
8ee547cad7 [linter] Set code_block_line_length linter rule to 300 2023-08-22 01:15:04 +03:00
325a68877b [develop] Fix readme(linter) 2023-08-22 01:14:03 +03:00
fd13759a79 [develop] Fix readme(linter) 2023-08-22 01:14:02 +03:00
283ac31f9b [develop] Fix readme(linter) 2023-08-22 01:14:02 +03:00
ea253634d2 [develop] Fix readme(linter) 2023-08-22 01:14:02 +03:00
426c24ac52 [develop] Fix readme(linter) 2023-08-22 01:14:01 +03:00
15f7a39997 [develop] Modify readme 2023-08-22 01:14:01 +03:00
69107bb48c [develop] Edit readme to mount server volume; Set up debug logging and start test example in docker by default; 2023-08-22 01:13:52 +03:00
681e9deafd Remove gsttools in vlc package 2023-08-21 15:46:08 +03:00
62911b8490 Refactoring 2023-08-20 18:53:39 +03:00
4e760a2a38 Fix remove repo method 2023-08-20 13:50:27 +03:00
8a1a2b93c6 Fix build 2023-08-19 10:20:56 +03:00
102458d9c8 Fix typo 2023-08-18 15:59:16 +03:00
2a75912ca4 Fix typo 2023-08-18 15:24:48 +03:00
81c6410394 Change deploy with libvlc option 2023-08-18 14:58:53 +03:00
d8b759841e Merge branch 'release/v7.5.0' into develop 2023-08-16 18:51:03 +03:00
d66d9a03ec Revert partially "[develop] Use npm ci instead of npm i when building web-apps"
This reverts commit 5012e4e9bd.
because error with npm ci (-v 9) on package-lock.json(-v 6) file with local dependencies
https://github.com/npm/cli/issues/5125
https://github.com/npm/cli/issues/529
2023-08-16 18:49:19 +03:00
1894c5c971 Merge release/v7.5.0 into develop
Release/v7.5.0
2023-08-10 12:07:40 +03:00
5012e4e9bd [develop] Use npm ci instead of npm i when building web-apps 2023-08-08 19:24:19 +03:00
b9ccd9849a Refactoring packages deploy (#720)
* Refactoring packages deploy

* Small fix
2023-08-04 17:59:21 +03:00
898f961e2a some restyling in libvlc build script 2023-08-04 13:00:33 +04:00
88843a1f2d Fix windows package build (#719) 2023-08-03 17:40:20 +03:00
8ea37f2b03 MacOS package build refactoring (#718) 2023-08-03 11:00:15 +03:00
162b5dcb00 automatic plugins.dat generation on mac 2023-08-02 18:50:23 +04:00
2889258304 ignoring timestamps on cache loading
+ forming linux_64 build directory reworked
+ patching on mac enhanced
2023-08-02 16:53:58 +04:00
7770a41f08 Update hard-coded version to v7.5.0 2023-08-02 12:14:58 +00:00
0ba4a6a968 Merge branch hotfix/v7.4.1 into develop 2023-08-01 08:09:30 +00:00
c2d39b1357 Merge branch hotfix/v7.4.1 into master 2023-07-31 07:20:29 +00:00
415d47658b libvlc: fixed build for mac_arm64 2023-07-28 12:49:33 +04:00
a3e58605a5 Remove open from io 2023-07-27 17:17:48 +03:00
7936c3d097 libvlc: added build for mac 2023-07-27 18:12:25 +04:00
d4da415e4d Add method for replace path of text file with utf8 content (python2&3 support) 2023-07-26 22:30:02 +03:00
3197700bc0 Fix bug #62960 / Fix zip sign (#715) 2023-07-21 19:44:06 +05:00
285b99a5ac libvlc: build libvlc for linux in docker 2023-07-19 17:15:32 +04:00
fac40064ce libvlc: added build script for linux 2023-07-17 19:26:53 +04:00
4c4ef3ad64 Fix msi build (#714) 2023-07-17 18:37:22 +05:00
69edb29412 [develop] npm ci in server dir 2023-07-13 10:34:41 +03:00
a1deadc40c Fix bug in deploy osign library 2023-07-13 02:55:31 +05:00
495aa71860 Add module osign 2023-07-13 00:46:40 +03:00
2cf672ed17 Refactoring build mobile version 2023-07-10 12:09:52 +05:00
95770429ef Merge pull request #710 from ONLYOFFICE/feature/embedJS
Feature/embed js
2023-07-09 17:21:45 +03:00
724b42f938 libvlc: added build in docker for win32 and win64 2023-07-07 22:32:40 +04:00
010f1f7a77 Fix build scripts 2023-07-05 21:53:51 +03:00
1d37344d01 Merge pull request #707 from ONLYOFFICE/feature/vboxtester
build
2023-07-02 00:44:30 +03:00
a5e412ee85 Add vboxtester to deploy 2023-07-02 00:43:46 +03:00
bdd1d765bf Add macos builder archive deploy (#706) 2023-06-30 21:13:22 +05:00
68de1c72e7 Fix check action (#704) 2023-06-29 15:48:56 +05:00
2062bd0b92 Fix package build (#703) 2023-06-29 15:48:44 +05:00
0f1dcb88d4 Merge pull request #701 from ONLYOFFICE/fix/nodejsV
Updated max permitted node.js version
2023-06-29 08:23:16 +03:00
12500bbd70 Changed node.js minor version 2023-06-29 10:31:06 +08:00
7bc15e05d6 Updated max permitted node.js version 2023-06-28 21:50:24 +08:00
84a8032233 Add files to gitignore 2023-06-28 11:47:25 +03:00
67a4ab0dfe Merge pull request #700 from ONLYOFFICE/feature/tests
Add tests for editors
2023-06-28 11:41:20 +03:00
eff25a9245 Merge branch 'release/v7.4.0' into develop 2023-06-28 11:07:28 +03:00
5497cb527b Disable precompiled version on release build 2023-06-27 18:49:41 +03:00
e811ce765c Add tests for editors 2023-06-12 22:31:18 +03:00
f70431f7a5 build 2023-06-06 15:00:28 +03:00
095bbc9d19 Update vcredist checksums (#674) (#675) 2023-05-17 16:19:41 +05:00
99741d0805 Merge pull request #672 from ONLYOFFICE/merge-hotfix
Merge branch hotfix/v7.3.3 into develop
2023-05-15 12:38:52 +05:00
fcec89bf9c Merge branch 'hotfix/v7.3.3' into develop 2023-05-15 10:28:28 +03:00
8fbf7485dc Merge pull request #666 from ONLYOFFICE/release/v7.4.0
Fix build arm64 version
2023-05-02 02:57:21 +05:00
063ffc6ece Merge pull request #665 from ONLYOFFICE/release/v7.4.0
Release/v7.4.0
2023-04-28 21:37:19 +05:00
fb3dda807e Merge pull request #662 from ONLYOFFICE/release/v7.4.0
Release/v7.4.0
2023-04-27 01:55:46 +05:00
014b74bb1d Merge pull request #647 from ONLYOFFICE/release/v7.4.0
Release/v7.4.0
2023-04-13 10:20:53 +03:00
fb9d1e69a4 Merge pull request #634 from ONLYOFFICE/feature/license-checker
Feature/license checker
2023-03-24 13:30:34 +03:00
d45cd9932b revert test 2023-03-10 21:23:37 +03:00
58b6a91f65 test action 2023-03-10 21:21:25 +03:00
942875d1a1 fix actions 2023-03-10 21:20:33 +03:00
6b740baf73 revert actions 2023-03-02 17:28:29 +03:00
1ada97c409 fix actions 2023-03-02 17:26:10 +03:00
89caa5f87c fix docs 2023-03-02 17:24:38 +03:00
1badc69477 fix doc 2023-03-02 17:10:01 +03:00
6769ade9a9 fix doc 2023-03-02 17:07:35 +03:00
0e783f0413 fix docs 2023-03-02 17:05:48 +03:00
577ab77f1d [build] fix docs 2023-03-02 17:01:43 +03:00
7ee44be072 fix docs 2023-03-02 16:57:36 +03:00
fa7bbaf98b fix doc 2023-03-02 16:53:21 +03:00
672fcfdb6d add test doc 2023-03-02 16:50:00 +03:00
fc01b4ad8a [build] changed check 2023-03-02 16:49:17 +03:00
ca7f0f5951 deleted test doc 2023-03-02 16:44:11 +03:00
f003ad3277 [build] changed paths 2023-03-02 16:43:28 +03:00
dc6f59943f test doc 2023-03-02 16:38:03 +03:00
ee51adb675 [build] revert base.py 2023-03-02 16:35:31 +03:00
5406c24771 [build] revert base.py 2023-03-02 16:34:33 +03:00
d9c768c2d0 [build] fix deleted semicolons 2023-03-02 16:19:50 +03:00
d876c4d100 [build] small fix 2023-03-02 15:32:37 +03:00
894aaa9fa9 Merge pull request #633 from ONLYOFFICE/develop
Develop
2023-03-02 15:30:24 +03:00
010f22ea3b Revert "[build] added checks for plugins"
This reverts commit 72cf0a5837.
2023-03-02 15:24:40 +03:00
0a560c9594 Revert "[develop] Add plugins deploy for developer version"
This reverts commit 03d371d9fc.
2023-03-02 15:23:57 +03:00
f0a3325ab8 [license] Update address in Copyright 2023-03-01 23:38:07 +03:00
ba0c7173c9 [license] Update config; Fix bug with bom, leading space; and minor changes 2023-02-28 15:20:59 +03:00
3ae37d764b [build] fix 2023-02-01 12:40:24 +03:00
6b15d7fca2 [build] add marketplace plugin at ignore 2023-02-01 12:26:19 +03:00
d8167ea9dd [build] fix plugins 2023-02-01 12:22:12 +03:00
1710df79f2 [build] fix prev commit 2023-02-01 07:24:49 +03:00
72cf0a5837 [build] added checks for plugins 2023-02-01 06:48:36 +03:00
03d371d9fc [develop] Add plugins deploy for developer version 2023-01-31 14:17:08 +03:00
06773a22c9 [build] license_checker small fix 2023-01-26 17:57:31 +03:00
6ddcbc7c18 fix 2023-01-25 13:45:16 +03:00
1cdc9142df license_checker readme added 2023-01-25 13:30:15 +03:00
3bc88c4bf3 fix license checker 2023-01-24 22:14:10 +03:00
c4b21c554f [build] updated license checker 2023-01-24 22:06:28 +03:00
f7bbe2d9f7 [build] license checker fix docs 2023-01-20 16:43:30 +03:00
92760b2835 [build] license checker updated config options
Added ignoreListDirName
2023-01-20 16:41:28 +03:00
379718dbf9 [build] fix license_checker
deleted unused ignore
2023-01-20 15:54:44 +03:00
787d690c41 [build] license checker added config for repos 2023-01-20 14:43:29 +03:00
32f124517a [build] small fix license checker 2023-01-19 18:09:28 +03:00
f501a6ebac [build] license checker is ready
(tested only for sdkjs)
2023-01-19 17:53:11 +03:00
2f632a0f8d init commit 2023-01-17 12:35:47 +03:00
91 changed files with 5166 additions and 1700 deletions

View File

@ -3,7 +3,7 @@ name: Bug Report
about: Report an issue with build_tools you've discovered.
---
**Describe your problem**:
# Describe your problem:
*Be clear in your description of the problem.
Open an issue with a descriptive title and a summary in complete sentences.*

View File

@ -1,18 +1,24 @@
name: Markdown check
name: Markdown Lint
on:
workflow_dispatch:
push:
branches:
- '**'
paths:
- '*.md'
- 'develop/*.md'
- 'scripts/**.md'
- '.markdownlint.jsonc'
jobs:
markdownlint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: DavidAnson/markdownlint-cli2-action@v9
- uses: actions/checkout@v4
- uses: DavidAnson/markdownlint-cli2-action@v16
with:
globs: '*.md,develop/*.md'
separator: ','
globs: |
*.md
develop/*.md
scripts/**.md

View File

@ -3,16 +3,13 @@ name: Update hard-coded version
on: workflow_dispatch
jobs:
update-version:
if: >-
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
contains(github.ref, 'refs/heads/release/v') }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
token: ${{ secrets.PUSH_TOKEN }}
@ -25,9 +22,9 @@ jobs:
run: echo "${{ env.version }}" > version
- name: Commit & push changes
uses: EndBug/add-and-commit@v8
uses: EndBug/add-and-commit@v9
with:
author_name: github-actions[bot]
author_email: github-actions[bot]@users.noreply.github.com
message: Update hard-coded version to v${{ env.version }}
message: Update hard-coded version to ${{ env.version }}
add: version

7
.gitignore vendored
View File

@ -7,3 +7,10 @@ config
*.*~
**~
*.DS_Store
scripts/license_checker/reports
tests/puppeteer/node_modules
tests/puppeteer/work_directory
tests/puppeteer/package.json
tests/puppeteer/package-lock.json
scripts/sdkjs_common/jsdoc/node_modules
scripts/sdkjs_common/jsdoc/package-lock.json

5
.markdownlint.jsonc Normal file
View File

@ -0,0 +1,5 @@
{
"line-length": {
"code_block_line_length": 300
}
}

View File

@ -196,9 +196,8 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
**Note**: The created database must have **onlyoffice** both for user and password.
```bash
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
```
3. Configure the database:

130
build.pro
View File

@ -1,130 +0,0 @@
TEMPLATE = subdirs
ROOT_DIR=$$PWD/..
DEPLOY_DIR=$$PWD/deploy
CORE_ROOT_DIR=$$ROOT_DIR/core
include($$PWD/common.pri)
CONFIG += ordered
core_windows {
desktop:CONFIG += core_and_multimedia
}
core_linux {
desktop:CONFIG += core_and_multimedia
}
core_mac {
CONFIG += no_desktop_apps
}
core_ios {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
core_android {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
addSubProject(cfcpp, $$CORE_ROOT_DIR/Common/cfcpp/cfcpp.pro)
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
cryptopp)
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
unicodeconverter)
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
kernel unicodeconverter)
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
kernel unicodeconverter)
addSubProject(pdffile, $$CORE_ROOT_DIR/PdfFile/PdfFile.pro,\
kernel unicodeconverter graphics)
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
kernel unicodeconverter graphics pdffile)
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
kernel unicodeconverter graphics pdffile)
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
kernel unicodeconverter graphics network)
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
kernel unicodeconverter graphics)
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
kernel unicodeconverter graphics htmlfile2)
!no_x2t {
addSubProject(docxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro)
addSubProject(pptxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro)
addSubProject(xlsbformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro)
addSubProject(docformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro)
addSubProject(pptformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro)
addSubProject(xlsformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro)
addSubProject(vbaformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro)
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/TxtFile/Projects/Linux/TxtXmlFormatLib.pro)
addSubProject(rtfformat, $$CORE_ROOT_DIR/RtfFile/Projects/Linux/RtfFormatLib.pro)
addSubProject(odffile, $$CORE_ROOT_DIR/OdfFile/Projects/Linux/OdfFormatLib.pro)
addSubProject(bindocument, $$CORE_ROOT_DIR/OOXML/Projects/Linux/BinDocument/BinDocument.pro)
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
docxformat pptxformat xlsbformat docformat pptformat xlsformat vbaformat txtxmlformat rtfformat odffile cfcpp bindocument fb2file epubfile docxrenderer)
}
!no_use_common_binary {
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
kernel unicodeconverter graphics)
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
kernel unicodeconverter graphics)
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
kernel unicodeconverter graphics doctrenderer)
addSubProject(pluginsmanager, $$CORE_ROOT_DIR/DesktopEditor/pluginsmanager/pluginsmanager.pro,\
kernel)
}
# Test applications; skipped when configured with the no_tests flag.
!no_tests {
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
addSubProject(x2ttester, $$CORE_ROOT_DIR/Test/Applications/x2tTester/x2ttester.pro)
#TODO:
# ooxml_crypt is excluded on linux_arm64 (see TODO above; presumably not
# yet ported to that platform -- confirm before enabling).
!linux_arm64:addSubProject(ooxml_crypt, $$CORE_ROOT_DIR/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro)
}
# Video player library from the desktop SDK, built only when the
# core_and_multimedia configuration scope is active.
core_and_multimedia {
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
kernel unicodeconverter graphics)
}
# Desktop Editors application stack: spell checker, OOXML signing,
# the Chromium-based documents core, and the desktop app itself.
desktop {
message(desktop)
# Hunspell spell-checking library
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
# OOXML digital signature support (xmlsec-based)
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
kernel unicodeconverter graphics)
# Main Chromium-based editors core library plus its helper process
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdffile djvufile xpsfile)
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
documentscore)
# Qt wrapper around the documents core; not built on macOS
!core_mac {
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
documentscore)
}
# End-user desktop applications (skipped with no_desktop_apps)
!no_desktop_apps {
# Windows-only: file-association icons and the auto-update daemon
# (the daemon is additionally excluded from Windows XP builds)
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
documentscore videoplayer)
core_windows:!build_xp:addSubProject(updatedaemon, $$ROOT_DIR/desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro)
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
documentscore videoplayer)
}
}
# Mobile builds also need hunspell; only add it here if the desktop scope
# (which already adds it) is not active, to avoid a duplicate subproject.
mobile {
message(mobile)
!desktop {
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
}
}

View File

@ -21,6 +21,7 @@ parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp"
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")

View File

@ -1,3 +1,3 @@
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode"
sdkjs-plugin-server="speech, zotero, mendeley"
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, zotero"
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition, drawio"
sdkjs-addons="sdkjs-forms"

View File

@ -3,11 +3,17 @@ RUN apt-get update -y && \
apt-get install git -y \
python3 -y \
openjdk-11-jdk -y \
bzip2 -y \
npm -y && \
npm install -g grunt-cli -y && \
npm install -g grunt grunt-cli -y && \
ln -s /usr/bin/python3 /usr/bin/python && \
ln -s /usr/bin/pip3 /usr/bin/pip && \
git clone --depth 1 https://github.com/ONLYOFFICE/build_tools.git var/www/onlyoffice/documentserver/build_tools && \
sed -i '/documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}/d' /app/ds/run-document-server.sh && \
#Set Up Debug Logging
sed -i 's/WARN/ALL/g' /etc/onlyoffice/documentserver/log4js/production.json && \
#Start test example
if [ -s /etc/supervisor/conf.d/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /etc/supervisor/conf.d/ds-example.conf; fi && \
if [ -s /app/ds/setup/config/supervisor/ds/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /app/ds/setup/config/supervisor/ds/ds-example.conf; fi && \
rm -rf /var/lib/apt/lists/*
ENTRYPOINT python3 /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver && /bin/sh -c /app/ds/run-document-server.sh
ENTRYPOINT python3 /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver $@ && /bin/sh -c /app/ds/run-document-server.sh

View File

@ -1,50 +1,219 @@
# Docker
This directory contains instructions for developers
who want to change something in sdkjs or web-apps module,
who want to change something in sdkjs or web-apps or server module,
but don't want to compile the rather complicated core product to make those changes.
## Installing ONLYOFFICE Docs
## System requirements
## How to use - Linux or macOS
### Windows
**Note**: You need the latest Docker version installed.
You need the latest
[Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/)
installed.
You might need to pull **onlyoffice/documentserver** image:
**Note**: Docker Desktop does not start automatically after installation.
You should manually start the **Docker Desktop** application.
**Note**: If you have problems running Docker Desktop with the
"Use WSL 2 instead of Hyper-V" installation option,
try reinstalling it without this option.
### Linux or macOS
You need the latest
[Docker](https://docs.docker.com/engine/install/)
version installed.
## Create develop Docker Images
To create an image with the ability to include external non-minified sdkjs code,
use the following commands:
### Clone development environment to work dir
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
```
### Modify Docker Images
**Note**: Do not prefix docker command with sudo.
[This](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user)
instruction shows how to use Docker without sudo.
```bash
docker pull onlyoffice/documentserver
```
### Create develop image
To create an image with the ability to include external non-minified sdkjs code,
use the following command:
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
cd build_tools/develop
docker build -t documentserver-develop .
docker pull onlyoffice/documentserver
docker build --no-cache -t documentserver-develop .
```
**Note**: The dot at the end is required.
### Connecting external folders
**Note**: Sometimes the script may fail due to network errors. Just restart it.
To connect external folders to the container,
you need to pass the "-v" parameter
along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow
## Clone development modules
Clone development modules to the work dir
* `sdkjs` repo is located [here](https://github.com/ONLYOFFICE/sdkjs/)
* `web-apps` repo is located [here](https://github.com/ONLYOFFICE/web-apps/)
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
```bash
docker run -i -t -d -p 80:80 --restart=always \
-v /host-dir/sdkjs:/var/www/onlyoffice/documentserver/sdkjs \
-v /host-dir/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
git clone https://github.com/ONLYOFFICE/sdkjs.git
git clone https://github.com/ONLYOFFICE/web-apps.git
git clone https://github.com/ONLYOFFICE/server.git
```
## Start server with external folders
To mount external folders to the container,
you need to pass the "-v" parameter
along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow.
The folder `server` is optional.
**Note**: Run command with the current working directory
containing `sdkjs`, `web-apps`...
**Note**: ONLYOFFICE server uses port 80.
Look for another application using port 80 and stop it
**Note**: Server startup with `sdkjs` and `web-apps` takes 15 minutes,
and 20 minutes with `server`.
### docker run on Windows (PowerShell)
**Note**: Run PowerShell as administrator to fix EACCES error when installing
node_modules
run with `sdkjs` and `web-apps`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```
or run with `sdkjs`, `web-apps` and `server`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $pwd/server:/var/www/onlyoffice/documentserver/server documentserver-develop
```
### docker run on Linux or macOS
run with `sdkjs` and `web-apps`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```
or run with `sdkjs`, `web-apps` and `server`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $(pwd)/server:/var/www/onlyoffice/documentserver/server documentserver-develop
```
## Open editor
After the server starts successfully, you will see Docker log messages like this
```bash
[Date] [WARN] [localhost] [docId] [userId] nodeJS
```
To try the document editor, open a browser tab and type
[http://localhost/example](http://localhost/example) into the URL bar.
**Note**: Disable **ad blockers** for localhost page.
It may block some scripts (like Analytics.js)
## Modify sources
### To change something in `sdkjs` do the following steps
1) Edit the source file. Let's insert an image URL into each open document.
Following command inserts (in case of problems, you can replace URL)
`this.AddImageUrl(['http://localhost/example/images/logo.png']);`
after event
`this.sendEvent('asc_onDocumentContentReady');`
in file
`sdkjs/common/apiBase.js`
### change sdkjs on Windows (PowerShell)
```bash
(Get-Content sdkjs/common/apiBase.js) -replace "this\.sendEvent\('asc_onDocumentContentReady'\);", "this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);" | Set-Content sdkjs/common/apiBase.js
```
### change sdkjs on Linux or macOS
```bash
sed -i "s,this.sendEvent('asc_onDocumentContentReady');,this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);," sdkjs/common/apiBase.js
```
2)Delete browser cache or hard reload the page `Ctrl + Shift + R`
3)Open new file in browser
### To change something in `server` do the following steps
1) Edit the source file. Let's send `"Hello World!"`
chart message every time a document is opened.
Following command inserts
`yield* onMessage(ctx, conn, {"message": "Hello World!"});`
in function
`sendAuthInfo`
in file
`server/DocService/sources/DocsCoServer.js`
### change server on Windows (PowerShell)
```bash
(Get-Content server/DocService/sources/DocsCoServer.js) -replace 'opt_hasForgotten, opt_openedAt\) \{', 'opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});' | Set-Content server/DocService/sources/DocsCoServer.js
```
### change server on Linux or macOS
```bash
sed -i 's#opt_hasForgotten, opt_openedAt) {#opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});#' server/DocService/sources/DocsCoServer.js
```
2)Restart document server process
**Note**: Look for ``CONTAINER_ID`` in the result of ``docker ps``.
```bash
docker exec -it CONTAINER_ID supervisorctl restart all
```
3)Open new file in browser
## Start server with additional functionality(addons)
To get additional functionality and branding you need to connect a branding folder,
additional addon folders and pass command line arguments
For example run with `onlyoffice` branding and
addons:`sdkjs-forms`, `sdkjs-ooxml`, `web-apps-mobile`
### docker run on Windows (PowerShell) with branding
**Note**: Run PowerShell as administrator to fix EACCES error when installing
node_modules
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true `
-v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps `
-v $pwd/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $pwd/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $pwd/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $pwd/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile `
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
```
### docker run on Linux or macOS with branding
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true \
-v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps \
-v $(pwd)/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $(pwd)/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $(pwd)/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $(pwd)/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile \
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
```

View File

@ -2,11 +2,56 @@
import sys
sys.path.append(sys.argv[1] + '/build_tools/scripts')
sys.path.append(sys.argv[1] + '/build_tools/scripts/develop')
import build_js
import run_server
import config
import base
base.cmd_in_dir(sys.argv[1] + '/build_tools/', 'python3', ['configure.py'])
config.parse()
git_dir = sys.argv[1];
build_js.build_js_develop(sys.argv[1])
base.print_info('argv :'+' '.join(sys.argv))
base.cmd_in_dir(git_dir + '/build_tools/', 'python3', ['configure.py', '--develop', '1'] + sys.argv[2:])
config.parse()
config.parse_defaults()
if base.is_exist(git_dir + "/server/FileConverter/bin/fonts.log"):
base.print_info('remove font cache to regenerate fonts in external sdkjs volume')
base.delete_file(git_dir + "/server/FileConverter/bin/fonts.log");
# external server volume
if base.is_exist(sys.argv[1] + '/server/DocService/package.json'):
base.print_info('replace supervisor cfg to run docservice and converter from source')
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
base.print_info('run_server.run_docker_server')
run_server.run_docker_server();
else:
#Fix theme generation for external sdkjs volume
if base.is_exist(git_dir + "/server/FileConverter/bin/DoctRenderer.config"):
base.print_info('replace DoctRenderer.config for external sdkjs volume')
base.generate_doctrenderer_config(git_dir + "/server/FileConverter/bin/DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
addons = {}
addons.update(base.get_sdkjs_addons())
addons.update(base.get_web_apps_addons())
staticContent = ""
for addon in addons:
if (addon):
staticContent += '"/' + addon + '": {"path": "/var/www/onlyoffice/documentserver/' + addon + '","options": {"maxAge": "7d"}},'
if staticContent:
base.print_info('replace production-linux.json for addons'+staticContent)
base.replaceInFileRE("/etc/onlyoffice/documentserver/production-linux.json", '"static_content": {.*', '"static_content": {' + staticContent)
base.print_info('replace supervisor cfg to run docservice and converter from pkg')
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
base.print_info('run_server.run_docker_sdk_web_apps: ' + git_dir)
run_server.run_docker_sdk_web_apps(git_dir)

34
make.py
View File

@ -1,19 +1,32 @@
#!/usr/bin/env python
import os
import sys
sys.path.append('scripts')
sys.path.append('scripts/develop')
sys.path.append('scripts/develop/vendor')
sys.path.append('scripts/core_common')
sys.path.append('scripts/core_common/modules')
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__name__ + '/scripts')
sys.path.append(__dir__name__ + '/scripts/develop')
sys.path.append(__dir__name__ + '/scripts/develop/vendor')
sys.path.append(__dir__name__ + '/scripts/core_common')
sys.path.append(__dir__name__ + '/scripts/core_common/modules')
sys.path.append(__dir__name__ + '/scripts/core_common/modules/android')
import config
import base
import build
import build_sln
import build_js
import build_server
import deploy
import make_common
import develop
import argparse
base.check_python()
parser = argparse.ArgumentParser(description="options")
parser.add_argument("--build-only-branding", action="store_true")
args = parser.parse_args()
if (args.build_only_branding):
base.set_env("OO_BUILD_ONLY_BRANDING", "1")
# parse configuration
config.parse()
@ -59,13 +72,15 @@ if ("1" == config.option("update")):
base.configure_common_apps()
# developing...
develop.make();
develop.make()
# check only js builds
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
build_js.make()
exit(0)
#base.check_tools()
# core 3rdParty
make_common.make()
@ -78,11 +93,8 @@ if config.check_option("module", "desktop"):
base.set_env("DESKTOP_URL_UPDATES_MAIN_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.json")
base.set_env("DESKTOP_URL_UPDATES_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcastdev.json")
if ("windows" == base.host_platform()):
base.set_env("VIDEO_PLAYER_VLC_DIR", base_dir + "/../desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc")
# build
build.make()
build_sln.make()
# js
build_js.make()

View File

@ -22,7 +22,6 @@ parser.add_argument("-B", "--build", dest="build", type=str,
args = parser.parse_args()
# vars
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
common.os_family = utils.host_platform()
common.platform = args.platform
common.prefix = common.platformPrefixes[common.platform] if common.platform in common.platformPrefixes else ""
@ -30,14 +29,14 @@ common.targets = args.targets
common.clean = "clean" in args.targets
common.sign = "sign" in args.targets
common.deploy = "deploy" in args.targets
common.version = args.version if (args.version is not None) else utils.get_env("PRODUCT_VERSION", "1.0.0")
common.build = args.build if (args.build is not None) else utils.get_env("BUILD_NUMBER", "1")
common.channel = utils.get_env("BUILD_CHANNEL", "other")
common.version = args.version if args.version else utils.get_env("BUILD_VERSION", "0.0.0")
common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0")
common.branding = args.branding
common.timestamp = utils.get_timestamp()
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
common.branding_dir = utils.get_abspath(common.workspace_dir + "/" + args.branding) if args.branding else common.workspace_dir
common.deploy_data = utils.get_path(common.workspace_dir + "/deploy.txt")
common.summary = []
common.deploy_data = []
utils.log("workspace_dir: " + common.workspace_dir)
utils.log("os_family: " + common.os_family)
utils.log("platform: " + str(common.platform))
utils.log("prefix: " + str(common.prefix))
@ -49,6 +48,8 @@ utils.log("version: " + common.version)
utils.log("build: " + common.build)
utils.log("branding: " + str(common.branding))
utils.log("timestamp: " + common.timestamp)
utils.log("workspace_dir: " + common.workspace_dir)
utils.log("branding_dir: " + common.branding_dir)
# branding
if common.branding is not None:
@ -63,22 +64,24 @@ import package_mobile
# build
utils.set_cwd(common.workspace_dir, verbose=True)
utils.delete_file("deploy.json")
utils.delete_file(common.deploy_data)
if "core" in common.targets:
package_core.make()
if "closure-maps-os" in common.targets:
package_core.deploy_closure_maps("opensource")
if "closure-maps-com" in common.targets:
package_core.deploy_closure_maps("commercial")
if "closuremaps_opensource" in common.targets:
package_core.deploy_closuremaps_sdkjs("opensource")
package_core.deploy_closuremaps_webapps("opensource")
if "closuremaps_commercial" in common.targets:
package_core.deploy_closuremaps_sdkjs("commercial")
package_core.deploy_closuremaps_webapps("commercial")
if "desktop" in common.targets:
package_desktop.make()
if "builder" in common.targets:
package_builder.make()
if "server-community" in common.targets:
if "server_community" in common.targets:
package_server.make("community")
if "server-enterprise" in common.targets:
if "server_enterprise" in common.targets:
package_server.make("enterprise")
if "server-developer" in common.targets:
if "server_developer" in common.targets:
package_server.make("developer")
if "mobile" in common.targets:
package_mobile.make()

View File

@ -14,6 +14,8 @@ import re
import stat
import json
__file__script__path__ = os.path.dirname( os.path.realpath(__file__))
# common functions --------------------------------------
def get_script_dir(file=""):
test_file = file
@ -188,28 +190,56 @@ def copy_dir(src, dst):
if is_dir(dst):
delete_dir(dst)
try:
shutil.copytree(get_path(src), get_path(dst))
except OSError as e:
print('Directory not copied. Error: %s' % e)
shutil.copytree(get_path(src), get_path(dst))
except:
if ("windows" == host_platform()) and copy_dir_windows(src, dst):
return
print("Directory not copied")
return
def copy_dir_windows(src, dst):
if is_dir(dst):
delete_dir(dst)
err = cmd("robocopy", [get_path(src), get_path(dst), "/e", "/NFL", "/NDL", "/NJH", "/NJS", "/nc", "/ns", "/np"], True)
if (1 == err):
return True
return False
def delete_dir_with_access_error(path):
def delete_file_on_error(func, path, exc_info):
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
if ("windows" != host_platform()):
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
return
elif (0 != path.find("\\\\?\\")):
# abspath not work with long names
full_path = path
drive_pos = full_path.find(":")
if (drive_pos < 0) or (drive_pos > 2):
full_path = os.getcwd() + "\\" + full_path
else:
full_path = full_path
if (len(full_path) >= 260):
full_path = "\\\\?\\" + full_path
if not os.access(full_path, os.W_OK):
os.chmod(full_path, stat.S_IWUSR)
func(full_path)
return
if not is_dir(path):
print("delete warning [folder not exist]: " + path)
return
shutil.rmtree(get_path(path), ignore_errors=False, onerror=delete_file_on_error)
shutil.rmtree(os.path.normpath(get_path(path)), ignore_errors=False, onerror=delete_file_on_error)
return
def delete_dir(path):
if not is_dir(path):
print("delete warning [folder not exist]: " + path)
return
shutil.rmtree(get_path(path), ignore_errors=True)
if ("windows" == host_platform()):
delete_dir_with_access_error(path)
else:
shutil.rmtree(get_path(path), ignore_errors=True)
return
def copy_lib(src, dst, name):
@ -220,6 +250,9 @@ def copy_lib(src, dst, name):
create_dir(dst + "/simulator")
copy_dir(src + "/simulator/" + name + ".framework", dst + "/simulator/" + name + ".framework")
if is_dir(dst + "/" + name + ".xcframework"):
delete_dir(dst + "/" + name + ".xcframework")
cmd("xcodebuild", ["-create-xcframework",
"-framework", dst + "/" + name + ".framework",
"-framework", dst + "/simulator/" + name + ".framework",
@ -262,44 +295,66 @@ def copy_exe(src, dst, name):
copy_file(src + "/" + name + exe_ext, dst + "/" + name + exe_ext)
return
def readFileCommon(path):
file_data = ""
try:
with open(get_path(path), "r") as file:
file_data = file.read()
except Exception as e:
with open(get_path(path), "r", encoding="utf-8") as file:
file_data = file.read()
return file_data
def writeFileCommon(path, data):
file_data = ""
try:
with open(get_path(path), "w") as file:
file.write(data)
except Exception as e:
with open(get_path(path), "w", encoding="utf-8") as file:
file.write(data)
return
def replaceInFile(path, text, textReplace):
if not is_file(path):
print("[replaceInFile] file not exist: " + path)
return
filedata = ""
with open(get_path(path), "r") as file:
filedata = file.read()
filedata = readFileCommon(path)
filedata = filedata.replace(text, textReplace)
delete_file(path)
with open(get_path(path), "w") as file:
file.write(filedata)
writeFileCommon(path, filedata)
return
def replaceInFileUtf8(path, text, textReplace):
if not is_file(path):
print("[replaceInFile] file not exist: " + path)
return
filedata = ""
with open(get_path(path), "rb") as file:
filedata = file.read().decode("UTF-8")
filedata = filedata.replace(text, textReplace)
delete_file(path)
with open(get_path(path), "wb") as file:
file.write(filedata.encode("UTF-8"))
return
def replaceInFileRE(path, pattern, textReplace):
if not is_file(path):
print("[replaceInFile] file not exist: " + path)
return
filedata = ""
with open(get_path(path), "r") as file:
filedata = file.read()
filedata = readFileCommon(path)
filedata = re.sub(pattern, textReplace, filedata)
delete_file(path)
with open(get_path(path), "w") as file:
file.write(filedata)
writeFileCommon(path, filedata)
return
def readFile(path):
if not is_file(path):
return ""
filedata = ""
with open(get_path(path), "r") as file:
filedata = file.read()
return filedata
return readFileCommon(path)
def writeFile(path, data):
if is_file(path):
delete_file(path)
with open(get_path(path), "w") as file:
file.write(data)
writeFileCommon(path, data)
return
# system cmd methods ------------------------------------
@ -329,7 +384,7 @@ def cmd2(prog, args=[], is_no_errors=False):
sys.exit("Error (" + prog + "): " + str(ret))
return ret
def cmd_exe(prog, args):
def cmd_exe(prog, args, is_no_errors=False):
prog_dir = os.path.dirname(prog)
env_dir = os.environ
if ("linux" == host_platform()):
@ -351,7 +406,7 @@ def cmd_exe(prog, args):
command += (" \"" + arg + "\"")
process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir)
ret = process.wait()
if ret != 0:
if ret != 0 and True != is_no_errors:
sys.exit("Error (" + prog + "): " + str(ret))
return ret
@ -592,6 +647,20 @@ def get_gcc_version():
def qt_setup(platform):
compiler = config.check_compiler(platform)
qt_dir = config.option("qt-dir") if (-1 == platform.find("_xp")) else config.option("qt-dir-xp")
# qt bug
if (host_platform() == "mac"):
for compiler_folder in glob.glob(qt_dir + "/*"):
if is_dir(compiler_folder):
old_path_file = compiler_folder + "/mkspecs/features/toolchain.prf"
new_path_file = compiler_folder + "/mkspecs/features/toolchain.prf.bak"
if (is_file(old_path_file) and not is_file(new_path_file)):
try:
copy_file(old_path_file, new_path_file)
copy_file(get_script_dir() + "/../tools/mac/toolchain.prf", old_path_file)
except IOError as e:
print("Unable to copy file: " + old_path_file)
compiler_platform = compiler["compiler"] if platform_is_32(platform) else compiler["compiler_64"]
qt_dir = qt_dir + "/" + compiler_platform
@ -616,6 +685,53 @@ def qt_version():
qt_dir = qt_dir.split("/")[-3]
return "".join(i for i in qt_dir if (i.isdigit() or i == "."))
def check_congig_option_with_platfom(platform, option_name):
if config.check_option("config", option_name):
return True
if (0 == platform.find("win")) and config.check_option("config_addon_windows", option_name):
return True
elif (0 == platform.find("linux")) and config.check_option("config_addon_linux", option_name):
return True
elif (0 == platform.find("mac")) and config.check_option("config_addon_macos", option_name):
return True
elif (0 == platform.find("ios")) and config.check_option("config_addon_ios", option_name):
return True
elif (0 == platform.find("android")) and config.check_option("config_addon_android", option_name):
return True
return False
def correct_makefile_after_qmake(platform, file):
if (0 == platform.find("android")):
if ("android_arm64_v8a" == platform):
replaceInFile(file, "_arm64-v8a.a", ".a")
replaceInFile(file, "_arm64-v8a.so", ".so")
if ("android_armv7" == platform):
replaceInFile(file, "_armeabi-v7a.a", ".a")
replaceInFile(file, "_armeabi-v7a.so", ".so")
if ("android_x86_64" == platform):
replaceInFile(file, "_x86_64.a", ".a")
replaceInFile(file, "_x86_64.so", ".so")
if ("android_x86" == platform):
replaceInFile(file, "_x86.a", ".a")
replaceInFile(file, "_x86.so", ".so")
return
def qt_config_platform_addon(platform):
config_addon = ""
if (0 == platform.find("win")):
config_addon += (" " + config.option("config_addon_windows"))
elif (0 == platform.find("linux")):
config_addon += (" " + config.option("config_addon_linux"))
elif (0 == platform.find("mac")):
config_addon += (" " + config.option("config_addon_macos"))
elif (0 == platform.find("ios")):
config_addon += (" " + config.option("config_addon_ios"))
elif (0 == platform.find("android")):
config_addon += (" " + config.option("config_addon_android"))
if (config_addon == " "):
config_addon = ""
return config_addon
def qt_config(platform):
config_param = config.option("module") + " " + config.option("config") + " " + config.option("features")
config_param_lower = config_param.lower()
@ -633,20 +749,42 @@ def qt_config(platform):
if config.check_option("module", "mobile"):
config_param += " support_web_socket"
is_disable_pch = False
if ("ios" == platform):
config_param += " disable_precompiled_header"
is_disable_pch = True
if (0 == platform.find("android")):
is_disable_pch = True
if not config.check_option("config", "debug"):
is_disable_pch = True
if is_disable_pch:
config_param += " disable_precompiled_header"
if ("linux_arm64" == platform):
config_param += " linux_arm64"
config_param += qt_config_platform_addon(platform)
return config_param
def qt_major_version():
qt_dir = qt_version()
return qt_dir.split(".")[0]
def qt_version_decimal():
qt_dir = qt_version()
return 10 * int(qt_dir.split(".")[0]) + int(qt_dir.split(".")[1])
def qt_config_as_param(value):
qt_version = qt_version_decimal()
ret_params = []
if (66 > qt_version):
ret_params.append("CONFIG+=" + value)
else:
params = value.split()
for name in params:
ret_params.append("CONFIG+=" + name)
return ret_params
def qt_copy_lib(lib, dir):
qt_dir = get_env("QT_DEPLOY")
if ("windows" == host_platform()):
@ -737,7 +875,7 @@ def app_make():
return "make"
# doctrenderer.config
def generate_doctrenderer_config(path, root, product, vendor = ""):
def generate_doctrenderer_config(path, root, product, vendor = "", dictionaries = ""):
content = "<Settings>\n"
content += ("<file>" + root + "sdkjs/common/Native/native.js</file>\n")
@ -756,6 +894,9 @@ def generate_doctrenderer_config(path, root, product, vendor = ""):
content += ("<file>" + vendor_dir + "xregexp/xregexp-all-min.js</file>\n")
content += ("<sdkjs>" + root + "sdkjs</sdkjs>\n")
if ("" != dictionaries):
content += ("<dictionaries>" + dictionaries + "</dictionaries>\n")
if (False): # old html file
content += ("<htmlfile>" + vendor_dir + "jquery/jquery.min.js</htmlfile>\n")
if ("desktop" == product):
@ -905,15 +1046,15 @@ def web_apps_addons_param():
def download(url, dst):
return cmd_exe("curl", ["-L", "-o", dst, url])
def extract(src, dst):
def extract(src, dst, is_no_errors=False):
app = "7za" if ("mac" == host_platform()) else "7z"
return cmd_exe(app, ["x", "-y", src, "-o" + dst])
return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors)
def extract_unicode(src, dst):
def extract_unicode(src, dst, is_no_errors=False):
if "windows" == host_platform():
run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"])
return
return extract(src, dst)
return extract(src, dst, is_no_errors)
def archive_folder(src, dst):
app = "7za" if ("mac" == host_platform()) else "7z"
@ -1070,6 +1211,10 @@ def mac_correct_rpath_x2t(dir):
cmd("chmod", ["-v", "+x", "./pluginsmanager"])
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./pluginsmanager"], True)
mac_correct_rpath_binary("./pluginsmanager", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network"])
if is_file("./vboxtester"):
cmd("chmod", ["-v", "+x", "./vboxtester"])
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./vboxtester"], True)
mac_correct_rpath_binary("./vboxtester", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network"])
os.chdir(cur_dir)
return
@ -1079,6 +1224,8 @@ def mac_correct_rpath_docbuilder(dir):
cmd("chmod", ["-v", "+x", "./docbuilder"])
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./docbuilder"], True)
mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
mac_correct_rpath_library("docbuilder.c", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "doctrenderer"])
cmd("install_name_tool", ["-add_rpath", "@loader_path", "libdocbuilder.c.dylib"], True)
os.chdir(cur_dir)
return
@ -1114,6 +1261,19 @@ def mac_correct_rpath_desktop(dir):
os.chdir(cur_dir)
return
# Set the rpath of the given shared libraries (named "lib" + entry) to $ORIGIN
# so each library resolves its siblings from its own directory, using the
# bundled patchelf tool. Restores the working directory afterwards.
def linux_set_origin_rpath_libraries(dir, libs):
tools_dir = get_script_dir() + "/../tools/linux/elf/"
cur_dir = os.getcwd()
os.chdir(dir)
for lib in libs:
# "\\$ORIGIN" keeps the literal $ORIGIN token from being expanded early
cmd(tools_dir + "patchelf", ["--set-rpath", "\\$ORIGIN", "lib" + lib], True)
os.chdir(cur_dir)
return
# Apply the $ORIGIN rpath fix to every shared library shipped alongside
# docbuilder in the given directory.
def linux_correct_rpath_docbuilder(dir):
linux_set_origin_rpath_libraries(dir, ["docbuilder.c.so", "icuuc.so.58", "doctrenderer.so", "graphics.so", "kernel.so", "kernel_network.so", "UnicodeConverter.so"])
return
def common_check_version(name, good_version, clean_func):
version_good = name + "_version_" + good_version
version_path = "./" + name + ".data"
@ -1163,9 +1323,38 @@ def copy_sdkjs_plugin(src_dir, dst_dir, name, is_name_as_guid=False, is_desktop_
delete_dir(dst_deploy_dir)
return
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False):
plugins_dir = get_script_dir() + "/../../onlyoffice.github.io/sdkjs-plugins/content"
# Deploy the marketplace plugin from onlyoffice.github.io/store/plugin into
# dst_dir, replacing any previous copy.
#   is_name_as_guid  -- install under the plugin GUID instead of "marketplace"
#   is_desktop_local -- rewrite absolute sdkjs-plugins URLs in *.html to
#                       relative paths for local desktop use
#   is_store_copy    -- additionally bundle the store content, minus the
#                       plugin source folders themselves
def copy_marketplace_plugin(dst_dir, is_name_as_guid=False, is_desktop_local=False, is_store_copy=False):
git_dir = __file__script__path__ + "/../.."
# disabled legacy deployment path, kept for reference
if False:
# old version
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", dst_dir, "manager", is_name_as_guid, is_desktop_local)
return
src_dir_path = git_dir + "/onlyoffice.github.io/store/plugin"
name = "marketplace"
if is_name_as_guid:
name = "{AA2EA9B6-9EC2-415F-9762-634EE8D9A95E}"
dst_dir_path = dst_dir + "/" + name
# always start from a clean destination folder
if is_dir(dst_dir_path):
delete_dir(dst_dir_path)
create_dir(dst_dir_path)
copy_dir_content(src_dir_path, dst_dir_path)
if is_desktop_local:
for file in glob.glob(dst_dir_path + "/*.html"):
replaceInFile(file, "https://onlyoffice.github.io/sdkjs-plugins/", "../")
if is_store_copy:
copy_dir(git_dir + "/onlyoffice.github.io/store", dst_dir_path + "/store")
delete_dir(dst_dir_path + "/store/plugin")
delete_dir(dst_dir_path + "/store/plugin-dev")
return
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False, isXp=False):
plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content"
plugins_list_config = config.option("sdkjs-plugin")
if isXp:
plugins_list_config="photoeditor, macros, highlightcode, doc2md"
if ("" == plugins_list_config):
return
plugins_list = plugins_list_config.rsplit(", ")
@ -1174,7 +1363,7 @@ def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False):
return
def copy_sdkjs_plugins_server(dst_dir, is_name_as_guid=False, is_desktop_local=False):
plugins_dir = get_script_dir() + "/../../onlyoffice.github.io/sdkjs-plugins/content"
plugins_dir = __file__script__path__ + "/../../onlyoffice.github.io/sdkjs-plugins/content"
plugins_list_config = config.option("sdkjs-plugin-server")
if ("" == plugins_list_config):
return
@ -1328,7 +1517,7 @@ def copy_v8_files(core_dir, deploy_dir, platform, is_xp=False):
copy_files(directory_v8 + platform + "/icudt*.dat", deploy_dir + "/")
return
def clone_marketplace_plugin(out_dir, is_name_as_guid=False, is_replace_paths=False, is_delete_git_dir=True, git_owner=""):
def clone_marketplace_plugin(out_dir, is_name_as_guid=False, is_replace_paths=False, is_delete_git_dir=True, git_owner=""):
old_cur = os.getcwd()
os.chdir(out_dir)
git_update("onlyoffice.github.io", False, True, git_owner)
@ -1460,3 +1649,108 @@ def correct_elf_rpath_directory(directory, origin, is_recursion = True):
correct_elf_rpath_directory(file, origin)
return
# Whether the JS (sdkjs/web-apps) build step should run for the configured
# module; the "osign" module has no JS components.
def is_need_build_js():
if "osign" == config.option("module"):
return False
return True
# Copy dictionary folders (one folder per locale, e.g. en_US) from src to dst.
#   is_hyphen -- include hyphenation dictionaries (hyph_<locale>.dic)
#   is_spell  -- include spellcheck dictionaries (<locale>.dic)
# Loose files directly under src are always copied; .git is skipped.
def copy_dictionaries(src, dst, is_hyphen = True, is_spell = True):
if (False == is_hyphen) and (False == is_spell):
return
if not is_dir(dst):
create_dir(dst)
# turn src into a glob pattern over its direct children
src_folder = src
if ("/" != src[-1:]):
src_folder += "/"
src_folder += "*"
for file in glob.glob(src_folder):
if is_file(file):
copy_file(file, dst)
continue
basename = os.path.basename(file)
if (".git" == basename):
continue
# both kinds requested: copy the locale folder wholesale
if (True == is_hyphen) and (True == is_spell):
copy_dir(file, dst + "/" + basename)
continue
is_spell_present = is_file(file + "/" + basename + ".dic")
is_hyphen_present = is_file(file + "/hyph_" + basename + ".dic")
is_dir_need = False
if (is_hyphen and is_hyphen_present) or (is_spell and is_spell_present):
is_dir_need = True
if not is_dir_need:
continue
lang_folder = dst + "/" + basename
create_dir(lang_folder)
# NOTE(review): the extra copy_dir_content arguments presumably act as
# include-prefix / exclude-prefix filters - confirm against its definition
if is_hyphen and is_hyphen_present:
copy_dir_content(file, lang_folder, "hyph_", "")
if is_spell and is_spell_present:
copy_dir_content(file, lang_folder, "", "hyph_")
# prune specific auxiliary dictionary files from the result
if is_file(dst + "/en_US/en_US_thes.dat"):
delete_file(dst + "/en_US/en_US_thes.dat")
delete_file(dst + "/en_US/en_US_thes.idx")
if is_file(dst + "/ru_RU/ru_RU_oo3.dic"):
delete_file(dst + "/ru_RU/ru_RU_oo3.dic")
delete_file(dst + "/ru_RU/ru_RU_oo3.aff")
if is_file(dst + "/uk_UA/th_uk_UA.dat"):
delete_file(dst + "/uk_UA/th_uk_UA.dat")
delete_file(dst + "/uk_UA/th_uk_UA.idx")
return
def check_module_version(actual_version, clear_func):
    # Compare the stored module version marker with the expected version;
    # on mismatch rewrite the marker file and invoke the supplied clean-up
    # callback so stale build artifacts are removed.
    marker = "./module.version"
    if actual_version == readFile(marker):
        return
    if is_file(marker):
        delete_file(marker)
    writeFile(marker, actual_version)
    clear_func()
    return
# On Linux, unpack the bundled python3 from tools/linux (first run only) and
# prepend its bin directory to PATH so later build steps use a known
# interpreter. No-op on other host platforms.
def check_python():
if ("linux" != host_platform()):
return
directory = __file__script__path__ + "/../tools/linux"
directory_bin = __file__script__path__ + "/../tools/linux/python3/bin"
if not is_dir(directory + "/python3"):
cmd("tar", ["xfz", directory + "/python3.tar.gz", "-C", directory])
# also expose the interpreter under the plain "python" name
cmd("ln", ["-s", directory_bin + "/python3", directory_bin + "/python"])
# escape spaces before the path goes into the PATH environment variable
directory_bin = directory_bin.replace(" ", "\\ ")
os.environ["PATH"] = directory_bin + os.pathsep + os.environ["PATH"]
return
# On a Linux (non-arm) host targeting linux_arm64, build the cross-compiled
# Qt into tools/linux/qt/arm64 via the helper script. No-op otherwise.
def check_tools():
if ("linux" == host_platform()):
directory = __file__script__path__ + "/../tools/linux"
if not is_os_arm() and config.check_option("platform", "linux_arm64"):
if not is_dir(directory + "/qt"):
create_dir(directory + "/qt")
cmd("python", [directory + "/arm/build_qt.py", "--arch", "arm64", directory + "/qt/arm64"])
return
# Apply a minimal conflict-marker style patch: the patch file contains one
# "<<<<<<< old ======= new >>>>>>>" section, and the old fragment found in
# `file` is replaced in place with the new fragment.
def apply_patch(file, patch):
patch_content = readFile(patch)
index1 = patch_content.find("<<<<<<<")
index2 = patch_content.find("=======")
index3 = patch_content.find(">>>>>>>")
# all three marker strings are 7 characters long, hence the +7 offsets
file_content_old = patch_content[index1 + 7:index2].strip()
file_content_new = patch_content[index2 + 7:index3].strip()
#file_content_new = "\n#if 0" + file_content_old + "#else" + file_content_new + "#endif\n"
replaceInFile(file, file_content_old, file_content_new)
return

View File

@ -1,118 +0,0 @@
#!/usr/bin/env python
import config
import base
import os
import multiprocessing
# Run qmake + make for the given .pro file on every configured platform.
#   makefiles_dir      -- directory receiving the generated per-platform makefiles
#   pro_file           -- the qmake project file to build
#   qmake_config_addon -- extra CONFIG tokens appended to the platform config
# The whole process environment is snapshotted per platform and restored at the
# end so per-platform toolchain setup (Android NDK, Qt paths) does not leak.
def make_pro_file(makefiles_dir, pro_file, qmake_config_addon=""):
platforms = config.option("platform").split()
for platform in platforms:
if not platform in config.platforms:
continue
print("------------------------------------------")
print("BUILD_PLATFORM: " + platform)
print("------------------------------------------")
# snapshot the environment; restored after this platform's build
old_env = dict(os.environ)
# if you need change output libraries path - set the env variable
# base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
isAndroid = False if (-1 == platform.find("android")) else True
# Android: point PATH at the Qt-for-Android and NDK toolchain binaries
if isAndroid:
toolchain_platform = "linux-x86_64"
if ("mac" == base.host_platform()):
toolchain_platform = "darwin-x86_64"
base.set_env("ANDROID_NDK_HOST", toolchain_platform)
old_path = base.get_env("PATH")
new_path = base.qt_setup(platform) + "/bin:"
new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
new_path += old_path
base.set_env("PATH", new_path)
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
if (-1 != platform.find("ios")):
base.hack_xcode_ios()
# makefile suffix
file_suff = platform
if (config.check_option("config", "debug")):
file_suff += "_debug_"
file_suff += config.option("branding")
# setup qt
qt_dir = base.qt_setup(platform)
base.set_env("OS_DEPLOY", platform)
# qmake CONFIG+=...
config_param = base.qt_config(platform)
if ("" != qmake_config_addon):
config_param += (" " + qmake_config_addon)
# qmake ADDON
qmake_addon = []
if ("" != config.option("qmake_addon")):
qmake_addon = config.option("qmake_addon").split()
# skip platforms whose Qt kit is not installed
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
print("THIS PLATFORM IS NOT SUPPORTED")
continue
# non windows platform
if not base.is_windows():
if base.is_file(makefiles_dir + "/build.makefile_" + file_suff):
base.delete_file(makefiles_dir + "/build.makefile_" + file_suff)
print("make file: " + makefiles_dir + "/build.makefile_" + file_suff)
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if ("1" == config.option("clean")):
base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if not base.is_file(pro_file):
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
# parallel make unless multiprocess is disabled
if ("0" != config.option("multiprocess")):
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff, "-j" + str(multiprocessing.cpu_count())])
else:
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
else:
# Windows: drive vcvarsall + qmake + nmake through a generated batch file
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
qmake_addon_string = ""
if ("" != config.option("qmake_addon")):
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
if ("1" == config.option("clean")):
qmake_bat.append("call nmake clean -f " + makefiles_dir + "/build.makefile_" + file_suff)
qmake_bat.append("call nmake distclean -f " + makefiles_dir + "/build.makefile_" + file_suff)
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
if ("0" != config.option("multiprocess")):
# /MP enables parallel compilation in cl.exe
qmake_bat.append("set CL=/MP")
qmake_bat.append("call nmake -f " + makefiles_dir + "/build.makefile_" + file_suff)
base.run_as_bat(qmake_bat)
# restore the pre-platform environment
os.environ.clear()
os.environ.update(old_env)
base.delete_file(".qmake.stash")
# make build.pro
# Build the main build.pro for every configured platform; on iOS with
# bundle_xcframeworks add a simulator pass, and for the Windows onlyoffice
# builder module also generate the docbuilder COM/.NET solutions.
def make():
make_pro_file("makefiles", "build.pro")
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
make_pro_file("makefiles", "build.pro", "xcframework_platform_ios_simulator")
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
# check replace
new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder.h")
if ("2019" == config.option("vs-version")):
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
if (True):
new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
base.restorePathForBuilder(new_path_net)
else:
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
base.restorePathForBuilder(new_replace_path)
return

View File

@ -27,6 +27,8 @@ def correct_sdkjs_licence(directory):
def make():
if ("1" == base.get_env("OO_NO_BUILD_JS")):
return
if not base.is_need_build_js():
return
base.set_env('NODE_ENV', 'production')
@ -53,8 +55,6 @@ def make():
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/desktop/sdkjs")
correct_sdkjs_licence(out_dir + "/desktop/sdkjs")
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/desktop/web-apps")
if not base.is_file(out_dir + "/desktop/sdkjs/common/AllFonts.js"):
base.copy_file(base_dir + "/../sdkjs/common/HtmlFileInternal/AllFonts.js", out_dir + "/desktop/sdkjs/common/AllFonts.js")
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/mobile")
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/embed")
@ -65,7 +65,8 @@ def make():
build_interface(base_dir + "/../desktop-apps/common/loginpage/build")
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/index.html", out_dir + "/desktop/index.html")
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/noconnect.html", out_dir + "/desktop/noconnect.html")
# mobile
if config.check_option("module", "mobile"):
build_sdk_native(base_dir + "/../sdkjs/build", False)
@ -73,38 +74,29 @@ def make():
base.create_dir(out_dir + "/mobile/sdkjs")
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_word.js")
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/cell/native/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_cell.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_slide.js")
base.join_scripts(prefix_js, out_dir + "/mobile/sdkjs/banners.js")
base.create_dir(out_dir + "/mobile/sdkjs/word")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/word/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/cell")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/cell/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/slide")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/slide/script.bin")
base.delete_file(out_dir + "/mobile/sdkjs/banners_word.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_cell.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_slide.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
return
# JS build
@ -148,27 +140,40 @@ def build_sdk_builder(directory):
def build_sdk_native(directory, minimize=True):
#_run_npm_cli(directory)
_run_npm(directory)
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param())
addons = base.sdkjs_addons_param()
if not config.check_option("sdkjs-addons", "sdkjs-native"):
addons.append("--addon=sdkjs-native")
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
return
def build_sdkjs_develop(root_dir):
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
def build_js_develop(root_dir):
#_run_npm_cli(root_dir + "/sdkjs/build")
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
build_sdkjs_develop(root_dir)
_run_npm(root_dir + external_folder + "/web-apps/build")
_run_npm(root_dir + external_folder + "/web-apps/build/sprites")
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
old_cur = os.getcwd()
old_product_version = base.get_env("PRODUCT_VERSION")
base.set_env("PRODUCT_VERSION", old_product_version + "d")
os.chdir(root_dir + external_folder + "/web-apps/vendor/framework7-react")
base.cmd("npm", ["install"])
base.cmd("npm", ["ci"])
base.cmd("npm", ["run", "deploy-word"])
base.cmd("npm", ["run", "deploy-cell"])
base.cmd("npm", ["run", "deploy-slide"])

View File

@ -14,6 +14,9 @@ parser.add_option("--output",
parser.add_option("--write-version",
action="store_true", dest="write_version", default=False,
help="Create version file of build")
parser.add_option("--minimize",
action="store", type="string", dest="minimize", default="0",
help="Is minimized version")
(options, args) = parser.parse_args(arguments)
def write_version_files(output_dir):
@ -32,7 +35,11 @@ def write_version_files(output_dir):
# parse configuration
config.parse()
config.parse_defaults()
config.extend_option("jsminimize", "0")
isMinimize = False
if ("1" == options.minimize or "true" == options.minimize):
isMinimize = True
config.set_option("jsminimize", "disable")
branding = config.option("branding-name")
if ("" == branding):
@ -46,41 +53,32 @@ if (options.output):
base.create_dir(out_dir)
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
build_js.build_sdk_native(base_dir + "/../sdkjs/build", isMinimize)
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_word.js")
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/cell/native/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_cell.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_slide.js")
base.join_scripts(prefix_js, out_dir + "/banners.js")
base.create_dir(out_dir + "/word")
base.join_scripts([out_dir + "/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/word/script.bin")
base.create_dir(out_dir + "/cell")
base.join_scripts([out_dir + "/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/cell/script.bin")
base.create_dir(out_dir + "/slide")
base.join_scripts([out_dir + "/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/slide/script.bin")
base.delete_file(out_dir + "/banners_word.js")
base.delete_file(out_dir + "/banners_cell.js")
base.delete_file(out_dir + "/banners_slide.js")
base.delete_file(out_dir + "/banners.js")
# Write sdk version mark file if needed
if (options.write_version):

View File

@ -16,8 +16,7 @@ def make():
if("" != config.option("branding")):
branding_dir = git_dir + '/' + config.option("branding") + '/server'
base.cmd_in_dir(server_dir, "npm", ["install"])
base.cmd_in_dir(server_dir, "grunt", ["--no-color", "-v"] + base.server_addons_param())
build_server_with_addons()
#env variables
product_version = base.get_env('PRODUCT_VERSION')
@ -30,18 +29,16 @@ def make():
cur_date = datetime.date.today().strftime("%m/%d/%Y")
server_build_dir = server_dir + "/build/server"
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
base.replaceInFileRE(server_build_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
base.replaceInFileRE(server_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
custom_public_key = branding_dir + '/debug.js'
if(base.is_exist(custom_public_key)):
base.copy_file(custom_public_key, server_build_dir + '/Common/sources')
base.copy_file(custom_public_key, server_dir + '/Common/sources')
pkg_target = "node14"
pkg_target = "node16"
if ("linux" == base.host_platform()):
pkg_target += "-linux"
@ -51,16 +48,26 @@ def make():
if ("windows" == base.host_platform()):
pkg_target += "-win"
base.cmd_in_dir(server_build_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
base.cmd_in_dir(server_build_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
base.cmd_in_dir(server_build_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
base.cmd_in_dir(server_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
base.cmd_in_dir(server_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
base.cmd_in_dir(server_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
example_dir = base.get_script_dir() + "/../../document-server-integration/web/documentserver-example/nodejs"
base.delete_dir(example_dir + "/node_modules")
base.cmd_in_dir(example_dir, "npm", ["install"])
base.cmd_in_dir(example_dir, "npm", ["ci"])
base.cmd_in_dir(example_dir, "pkg", [".", "-t", pkg_target, "-o", "example"])
def build_server_with_addons():
addons = {}
addons["server"] = [True, False]
addons.update(base.get_server_addons())
for addon in addons:
if (addon):
addon_dir = base.get_script_dir() + "/../../" + addon
base.cmd_in_dir(addon_dir, "npm", ["ci"])
base.cmd_in_dir(addon_dir, "npm", ["run", "build"])
def build_server_develop():
server_dir = base.get_script_dir() + "/../../server"
base.cmd_in_dir(server_dir, "npm", ["install"])
base.cmd_in_dir(server_dir, "npm", ["ci"])
base.cmd_in_dir(server_dir, "grunt", ["develop", "-v"] + base.server_addons_param())

49
scripts/build_sln.py Normal file
View File

@ -0,0 +1,49 @@
#!/usr/bin/env python
import config
import base
import os
import sys
sys.path.append(os.path.dirname(__file__) + "/..")
import sln
import qmake
# make solution
# Build every project listed in the sln.json solution file, per configured
# platform, by delegating to qmake.make(); mirrors the extra iOS-simulator
# and Windows docbuilder-solution handling of the plain build.pro path.
def make(solution=""):
platforms = config.option("platform").split()
for platform in platforms:
if not platform in config.platforms:
continue
print("------------------------------------------")
print("BUILD_PLATFORM: " + platform)
print("------------------------------------------")
if ("" == solution):
solution = "./sln.json"
projects = sln.get_projects(solution, platform)
for pro in projects:
qmake_main_addon = ""
# x2t on Android debug: strip anyway to keep the library loadable
if (0 == platform.find("android")) and (-1 != pro.find("X2tConverter.pro")):
if config.check_option("config", "debug") and not config.check_option("config", "disable_x2t_debug_strip"):
print("[WARNING:] temporary enable strip for x2t library in debug")
qmake_main_addon += "build_strip_debug"
qmake.make(platform, pro, qmake_main_addon)
# NOTE(review): indentation was lost in this view - confirm whether the
# xcframework pass below runs per project or once after the loop
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
qmake.make(platform, pro, "xcframework_platform_ios_simulator")
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
# check replace
new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder.h")
if ("2019" == config.option("vs-version")):
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
if (True):
new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
base.restorePathForBuilder(new_path_net)
else:
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
base.restorePathForBuilder(new_replace_path)
return

View File

@ -182,6 +182,9 @@ def extend_option(name, value):
else:
options[name] = value
def set_option(name, value):
options[name] = value
def branding():
branding = option("branding-name")
if ("" == branding):
@ -219,6 +222,10 @@ def parse_defaults():
options[name] = options[name].replace("default", defaults_options[name])
else:
options[name] = defaults_options[name]
if ("config_addon" in defaults_options):
extend_option("config", defaults_options["config_addon"])
return
def is_cef_107():

View File

@ -13,7 +13,7 @@ import cef
import icu
import openssl
import curl
import websocket
import websocket_all
import v8
import html2
import hunspell
@ -21,6 +21,7 @@ import glew
import harfbuzz
import hyphen
import googletest
import libvlc
def check_android_ndk_macos_arm(dir):
if base.is_dir(dir + "/darwin-x86_64") and not base.is_dir(dir + "/darwin-arm64"):
@ -46,9 +47,12 @@ def make():
glew.make()
hyphen.make()
googletest.make()
if config.check_option("build-libvlc", "1"):
libvlc.make()
if config.check_option("module", "mobile"):
if (config.check_option("platform", "android")):
curl.make()
websocket.make()
websocket_all.make()
return

View File

@ -0,0 +1,170 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import re
# NDK version string, derived from the last path component of
# ANDROID_NDK_ROOT (e.g. ".../ndk/26.2.11394342" -> "26.2.11394342");
# falls back to "21.1.6352462" when the variable is unset.
# NOTE(review): a later duplicate definition in this file shadows this one
# with a hardcoded value - confirm which behavior is intended.
def get_android_ndk_version():
env_val = base.get_env("ANDROID_NDK_ROOT")
if (env_val == ""):
env_val = "21.1.6352462"
return env_val.strip("/").split("/")[-1]
def get_android_ndk_version_major():
    # Major component of the NDK version, reduced to digits only
    # (guards against stray non-numeric characters), as an int.
    leading = get_android_ndk_version().split(".")[0]
    return int(re.sub("[^0-9]", "", leading))
def get_sdk_api():
    # Pick the Android API level matching the NDK generation:
    # API 21 for NDKs older than r23, API 23 otherwise.
    return "21" if get_android_ndk_version_major() < 23 else "23"
# Supported Android ABIs, in build order.
global archs
archs = ["arm64", "arm", "x86_64", "x86"]
# Per-ABI toolchain data: Android ABI name, clang target triple, output
# directory name, minimum API level, and the pre-r23 per-target lib dir name.
# NOTE(review): x86/x86_64 entries use an "arch" key where arm/arm64 use
# "abi" - confirm whether consumers rely on that asymmetry.
global platforms
platforms = {
"arm64" : {
"abi" : "arm64-v8a",
"target" : "aarch64-linux-android",
"dst" : "arm64_v8a",
"api" : get_sdk_api(),
"old" : "aarch64-linux-android"
},
"arm" : {
"abi" : "armeabi-v7a",
"target" : "armv7a-linux-androideabi",
"dst" : "armv7",
"api" : get_sdk_api(),
"old" : "arm-linux-android"
},
"x86_64" : {
"arch" : "x86_64",
"target" : "x86_64-linux-android",
"dst" : "x86_64",
"api" : get_sdk_api(),
"old" : "x86_64-linux-android"
},
"x86" : {
"arch" : "x86",
"target" : "i686-linux-android",
"dst" : "x86",
"api" : get_sdk_api(),
"old" : "i686-linux-android"
}
}
# todo: check arm host!
# Host selector for the NDK prebuilt toolchain directory (linux or darwin).
global host
if ("linux" == base.host_platform()):
host = {
"name" : "linux",
"arch" : "linux-x86_64"
}
else:
host = {
"name" : "darwin",
"arch" : "darwin-x86_64"
}
# NOTE(review): these redefinitions shadow the ANDROID_NDK_ROOT-based
# versions defined earlier in this file, pinning the NDK version to
# 21.1.6352462 and turning the environment-based detection into dead code.
# Likely leftovers from testing - confirm and remove one pair.
def get_android_ndk_version():
#return "26.2.11394342"
return "21.1.6352462"
def get_android_ndk_version_major():
return int(get_android_ndk_version().split(".")[0])
def get_options_dict_as_array(opts):
    # Render an options mapping as a list of "key=value" strings,
    # preserving the mapping's iteration order.
    return [name + "=" + opts[name] for name in opts]
def get_options_array_as_string(opts):
    # Concatenate option strings into one space-separated fragment.
    joined = ""
    for item in opts:
        joined = item if ("" == joined) else (joined + " " + item)
    return joined
# Absolute path to the Android NDK root, taken from ANDROID_NDK_ROOT.
def ndk_dir():
return base.get_env("ANDROID_NDK_ROOT")
def sdk_dir():
    # Derive the SDK root from the NDK path: two levels up for the modern
    # "<sdk>/ndk/<version>" layout, one level up otherwise.
    ndk_path = ndk_dir()
    if ndk_path.find("/ndk/") != -1:
        return ndk_path + "/../.."
    return ndk_path + "/.."
def toolchain_dir():
    # Prebuilt LLVM toolchain directory inside the NDK for the current host.
    return ndk_dir() + "/toolchains/llvm/prebuilt/" + host["arch"]
def prepare_platform(arch, cpp_standard=11):
    """Export the cross-compilation environment for one Android arch.

    Sets TARGET/TOOLCHAIN/CC/CXX/AR/... plus C, C++, preprocessor and linker
    flag variables so autotools-style builds pick up the NDK LLVM toolchain.

    arch         -- key into the module-level `platforms` dict
    cpp_standard -- minimum C++ standard; values >= 11 append -std=c++11
    """
    target = platforms[arch]["target"]
    api = platforms[arch]["api"]
    toolchain = toolchain_dir()
    base.set_env("TARGET", target)
    base.set_env("TOOLCHAIN", toolchain)
    base.set_env("NDK_STANDARD_ROOT", toolchain)
    base.set_env("ANDROIDVER", api)
    base.set_env("ANDROID_API", api)
    # Binutils replacements shipped with the LLVM toolchain.
    base.set_env("AR", toolchain + "/bin/llvm-ar")
    base.set_env("AS", toolchain + "/bin/llvm-as")
    base.set_env("LD", toolchain + "/bin/ld")
    base.set_env("RANLIB", toolchain + "/bin/llvm-ranlib")
    base.set_env("STRIP", toolchain + "/bin/llvm-strip")
    # Target- and API-specific clang driver wrappers (found via PATH below).
    base.set_env("CC", target + api + "-clang")
    base.set_env("CXX", target + api + "-clang++")
    ld_flags = "-Wl,--gc-sections,-rpath-link=" + toolchain + "/sysroot/usr/lib/"
    if (23 > get_android_ndk_version_major()):
        # Pre-r23 NDKs keep per-target libraries in separate directories.
        ld_flags += (" -L" + toolchain + "/" + platforms[arch]["old"] + "/lib")
        ld_flags += (" -L" + toolchain + "/sysroot/usr/lib/" + platforms[arch]["old"] + "/" + api)
    base.set_env("LDFLAGS", ld_flags)
    base.set_env("PATH", toolchain + "/bin" + os.pathsep + base.get_env("PATH"))
    cflags = [
        "-Os",
        "-ffunction-sections",
        "-fdata-sections",
        "-fvisibility=hidden",
        "-Wno-unused-function",
        "-fPIC",
        "-I" + toolchain + "/sysroot/usr/include",
        "-D__ANDROID_API__=" + api,
        "-DANDROID"
    ]
    cflags_string = " ".join(cflags)
    cppflags_string = cflags_string
    if (cpp_standard >= 11):
        cppflags_string += " -std=c++11"
    base.set_env("CFLAGS", cflags_string)
    base.set_env("CXXFLAGS", cppflags_string)
    # Fixed typo: was "CPPPLAGS", which left CPPFLAGS (preprocessor flags)
    # unset for configure-based builds.
    base.set_env("CPPFLAGS", cflags_string)
    return
def extend_cflags(params):
    # Append extra C compiler flags, then mirror the combined CFLAGS into
    # CPPFLAGS so configure scripts see the additions in both variables.
    base.set_env("CFLAGS", base.get_env("CFLAGS") + " " + params)
    base.set_env("CPPFLAGS", base.get_env("CFLAGS"))
    return
def extend_cxxflags(params):
    """Append extra C++ compiler flags to the CXXFLAGS env variable."""
    current = base.get_env("CXXFLAGS")
    base.set_env("CXXFLAGS", current + " " + params)
    return
def extend_ldflags(params):
    """Append extra linker flags to the LDFLAGS env variable."""
    current = base.get_env("LDFLAGS")
    base.set_env("LDFLAGS", current + " " + params)
    return

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Working directory: core/Common/3dParty/curl, normalized to an absolute
# path with a trailing "/" so paths can be built by plain concatenation.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/curl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# Pinned curl release: GitHub tag name and unpacked directory name.
lib_version = "curl-7_68_0"
lib_name = "curl-7.68.0"
def fetch():
    # Download and unpack the pinned curl release tarball.
    # Skipped when the source directory already exists (acts as a cache).
    if not base.is_dir(current_dir + lib_name):
        base.cmd("curl", ["-L", "-s", "-o", current_dir + lib_name + ".tar.gz",
            "https://github.com/curl/curl/releases/download/" + lib_version + "/" + lib_name + ".tar.gz"])
        base.cmd("tar", ["xfz", current_dir + lib_name + ".tar.gz", "-C", current_dir])
    return
def build_host():
    # No host-side build step is needed for curl; this placeholder keeps the
    # fetch/build_host/build_arch interface symmetric with the sibling
    # icu/openssl android scripts.
    return
def build_arch(arch):
    # Cross-compile static libcurl for one Android arch.
    # Skipped when the destination directory already exists (acts as a cache).
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    # Export the NDK cross-compilation environment (CC/CXX/flags/PATH...).
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    # Temporary install prefix; contents are copied out and the dir deleted.
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    old_cur = os.getcwd()
    os.chdir(current_dir + lib_name)
    params = []
    # NOTE(review): these --host triples duplicate data in android_ndk.platforms;
    # for "arm" the autotools host differs from the clang target triple.
    if ("arm64" == arch):
        params.append("--host=aarch64-linux-android")
    elif ("arm" == arch):
        params.append("--host=arm-linux-androideabi")
    elif ("x86_64" == arch):
        params.append("--host=x86_64-linux-android")
    elif ("x86" == arch):
        params.append("--host=i686-linux-android")
    # Link against the OpenSSL built for the same arch by the sibling script.
    openssl_dir = os.path.abspath(current_dir + "../openssl/build/android/" + android_ndk.platforms[arch]["dst"])
    params.append("--enable-ipv6")
    params.append("--enable-static")
    params.append("--disable-shared")
    params.append("--prefix=" + arch_build_dir)
    params.append("--with-ssl=" + openssl_dir)
    base.cmd("./configure", params)
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(old_cur)
    # Keep only the static library; headers are shared across arches.
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libcurl.a", dst_dir)
    base.copy_dir(arch_build_dir + "/include", current_dir + "build/android/include")
    base.delete_dir(arch_build_dir)
    return
def make():
    # Build libcurl for every Android arch. The environment mutated by
    # android_ndk.prepare_platform is restored afterwards from a snapshot.
    old_env = dict(os.environ)
    fetch()
    build_host()
    for arch in android_ndk.archs:
        build_arch(arch)
    os.environ.clear()
    os.environ.update(old_env)
    return

# Allow running this script directly as well as importing it as a module.
if __name__ == "__main__":
    make()

View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Working directory: core/Common/3dParty/icu/android, normalized to an
# absolute path with a trailing "/" for plain string concatenation.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# Pinned ICU release (maint/maint-58 branch).
icu_major = "58"
icu_minor = "3"
# configure options shared by the host cross-build and every Android arch:
# static-only build with archive data packaging, extras/tests disabled.
options = {
    "--enable-strict" : "no",
    "--enable-extras" : "no",
    "--enable-draft" : "yes",
    "--enable-samples" : "no",
    "--enable-tests" : "no",
    "--enable-renaming" : "yes",
    "--enable-icuio" : "no",
    "--enable-layoutex" : "no",
    "--with-library-bits" : "nochange",
    "--with-library-suffix" : "",
    "--enable-static" : "yes",
    "--enable-shared" : "no",
    "--with-data-packaging" : "archive"
}
# Size-oriented compiler flags common to host and Android builds.
cpp_flags_base = [
    "-Os",
    "-ffunction-sections",
    "-fdata-sections",
    "-fvisibility=hidden",
    "-fPIC"
]
# ICU-specific defines/flags (static implementation; keep full-width
# wchar_t/enums; disable features via UCONFIG_* defines left at 0).
cpp_flags = [
    "-fno-short-wchar",
    "-fno-short-enums",
    "-DU_USING_ICU_NAMESPACE=0",
    "-DU_HAVE_NL_LANGINFO_CODESET=0",
    "-DU_TIMEZONE=0",
    "-DU_DISABLE_RENAMING=0",
    "-DUCONFIG_NO_COLLATION=0",
    "-DUCONFIG_NO_FORMATTING=0",
    "-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
    "-DUCONFIG_NO_TRANSLITERATION=0",
    "-DU_STATIC_IMPLEMENTATION"
]
def fetch_icu():
    # Shallow-clone the ICU maintenance branch and keep only the icu4c tree.
    # Skipped when the icu directory already exists (acts as a cache).
    if not base.is_dir(current_dir + "icu"):
        base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + icu_major, "https://github.com/unicode-org/icu.git", current_dir + "icu2"])
        base.copy_dir(current_dir + "icu2/icu4c", current_dir + "icu")
        base.delete_dir_with_access_error(current_dir + "icu2")
        if ("linux" == base.host_platform()):
            # glibc has no xlocale.h; patch the include for Linux hosts.
            base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
        # Deliberately disabled (if False): old pkgdata format-string patch for mac.
        if False and ("mac" == base.host_platform()):
            base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
    return
def build_host():
    # Build ICU for the host once. ICU's cross-compilation requires a host
    # build tree passed later via --with-cross-build.
    cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
    if not base.is_dir(cross_build_dir):
        base.create_dir(cross_build_dir)
        os.chdir(cross_build_dir)
        ld_flags = "-pthread"
        if ("linux" == base.host_platform()):
            ld_flags += " -Wl,--gc-sections"
        else:
            # gcc on OSX does not support --gc-sections
            ld_flags += " -Wl,-dead_strip"
        base.set_env("LDFLAGS", ld_flags)
        base.set_env("CPPFLAGS", android_ndk.get_options_array_as_string(cpp_flags_base + cpp_flags))
        host_type = "Linux"
        if ("mac" == base.host_platform()):
            host_type = "MacOSX/GCC"
        base.cmd("../source/runConfigureICU", [host_type, "--prefix=" + cross_build_dir] + android_ndk.get_options_dict_as_array(options))
        base.cmd("make", ["-j4"])
        base.cmd("make", ["install"], True)
        # Headers are arch-independent; publish them once from the host build.
        base.create_dir(current_dir + "build")
        base.copy_dir(cross_build_dir + "/include", current_dir + "build/include")
        os.chdir(current_dir)
    return
def build_arch(arch):
    # Cross-compile static ICU for one Android arch against the host
    # cross-build tree. Skipped when dst_dir already exists (acts as a cache).
    dst_dir = current_dir + "build/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    android_ndk.prepare_platform(arch)
    android_ndk.extend_cflags(" ".join(cpp_flags))
    # NOTE(review): ndk_dir/toolchain are computed but unused below.
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()
    cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
    # Temporary build/install prefix; artifacts are copied out then deleted.
    arch_build_dir = os.path.abspath(current_dir + "build/tmp")
    base.create_dir(arch_build_dir)
    os.chdir(arch_build_dir)
    base.cmd("./../../icu/source/configure", ["--with-cross-build=" + cross_build_dir] +
        android_ndk.get_options_dict_as_array(options) + ["--host=" + android_ndk.platforms[arch]["target"], "--prefix=" + arch_build_dir])
    base.cmd("make", ["-j4"])
    os.chdir(current_dir)
    # Keep the static libs and the archive data file for this arch.
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libicuuc.a", dst_dir)
    base.copy_file(arch_build_dir + "/stubdata/libicudata.a", dst_dir)
    base.copy_file(arch_build_dir + "/data/out/icudt" + icu_major + "l.dat", dst_dir)
    base.delete_dir(arch_build_dir)
    return
def make():
    # Fetch ICU, build the host cross-build, then every Android arch.
    # The environment mutated by prepare_platform is restored from a snapshot.
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)
    old_env = dict(os.environ)
    fetch_icu()
    build_host()
    for arch in android_ndk.archs:
        build_arch(arch)
    os.environ.clear()
    os.environ.update(old_env)
    return

# Allow running this script directly as well as importing it as a module.
if __name__ == "__main__":
    make()

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Working directory: core/Common/3dParty/openssl, normalized to an absolute
# path with a trailing "/" for plain string concatenation.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# Pinned OpenSSL release (also the unpacked tarball directory name).
lib_name="openssl-1.1.1t"
# Configure options: static-only build, no tests, no assembler,
# legacy ssl3/md2 support enabled.
options = [
    "no-shared",
    "no-tests",
    "enable-ssl3",
    "enable-ssl3-method",
    "enable-md2",
    "no-asm"
]
def fetch():
    # Download and unpack the pinned OpenSSL release tarball.
    # Skipped when the source directory already exists (acts as a cache).
    if not base.is_dir(current_dir + lib_name):
        base.cmd("curl", ["-L", "-s", "-o", current_dir + lib_name + ".tar.gz",
            "https://www.openssl.org/source/" + lib_name + ".tar.gz"])
        base.cmd("tar", ["xfz", current_dir + lib_name + ".tar.gz", "-C", current_dir])
    return
def build_host():
    # not needed, just create directories
    # NOTE(review): current_dir already ends with "/", so these paths contain
    # a double slash -- harmless on POSIX, but worth normalizing.
    if not base.is_dir(current_dir + "/build"):
        base.create_dir(current_dir + "/build")
    if not base.is_dir(current_dir + "/build/android"):
        base.create_dir(current_dir + "/build/android")
    return
def build_arch(arch):
    # Cross-compile static OpenSSL (libcrypto/libssl) for one Android arch.
    # Skipped when the destination directory already exists (acts as a cache).
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()
    # NOTE(review): presumably read by OpenSSL's android-* Configure targets
    # to locate the toolchain -- confirm against the Configure docs.
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    # Temporary install prefix; contents are copied out and the dir deleted.
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    old_cur = os.getcwd()
    os.chdir(current_dir + lib_name)
    # "android-" + arch matches the archs list: android-arm64/arm/x86_64/x86.
    base.cmd("./Configure", ["android-" + arch, "--prefix=" + arch_build_dir, "-D__ANDROID_API__=" + android_ndk.platforms[arch]["api"]] + options)
    # Hide non-exported symbols in the produced static libraries.
    base.replaceInFile("./Makefile", "LIB_CFLAGS=", "LIB_CFLAGS=-fvisibility=hidden ")
    base.replaceInFile("./Makefile", "LIB_CXXFLAGS=", "LIB_CXXFLAGS=-fvisibility=hidden ")
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(old_cur)
    # Publish per-arch libs and headers.
    base.create_dir(dst_dir)
    base.create_dir(dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libcrypto.a", dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libssl.a", dst_dir + "/lib")
    base.copy_dir(arch_build_dir + "/include", dst_dir + "/include")
    base.delete_dir(arch_build_dir)
    return
def make():
    # Build OpenSSL for every Android arch. The environment mutated by
    # android_ndk.prepare_platform is restored afterwards from a snapshot.
    old_env = dict(os.environ)
    fetch()
    build_host()
    for arch in android_ndk.archs:
        build_arch(arch)
    os.environ.clear()
    os.environ.update(old_env)
    return

# Allow running this script directly as well as importing it as a module.
if __name__ == "__main__":
    make()

View File

@ -1,108 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
platforms = {
"arm64_v8a" : {
"name" : "arm64-v8a",
"toolset" : "arm64v8a",
"clang_triple" : "aarch64-linux-android21",
"tool_triple" : "aarch64-linux-android",
"abi" : "aapcs",
"arch" : "arm",
"address_model" : "64",
"compiler_flags" : "",
"linker_flags" : ""
},
"armv7" : {
"name" : "armeabi-v7a",
"toolset" : "armeabiv7a",
"clang_triple" : "armv7a-linux-androideabi16",
"tool_triple" : "arm-linux-androideabi",
"abi" : "aapcs",
"arch" : "arm",
"address_model" : "32",
"compiler_flags" : "-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp",
"linker_flags" : "-Wl,--fix-cortex-a8"
},
"x86" : {
"name" : "x86",
"toolset" : "x86",
"clang_triple" : "i686-linux-android16",
"tool_triple" : "i686-linux-android",
"abi" : "sysv",
"arch" : "x86",
"address_model" : "32",
"compiler_flags" : "",
"linker_flags" : ""
},
"x86_64" : {
"name" : "x86_64",
"toolset" : "x8664",
"clang_triple" : "x86_64-linux-android21",
"tool_triple" : "x86_64-linux-android",
"abi" : "sysv",
"arch" : "x86",
"address_model" : "64",
"compiler_flags" : "",
"linker_flags" : ""
}
}
base_dir = base.get_script_dir()
def make(platform):
tmp_build_dir = base_dir + "/core_common/modules/boost"
if (base.is_dir(tmp_build_dir)):
base.delete_dir(tmp_build_dir)
base.copy_dir(base_dir + "/../tools/android/boost", tmp_build_dir)
current_platform = platforms[platform]
if (base.host_platform() == "mac"):
source = "prebuilt/linux-x86_64"
dest = "prebuilt/darwin-x86_64"
base.replaceInFile(tmp_build_dir + "/user-config.jam", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/hide/as", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/hide/strip", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/ar", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/clang++", source, dest)
base.replaceInFile(tmp_build_dir + "/bin/ranlib", source, dest)
build_dir_tmp = tmp_build_dir + "/tmp"
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex", "--prefix=../build/android_" + platform])
base.cmd("./b2", ["headers"])
base.cmd("./b2", ["--clean"])
old_path = base.get_env("PATH")
base.set_env("PATH", tmp_build_dir + "/bin:" + old_path)
base.set_env("NDK_DIR", base.get_env("ANDROID_NDK_ROOT"))
base.set_env("BFA_CLANG_TRIPLE_FOR_ABI", current_platform["clang_triple"])
base.set_env("BFA_TOOL_TRIPLE_FOR_ABI", current_platform["tool_triple"])
base.set_env("BFA_COMPILER_FLAGS_FOR_ABI", current_platform["compiler_flags"])
base.set_env("BFA_LINKER_FLAGS_FOR_ABI", current_platform["linker_flags"])
print(current_platform)
base.cmd("./b2", ["-q", "-j4",
"toolset=clang-" + current_platform["toolset"],
"binary-format=elf",
"address-model=" + current_platform["address_model"],
"architecture=" + current_platform["arch"],
"abi=" + current_platform["abi"],
"link=static",
"threading=multi",
"target-os=android",
"--user-config=" + tmp_build_dir + "/user-config.jam",
"--ignore-site-config",
"--layout=system",
"install"], True)
base.set_env("PATH", old_path)
base.delete_dir(tmp_build_dir)
return

View File

@ -5,7 +5,7 @@ sys.path.append('../..')
import config
import base
import os
import build
import qmake
def make(src_dir, modules, build_platform="android", qmake_addon=""):
old_cur = os.getcwd()
@ -23,17 +23,13 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
pro_file_content.append("TARGET = boost_" + module)
pro_file_content.append("TEMPLATE = lib")
pro_file_content.append("CONFIG += staticlib")
if (build_platform == "android"):
pro_file_content.append("DEFINES += \"_HAS_AUTO_PTR_ETC=0\"")
pro_file_content.append("")
pro_file_content.append("CORE_ROOT_DIR = $$PWD/../../../../../..")
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
pro_file_content.append("include($$PWD/../../../../../base.pri)")
pro_file_content.append("")
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
pro_file_content.append("!isEmpty(OO_BRANDING_SUFFIX):MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"$$OO_BRANDING_SUFFIX\")")
pro_file_content.append("")
pro_file_content.append("BOOST_SOURCES=$$PWD/../..")
pro_file_content.append("INCLUDEPATH += $$BOOST_SOURCES")
pro_file_content.append("INCLUDEPATH += $$PWD/include")
@ -43,7 +39,7 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
os.chdir(module_dir)
build.make_pro_file("./", module + ".pro", qmake_addon)
qmake.make_all_platforms(module_dir + "/" + module + ".pro", qmake_addon)
os.chdir(old_cur)
return

View File

@ -2,21 +2,19 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import subprocess
import os
import base
import curl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
old_cur = os.getcwd()
os.chdir(path)
if (-1 != config.option("platform").find("android")):
if base.is_dir(path + "/build/android"):
os.chdir(old_cur)
return
subprocess.call(["./build-android-curl.sh"])
curl_android.make()
elif (-1 != config.option("platform").find("ios")):
if base.is_dir(path + "/build/ios"):
os.chdir(old_cur)

View File

@ -2,26 +2,50 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import base
import os
import glob
import icu_android
def fetch_icu(major, minor):
base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + major, "https://github.com/unicode-org/icu.git", "./icu2"])
base.copy_dir("./icu2/icu4c", "./icu")
base.delete_dir_with_access_error("icu2")
#base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
return
def clear_module():
if base.is_dir("icu"):
base.delete_dir_with_access_error("icu")
# remove build
for child in glob.glob("./*"):
if base.is_dir(child):
base.delete_dir(child)
return
def make():
print("[fetch & build]: icu")
if (-1 != config.option("platform").find("android")):
icu_android.make()
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
old_cur = os.getcwd()
os.chdir(base_dir)
icu_major = "58"
icu_minor = "2"
base.check_module_version("3", clear_module)
if (-1 != config.option("platform").find("android")):
icu_android.make()
os.chdir(base_dir)
icu_major = "58"
icu_minor = "3"
if not base.is_dir("icu"):
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
fetch_icu(icu_major, icu_minor)
if ("windows" == base.host_platform()):
platformToolset = "v140"

View File

@ -1,172 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
toolshains_dir = current_dir + "/toolchains"
icu_major = "58"
icu_minor = "2"
icu_is_shared = False
current_path = base.get_env("PATH")
platforms = {
"arm64" : {
"arch" : "aarch64-linux-android",
"bin" : "aarch64-linux-android"
},
"arm" : {
"arch" : "arm-linux-androideabi",
"bin" : "arm-linux-androideabi"
},
"x86_64" : {
"arch" : "x86_64-linux-android",
"bin" : "x86_64-linux-android"
},
"x86" : {
"arch" : "x86-linux-android",
"bin" : "i686-linux-android"
}
}
def build_arch(arch, api_version):
print("icu build: " + arch + " ----------------------------------------")
if base.is_dir(current_dir + "/icu/" + arch):
base.delete_dir(current_dir + "/icu/" + arch)
base.create_dir(current_dir + "/icu/" + arch)
os.chdir(current_dir + "/icu/" + arch)
base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
"--platform=android-" + api_version,
"--install-dir=" + current_dir + "/toolchain/" + arch,
"--toolchain=" + platforms[arch]["arch"],
"--force"
])
base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)
command_args = "--prefix=" + current_dir + "/build_tmp/" + arch + " --host=!!!MASK!!! --with-cross-build=" + current_dir + "/icu/cross_build CFLAGS=-Os CXXFLAGS=--std=c++11 CC=!!!MASK!!!-clang CXX=!!!MASK!!!-clang++ AR=!!!MASK!!!-ar RANLIB=!!!MASK!!!-ranlib"
if not icu_is_shared:
command_args += " --enable-static --enable-shared=no --with-data-packaging=archive CFLAGS=-fPIC CXXFLAGS=-fPIC"
command_args = command_args.replace("!!!MASK!!!", platforms[arch]["bin"])
base.cmd("../source/configure", command_args.split())
base.cmd("make", ["-j4"])
base.cmd("make", ["install"])
base.set_env("PATH", current_path)
os.chdir(current_dir)
return
def make():
if not base.is_dir(current_dir):
base.create_dir(current_dir)
if base.is_dir(current_dir + "/build"):
return
current_dir_old = os.getcwd()
print("[fetch & build]: icu_android")
os.chdir(current_dir)
if not base.is_dir("icu"):
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
if ("linux" == base.host_platform()):
base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
#if ("mac" == base.host_platform()):
# base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
if not base.is_dir(current_dir + "/icu/cross_build"):
base.create_dir(current_dir + "/icu/cross_build")
os.chdir(current_dir + "/icu/cross_build")
base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
"--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
os.chdir(current_dir)
build_arch("arm64", "21")
build_arch("arm", "16")
build_arch("x86_64","21")
build_arch("x86", "16")
os.chdir(current_dir)
base.create_dir(current_dir + "/build")
base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")
if icu_is_shared:
base.create_dir(current_dir + "/build/arm64_v8a")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")
base.create_dir(current_dir + "/build/armv7")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")
base.create_dir(current_dir + "/build/x86_64")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")
base.create_dir(current_dir + "/build/x86")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")
# patch elf information
os.chdir(current_dir + "/build")
base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
os.chdir("./patchelf")
base.cmd("./bootstrap.sh")
base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
base.cmd("make")
base.cmd("make", ["install"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../arm64_v8a/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../arm64_v8a/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../arm64_v8a/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../armv7/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../armv7/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../armv7/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86_64/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86_64/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86_64/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86/libicudata.so"])
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86/libicuuc.so"])
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86/libicuuc.so"])
base.delete_dir(current_dir + "/build/patchelf")
if not icu_is_shared:
base.create_dir(current_dir + "/build/arm64_v8a")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.a", current_dir + "/build/arm64_v8a/libicudata.a")
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.a", current_dir + "/build/arm64_v8a/libicuuc.a")
base.copy_file(current_dir + "/icu/arm64/data/out/icudt58l.dat", current_dir + "/build/arm64_v8a/icudt58l.dat")
base.create_dir(current_dir + "/build/armv7")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.a", current_dir + "/build/armv7/libicudata.a")
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.a", current_dir + "/build/armv7/libicuuc.a")
base.copy_file(current_dir + "/icu/arm/data/out/icudt58l.dat", current_dir + "/build/armv7/icudt58l.dat")
base.create_dir(current_dir + "/build/x86_64")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.a", current_dir + "/build/x86_64/libicudata.a")
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.a", current_dir + "/build/x86_64/libicuuc.a")
base.copy_file(current_dir + "/icu/x86_64/data/out/icudt58l.dat", current_dir + "/build/x86_64/icudt58l.dat")
base.create_dir(current_dir + "/build/x86")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.a", current_dir + "/build/x86/libicudata.a")
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.a", current_dir + "/build/x86/libicuuc.a")
base.copy_file(current_dir + "/icu/x86/data/out/icudt58l.dat", current_dir + "/build/x86/icudt58l.dat")
os.chdir(current_dir_old)
return

View File

@ -35,7 +35,7 @@ def restore_icu_defs(current_dir):
return
icu_major = "58"
icu_minor = "2"
icu_minor = "3"
current_dir_old = os.getcwd()
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"

View File

@ -0,0 +1,124 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
def docker_build(image_name, dockerfile_dir, base_dir):
    """Build a docker image, run it once with the vlc tree mounted at /vlc,
    then remove the image."""
    mount_spec = base_dir + "/vlc" + ":/vlc"
    base.cmd("docker", ["build", "-t", image_name, dockerfile_dir])
    base.cmd("docker", ["run", "--rm", "-v", mount_spec, image_name])
    base.cmd("docker", ["image", "rm", image_name])
    return
def form_build_win(src_dir, dest_dir):
    # Assemble the Windows libvlc distribution layout (include/ + lib/)
    # from a finished vlc build tree.
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/sdk/include", dest_dir + "/include")
    # form lib dir
    base.create_dir(dest_dir + "/lib")
    base.copy_file(src_dir + "/sdk/lib/libvlc.lib", dest_dir + "/lib/vlc.lib")
    base.copy_file(src_dir + "/sdk/lib/libvlccore.lib", dest_dir + "/lib/vlccore.lib")
    base.copy_dir(src_dir + "/plugins", dest_dir + "/lib/plugins")
    base.copy_file(src_dir + "/libvlc.dll", dest_dir + "/lib")
    base.copy_file(src_dir + "/libvlccore.dll", dest_dir + "/lib")
    base.copy_file(src_dir + "/vlc-cache-gen.exe", dest_dir + "/lib")
    # generate cache file 'plugins.dat' for plugins loading
    base.cmd_exe(dest_dir + "/lib/vlc-cache-gen", [dest_dir + "/lib/plugins"])
    return
def form_build_linux(src_dir, dest_dir):
    # Assemble the Linux libvlc distribution layout (include/ + lib/),
    # dropping pkgconfig metadata and the internal libcompat archive.
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/include", dest_dir + "/include")
    # copy and form lib dir
    base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
    base.delete_dir(dest_dir + "/lib/pkgconfig")
    base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
def form_build_mac(src_dir, dest_dir):
    # Assemble the macOS libvlc distribution layout (include/ + lib/) and
    # pre-generate the plugin cache.
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/include", dest_dir + "/include")
    # copy and form lib dir
    base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
    # NOTE(review): the escaped quotes around *.la are passed literally as an
    # argument; verify the glob actually matches when run via base.cmd.
    base.cmd("find", [dest_dir + "/lib", "-name", "\"*.la\"", "-type", "f", "-delete"])
    base.delete_dir(dest_dir + "/lib/pkgconfig")
    base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
    # generate cache file 'plugins.dat' for plugins loading
    base.run_command("DYLD_LIBRARY_PATH=" + dest_dir + "/lib " + dest_dir + "/lib/vlc/vlc-cache-gen " + dest_dir + "/lib/vlc/plugins")
    return
def make():
    """Fetch vlc sources and build libvlc for every configured platform.

    Windows/Linux targets build inside docker containers; mac targets build
    natively via vlc's extras/package/macosx/build.sh. The original working
    directory is restored on exit.
    """
    print("[fetch & build]: libvlc")
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/libvlc"
    vlc_dir = base_dir + "/vlc"
    vlc_version = "3.0.18"
    tools_dir = base.get_script_dir() + "/../tools"
    old_cur = os.getcwd()
    os.chdir(base_dir)
    if not base.is_dir(vlc_dir):
        # temporary disable auto CRLF for Windows
        if "windows" == base.host_platform():
            autocrlf_old = base.run_command("git config --global core.autocrlf")['stdout']
            base.cmd("git", ["config", "--global", "core.autocrlf", "false"])
        base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
        if "windows" == base.host_platform():
            base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
    base.create_dir("build")
    base.copy_file("tools/ignore-cache-time.patch", "vlc")
    # windows
    if "windows" == base.host_platform():
        if config.check_option("platform", "win_64"):
            base.copy_file("tools/win_64/build.patch", "vlc")
            docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
            form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
        if config.check_option("platform", "win_32"):
            base.copy_file("tools/win_32/build.patch", "vlc")
            docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
            form_build_win(vlc_dir + "/build/win32/vlc-" + vlc_version, base_dir + "/build/win_32")
    # linux
    if config.check_option("platform", "linux_64"):
        base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
        base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
        docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
        form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
    # mac
    if "mac" == base.host_platform():
        os.chdir(vlc_dir)
        base.cmd("git", ["restore", "src/modules/bank.c"])
        base.cmd("patch", ["-p1", "src/modules/bank.c", "../tools/ignore-cache-time.patch"])
        if config.check_option("platform", "mac_64"):
            base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
            base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_64/build.patch"])
            base.create_dir("build/mac_64")
            os.chdir("build/mac_64")
            base.cmd("../../extras/package/macosx/build.sh", ["-c"])
            form_build_mac(vlc_dir + "/build/mac_64/vlc_install_dir", base_dir + "/build/mac_64")
        if config.check_option("platform", "mac_arm64"):
            # Fix: the mac_64 branch leaves the cwd in vlc/build/mac_64, which
            # broke the relative git/patch/create_dir calls below when both
            # mac platforms were configured. Always start from vlc_dir.
            os.chdir(vlc_dir)
            base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
            base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_arm64/build.patch"])
            base.create_dir("build/mac_arm64")
            os.chdir("build/mac_arm64")
            base.cmd("../../extras/package/macosx/build.sh", ["-c"])
            form_build_mac(vlc_dir + "/build/mac_arm64/vlc_install_dir", base_dir + "/build/mac_arm64")
    os.chdir(old_cur)
    return

View File

@ -1,18 +1,19 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
sys.path.append('android')
import base
import config
import os
import subprocess
import openssl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
old_cur = os.getcwd()
os.chdir(path)
base.set_env("ANDROID_HOME", base.get_android_sdk_home())
if (-1 != config.option("platform").find("android") and not base.is_dir("./build/android")):
subprocess.call(["./build-android-openssl.sh"])
if (-1 != config.option("platform").find("android")):
openssl_android.make()
if (-1 != config.option("platform").find("ios") and not base.is_dir("./build/ios")):
subprocess.call(["./build-ios-openssl.sh"])

View File

@ -27,8 +27,16 @@ def make():
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
if not base.is_dir(base_dir + "/socket.io-client-cpp"):
base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
# patches
base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
# no tls realization (remove if socket.io fix this)
dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"

View File

@ -93,6 +93,7 @@ def make():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
@ -225,13 +226,17 @@ def make_xp():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
base.replaceInFile("depot_tools/cipd.ps1", "windows-386", "windows-amd64")
# old variant
#path_to_python2 = "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin"
path_to_python2 = "/depot_tools/bootstrap-2@3_8_10_chromium_23_bin/python/bin"
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
base_dir + "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin",
base_dir + path_to_python2,
config.option("vs-path") + "/../Common7/IDE",
os.environ["PATH"]])
@ -266,6 +271,13 @@ def make_xp():
" replaceInFile(file, '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>')",
]);
programFilesDir = base.get_env("ProgramFiles")
if ("" != base.get_env("ProgramFiles(x86)")):
programFilesDir = base.get_env("ProgramFiles(x86)")
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
if (base.is_dir(dev_path)):
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
# add "SET CL=\"/D_ITERATOR_DEBUG_LEVEL=0\"" before devenv for disable _ITERATOR_DEBUG_LEVEL in debug
if config.check_option("platform", "win_64_xp"):
if not base.is_dir("win_64/release"):

View File

@ -7,6 +7,23 @@ import base
import os
import subprocess
def change_bootstrap():
base.move_file("./depot_tools/bootstrap/manifest.txt", "./depot_tools/bootstrap/manifest.txt.bak")
content = "# changed by build_tools\n\n"
content += "$VerifiedPlatform windows-amd64 windows-arm64 linux-amd64 mac-amd64 mac-arm64\n\n"
content += "@Subdir python\n"
content += "infra/3pp/tools/cpython/${platform} version:2@2.7.18.chromium.39\n\n"
content += "@Subdir python3\n"
content += "infra/3pp/tools/cpython3/${platform} version:2@3.8.10.chromium.23\n\n"
content += "@Subdir git\n"
content += "infra/3pp/tools/git/${platform} version:2@2.41.0.chromium.11\n"
base.writeFile("./depot_tools/bootstrap/manifest.txt", content)
return
def make_args(args, platform, is_64=True, is_debug=False):
args_copy = args[:]
if is_64:
@ -46,6 +63,12 @@ def ninja_windows_make(args, is_64=True, is_debug=False):
base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")
win_toolset_wrapper_file = "build/toolchain/win/tool_wrapper.py"
win_toolset_wrapper_file_content = base.readFile("build/toolchain/win/tool_wrapper.py")
if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")):
base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n")
base.cmd("ninja", ["-C", directory_out, "v8_wrappers"])
base.cmd("ninja", ["-C", directory_out])
base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja")
@ -85,6 +108,7 @@ def make():
os.chdir(base_dir)
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
change_bootstrap()
os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]
@ -108,9 +132,10 @@ def make():
if ("windows" == base.host_platform()):
base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt")
if not base.is_file("v8/src/base/platform/wrappers.cc"):
base.writeFile("v8/src/base/platform/wrappers.cc", "#include \"src/base/platform/wrappers.h\"\n")
else:
base.replaceInFile("depot_tools/gclient_paths.py", "@functools.lru_cache", "")
if not base.is_file("v8/third_party/jinja2/tests.py.bak"):
base.copy_file("v8/third_party/jinja2/tests.py", "v8/third_party/jinja2/tests.py.bak")

View File

@ -4,12 +4,10 @@ import sys
sys.path.append('../..')
import config
import base
import ixwebsocket
import socketrocket
#import ixwebsocket
#import socketrocket
import socket_io
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
def make():
#ixwebsocket.make()
#socketrocket.make()

View File

@ -7,6 +7,7 @@ import deploy_builder
import deploy_server
import deploy_core
import deploy_mobile
import deploy_osign
def make():
if config.check_option("module", "desktop"):
@ -19,4 +20,6 @@ def make():
deploy_core.make()
if config.check_option("module", "mobile"):
deploy_mobile.make()
if config.check_option("module", "osign"):
deploy_osign.make()
return

View File

@ -76,12 +76,18 @@ def make():
if (0 == platform.find("win")):
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
# app
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.copy_dir(git_dir + "/document-templates/new/en-US", root_dir + "/empty")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
# js
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", root_dir + "/sdkjs")
base.create_dir(root_dir + "/sdkjs/vendor")
@ -108,6 +114,9 @@ def make():
if ("ios" == platform):
base.generate_plist(root_dir)
if (0 == platform.find("linux")):
base.linux_correct_rpath_docbuilder(root_dir)
if (0 == platform.find("mac")):
base.mac_correct_rpath_x2t(root_dir)
base.mac_correct_rpath_docbuilder(root_dir)

View File

@ -59,7 +59,11 @@ def make():
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "standardtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2ttester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)
if base.is_file(archive_dir + ".7z"):
base.delete_file(archive_dir + ".7z")

View File

@ -6,30 +6,16 @@ import os
import platform
import glob
def deploy_marketplace_plugin(git_dir, root_dir):
# old manager
#base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "manager", True)
def copy_lib_with_links(src_dir, dst_dir, lib, version):
lib_full_name = lib + "." + version
major_version = version[:version.find(".")]
lib_major_name = lib + "." + major_version
# plugin manager with local paths
sys_plugins_dir = root_dir + "/editors/sdkjs-plugins"
base.clone_marketplace_plugin(sys_plugins_dir, True, True, False)
# store with local paths
manager_dir = sys_plugins_dir + "/{AA2EA9B6-9EC2-415F-9762-634EE8D9A95E}"
base.copy_file(src_dir + "/" + lib_full_name, dst_dir + "/" + lib_full_name)
store_dir_path = manager_dir + "/store"
if base.is_dir(store_dir_path):
base.delete_dir(store_dir_path)
base.create_dir(store_dir_path)
base.copy_dir_content(sys_plugins_dir + "/onlyoffice.github.io/store", store_dir_path, "", ".git")
base.delete_dir(store_dir_path + "/plugin")
base.delete_file(store_dir_path + "/build.bat")
for file in glob.glob(store_dir_path + "/*.html"):
base.replaceInFile(file, "https://onlyoffice.github.io/sdkjs-plugins/", "../../")
base.delete_dir_with_access_error(sys_plugins_dir + "/onlyoffice.github.io")
base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + lib_full_name, "./" + lib_major_name])
base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + lib_major_name, "./" + lib])
return
def make():
@ -68,23 +54,25 @@ def make():
platform_postfix = platform + base.qt_dst_postfix()
build_libraries_path = core_build_dir + "/lib/" + platform_postfix
# x2t
base.create_dir(root_dir + "/converter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DocxRenderer")
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel")
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel_network")
base.copy_lib(build_libraries_path, root_dir + "/converter", "UnicodeConverter")
base.copy_lib(build_libraries_path, root_dir + "/converter", "graphics")
base.copy_lib(build_libraries_path, root_dir + "/converter", "PdfFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "DjVuFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "XpsFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlFile2")
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlRenderer")
base.copy_lib(build_libraries_path, root_dir + "/converter", "Fb2File")
base.copy_lib(build_libraries_path, root_dir + "/converter", "EpubFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "DocxRenderer")
if ("ios" == platform):
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "x2t")
base.copy_lib(build_libraries_path, root_dir + "/converter", "x2t")
else:
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "x2t")
@ -107,19 +95,24 @@ def make():
# doctrenderer
if isWindowsXP:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir + "/converter", "doctrenderer")
base.copy_lib(build_libraries_path + "/xp", root_dir + "/converter", "doctrenderer")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "doctrenderer")
base.copy_lib(build_libraries_path, root_dir + "/converter", "doctrenderer")
base.copy_v8_files(core_dir, root_dir + "/converter", platform, isWindowsXP)
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop")
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop", "", "../dictionaries")
base.copy_dir(git_dir + "/document-templates/new", root_dir + "/converter/empty")
# dictionaries
base.create_dir(root_dir + "/dictionaries")
base.copy_dir_content(git_dir + "/dictionaries", root_dir + "/dictionaries", "", ".git")
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries")
base.copy_dir(git_dir + "/core-fonts/opensans", root_dir + "/fonts")
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
base.copy_dir(git_dir + "/desktop-apps/common/package/fonts", root_dir + "/fonts")
base.copy_file(git_dir + "/desktop-apps/common/package/license/3dparty/3DPARTYLICENSE", root_dir + "/3DPARTYLICENSE")
# cef
@ -139,11 +132,11 @@ def make():
isUseQt = False
# libraries
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "hunspell")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
base.copy_lib(build_libraries_path, root_dir, "hunspell")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
if (0 != platform.find("mac")):
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
if (0 == platform.find("mac")):
base.copy_dir(core_build_dir + "/bin/" + platform_postfix + "/editors_helper.app", root_dir + "/editors_helper.app")
@ -155,9 +148,7 @@ def make():
base.qt_copy_lib("Qt5Gui", root_dir)
base.qt_copy_lib("Qt5PrintSupport", root_dir)
base.qt_copy_lib("Qt5Svg", root_dir)
base.qt_copy_lib("Qt5Widgets", root_dir)
base.qt_copy_lib("Qt5Multimedia", root_dir)
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
base.qt_copy_lib("Qt5Widgets", root_dir)
base.qt_copy_lib("Qt5Network", root_dir)
base.qt_copy_lib("Qt5OpenGL", root_dir)
@ -166,13 +157,17 @@ def make():
base.qt_copy_plugin("imageformats", root_dir)
base.qt_copy_plugin("platforms", root_dir)
base.qt_copy_plugin("platforminputcontexts", root_dir)
base.qt_copy_plugin("printsupport", root_dir)
base.qt_copy_plugin("mediaservice", root_dir)
base.qt_copy_plugin("playlistformats", root_dir)
base.qt_copy_plugin("printsupport", root_dir)
base.qt_copy_plugin("platformthemes", root_dir)
base.qt_copy_plugin("xcbglintegrations", root_dir)
if not base.check_congig_option_with_platfom(platform, "libvlc"):
base.qt_copy_lib("Qt5Multimedia", root_dir)
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
base.qt_copy_plugin("mediaservice", root_dir)
base.qt_copy_plugin("playlistformats", root_dir)
base.qt_copy_plugin("styles", root_dir)
if (0 == platform.find("linux")):
@ -180,7 +175,8 @@ def make():
base.qt_copy_lib("Qt5X11Extras", root_dir)
base.qt_copy_lib("Qt5XcbQpa", root_dir)
base.qt_copy_icu(root_dir)
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
if not base.check_congig_option_with_platfom(platform, "libvlc"):
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
if (0 == platform.find("win")):
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/projicons/" + apps_postfix + "/projicons.exe", root_dir + "/DesktopEditors.exe")
@ -191,37 +187,43 @@ def make():
elif (0 == platform.find("linux")):
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors", root_dir + "/DesktopEditors")
if ("" != base.get_env("VIDEO_PLAYER_VLC_DIR")):
vlc_dir = git_dir + "/desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc/"
if base.check_congig_option_with_platfom(platform, "libvlc"):
vlc_dir = git_dir + "/core/Common/3dParty/libvlc/build/" + platform + "/lib"
if (0 == platform.find("win")):
base.copy_file(vlc_dir + platform + "/bin/libvlc.dll", root_dir + "/libvlc.dll")
base.copy_file(vlc_dir + platform + "/bin/libvlccore.dll", root_dir + "/libvlccore.dll")
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.dll", root_dir + "/VLCQtCore.dll")
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.dll", root_dir + "/VLCQtWidgets.dll")
else:
base.copy_file(vlc_dir + platform + "/bin/libvlc.so", root_dir + "/libvlc.so")
base.copy_file(vlc_dir + platform + "/bin/libvlc.so.5", root_dir + "/libvlc.so.5")
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so", root_dir + "/libvlccore.so")
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so.8", root_dir + "/libvlccore.so.8")
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.so", root_dir + "/VLCQtCore.so")
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.so", root_dir + "/VLCQtWidgets.so")
base.copy_dir(vlc_dir + "/plugins", root_dir + "/plugins")
base.copy_files(vlc_dir + "/*.dll", root_dir)
base.copy_file(vlc_dir + "/vlc-cache-gen.exe", root_dir + "/vlc-cache-gen.exe")
elif (0 == platform.find("linux")):
base.copy_dir(vlc_dir + "/vlc/plugins", root_dir + "/plugins")
base.copy_file(vlc_dir + "/vlc/libcompat.a", root_dir + "/libcompat.a")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_pulse.so", "0.0.0")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_vdpau.so", "0.0.0")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_xcb_events.so", "0.0.0")
copy_lib_with_links(vlc_dir, root_dir, "libvlc.so", "5.6.1")
copy_lib_with_links(vlc_dir, root_dir, "libvlccore.so", "9.0.1")
base.copy_file(vlc_dir + "/vlc/vlc-cache-gen", root_dir + "/vlc-cache-gen")
if isWindowsXP:
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer/xp", root_dir, "videoplayer")
base.copy_lib(build_libraries_path + "/mediaplayer/xp", root_dir, "videoplayer")
else:
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer", root_dir, "videoplayer")
base.copy_dir(vlc_dir + platform + "/bin/plugins", root_dir + "/plugins")
base.copy_lib(build_libraries_path + "/mediaplayer", root_dir, "videoplayer")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
base.create_dir(root_dir + "/editors")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
if len(os.listdir(root_dir + "/editors/sdkjs")) == 0:
base.delete_dir(root_dir + "/editors/sdkjs") # delete empty folder. for bug 62528
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
base.delete_file(file)
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
base.create_dir(root_dir + "/editors/sdkjs-plugins")
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
if not isWindowsXP:
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True, isWindowsXP)
# remove some default plugins
if base.is_dir(root_dir + "/editors/sdkjs-plugins/speech"):
base.delete_dir(root_dir + "/editors/sdkjs-plugins/speech")
@ -237,10 +239,10 @@ def make():
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/common/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}", root_dir + "/editors/sdkjs-plugins/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}")
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/engine/database/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}", root_dir + "/editors/sdkjs-plugins/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}")
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
deploy_marketplace_plugin(git_dir, root_dir)
base.copy_file(base_dir + "/js/" + branding + "/desktop/index.html", root_dir + "/index.html")
base.create_dir(root_dir + "/editors/webext")
base.copy_file(base_dir + "/js/" + branding + "/desktop/noconnect.html", root_dir + "/editors/webext/noconnect.html")
if isWindowsXP:
base.create_dir(root_dir + "/providers")

View File

@ -95,6 +95,7 @@ def make():
if ("ios" == platform):
base.generate_plist(root_dir)
deploy_fonts(git_dir, root_dir)
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
if (0 == platform.find("mac")):
base.mac_correct_rpath_x2t(root_dir)
@ -110,8 +111,9 @@ def make():
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
# fonts
deploy_fonts(git_dir, root_dir, "android")
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
# app
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
libs_dir = root_dir + "/lib"
base.create_dir(libs_dir + "/arm64-v8a")
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.so", libs_dir + "/arm64-v8a")

60
scripts/deploy_osign.py Normal file
View File

@ -0,0 +1,60 @@
#!/usr/bin/env python
import config
import base
def make():
base_dir = base.get_script_dir() + "/../out"
git_dir = base.get_script_dir() + "/../.."
core_dir = git_dir + "/core"
branding = config.branding()
platforms = config.option("platform").split()
for native_platform in platforms:
if not native_platform in config.platforms:
continue
root_dir = base_dir + "/" + native_platform + "/" + branding + "/osign"
if base.get_env("DESTDIR_BUILD_OVERRIDE") != "":
return
if (base.is_dir(root_dir)):
base.delete_dir(root_dir)
base.create_dir(root_dir)
qt_dir = base.qt_setup(native_platform)
platform = native_platform
core_build_dir = core_dir + "/build"
if ("" != config.option("branding")):
core_build_dir += ("/" + config.option("branding"))
platform_postfix = platform + base.qt_dst_postfix()
# x2t
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "osign")
# correct ios frameworks
if ("ios" == platform):
base.generate_plist(root_dir)
for native_platform in platforms:
if native_platform == "android":
# make full version
root_dir = base_dir + "/android/" + branding + "/osign"
if (base.is_dir(root_dir)):
base.delete_dir(root_dir)
base.create_dir(root_dir)
libs_dir = root_dir + "/lib"
base.create_dir(libs_dir + "/arm64-v8a")
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/osign/*.so", libs_dir + "/arm64-v8a")
base.create_dir(libs_dir + "/armeabi-v7a")
base.copy_files(base_dir + "/android_armv7/" + branding + "/osign/*.so", libs_dir + "/armeabi-v7a")
base.create_dir(libs_dir + "/x86")
base.copy_files(base_dir + "/android_x86/" + branding + "/osign/*.so", libs_dir + "/x86")
base.create_dir(libs_dir + "/x86_64")
base.copy_files(base_dir + "/android_x86_64/" + branding + "/osign/*.so", libs_dir + "/x86_64")
break
return

View File

@ -5,6 +5,7 @@ import base
import re
import shutil
import glob
from tempfile import mkstemp
def make():
@ -40,24 +41,22 @@ def make():
build_server_dir = root_dir + '/server'
server_dir = base.get_script_dir() + "/../../server"
bin_server_dir = server_dir + "/build/server"
base.create_dir(build_server_dir + '/DocService')
base.copy_dir(bin_server_dir + '/Common/config', build_server_dir + '/Common/config')
base.copy_dir(server_dir + '/Common/config', build_server_dir + '/Common/config')
base.create_dir(build_server_dir + '/DocService')
base.copy_exe(bin_server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
base.copy_exe(server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
base.create_dir(build_server_dir + '/FileConverter')
base.copy_exe(bin_server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
base.copy_exe(server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
base.create_dir(build_server_dir + '/Metrics')
base.copy_exe(bin_server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
base.copy_dir(bin_server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
base.copy_exe(server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
base.copy_dir(server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
base.create_dir(build_server_dir + '/Metrics/node_modules/modern-syslog/build/Release')
base.copy_file(bin_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
base.copy_file(server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
qt_dir = base.qt_setup(native_platform)
platform = native_platform
@ -90,7 +89,7 @@ def make():
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server")
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server", "", "../../../dictionaries")
# icu
if (0 == platform.find("win")):
@ -115,12 +114,16 @@ def make():
js_dir = root_dir
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
+ glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
base.delete_file(file)
# add embed worker code
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
# plugins
base.create_dir(js_dir + "/sdkjs-plugins")
base.copy_marketplace_plugin(js_dir + "/sdkjs-plugins", False, True)
if ("1" == config.option("preinstalled-plugins")):
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
@ -132,8 +135,6 @@ def make():
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", js_dir + "/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(js_dir + "/sdkjs-plugins")
base.clone_marketplace_plugin(root_dir + "/sdkjs-plugins")
# tools
tools_dir = root_dir + "/server/tools"
base.create_dir(tools_dir)
@ -147,10 +148,7 @@ def make():
branding_dir = git_dir + '/' + config.option("branding") + '/server'
#dictionaries
spellchecker_dictionaries = root_dir + '/dictionaries'
spellchecker_dictionaries_files = server_dir + '/../dictionaries/*_*'
base.create_dir(spellchecker_dictionaries)
base.copy_files(spellchecker_dictionaries_files, spellchecker_dictionaries)
base.copy_dictionaries(server_dir + "/../dictionaries", root_dir + "/dictionaries")
if (0 == platform.find("win")):
exec_ext = '.exe'
@ -209,15 +207,15 @@ def make():
base.delete_dir(root_dir_snap)
base.create_dir(root_dir_snap)
base.copy_dir(root_dir, root_dir_snap)
base.copy_dir(bin_server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
base.copy_dir(bin_server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
base.copy_dir(bin_server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
base.copy_dir(server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
base.copy_dir(server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
base.copy_dir(server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
base.delete_file(root_dir_snap + '/server/DocService/docservice')
base.copy_dir(bin_server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
base.copy_dir(bin_server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
base.copy_dir(server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
base.copy_dir(server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
base.delete_file(root_dir_snap + '/server/FileConverter/converter')
base.copy_dir(bin_server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
base.copy_dir(bin_server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
base.copy_dir(server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
base.copy_dir(server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
if (base.is_dir(root_dir_snap_example)):
base.delete_dir(root_dir_snap_example)
base.create_dir(root_dir_snap_example)

View File

@ -61,13 +61,19 @@ def make():
print("Core is up to date. ---------------------------------------")
print("-----------------------------------------------------------")
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/")
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
if not base.is_dir(git_dir + "/sdkjs-plugins"):
base.create_dir(git_dir + "/sdkjs-plugins")
if not base.is_dir(git_dir + "/sdkjs-plugins/v1"):
base.create_dir(git_dir + "/sdkjs-plugins/v1")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", git_dir + "/sdkjs-plugins/v1/plugins.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", git_dir + "/sdkjs-plugins/v1/plugins-ui.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", git_dir + "/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
base.clone_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False, False)
base.copy_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False)
if not base.is_dir(git_dir + "/fonts"):
base.create_dir(git_dir + "/fonts")
@ -101,7 +107,8 @@ def make():
server_addons = []
if (config.option("server-addons") != ""):
server_addons = config.option("server-addons").rsplit(", ")
if ("server-lockstorage" in server_addons):
#server-lockstorage is private
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
server_config["editorDataStorage"] = "editorDataRedis"
sdkjs_addons = []
@ -125,6 +132,8 @@ def make():
sql["type"] = config.option("sql-type")
if (config.option("db-port") != ""):
sql["dbPort"] = config.option("db-port")
if (config.option("db-name") != ""):
sql["dbName"] = config.option("db-name")
if (config.option("db-user") != ""):
sql["dbUser"] = config.option("db-user")
if (config.option("db-pass") != ""):
@ -144,8 +153,10 @@ def make():
example_config["siteUrl"] = "http://" + config.option("siteUrl") + ":8000/"
example_config["apiUrl"] = "web-apps/apps/api/documents/api.js"
example_config["preloaderUrl"] = "web-apps/apps/api/documents/cache-scripts.html"
json_file = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/local-development-" + base.host_platform() + ".json"
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
json_dir = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/"
json_file = json_dir + "/local-development-" + base.host_platform() + ".json"
if base.is_exist(json_dir):
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
os.chdir(old_cur)
return

View File

@ -66,6 +66,28 @@ class CDependencies:
res += ['--remove-path', item]
return res
def check__docker_dependencies():
if (host_platform == 'windows' and not check_vc_components()):
return False
if (host_platform == 'mac'):
return True
checksResult = CDependencies()
checksResult.append(check_nodejs())
checksResult.append(check_7z())
if (len(checksResult.install) > 0):
install_args = ['install.py']
install_args += checksResult.get_uninstall()
install_args += checksResult.get_removepath()
install_args += checksResult.get_install()
base_dir = base.get_script_dir(__file__)
install_args[0] = './scripts/develop/' + install_args[0]
if (host_platform == 'windows'):
code = libwindows.sudo(unicode(sys.executable), install_args)
elif (host_platform == 'linux'):
get_updates()
base.cmd_in_dir(base_dir + "/../../", 'python', install_args, False)
def check_dependencies():
if (host_platform == 'windows' and not check_vc_components()):
return False
@ -168,21 +190,21 @@ def check_nodejs():
nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
print('Installed Node.js version: ' + nodejs_version[1:])
nodejs_min_version = '14.14'
nodejs_min_version = '18'
nodejs_min_version_minor = 0
major_minor_min_version = nodejs_min_version.split('.')
nodejs_min_version_major = int(major_minor_min_version[0])
if len(major_minor_min_version) > 1:
nodejs_min_version_minor = int(major_minor_min_version[1])
nodejs_max_version = '14'
nodejs_max_version = ""
nodejs_max_version_minor = float("inf")
major_minor_max_version = nodejs_max_version.split('.')
nodejs_max_version_major = int(major_minor_max_version[0])
# nodejs_max_version_major = int(major_minor_max_version[0])
nodejs_max_version_major = float("inf")
if len(major_minor_max_version) > 1:
nodejs_max_version_minor = int(major_minor_max_version[1])
if (nodejs_min_version_major > nodejs_cur_version_major or nodejs_cur_version_major > nodejs_max_version_major):
print('Installed Node.js version must be 14.14 to 14.x')
isNeedReinstall = True
elif (nodejs_min_version_major == nodejs_cur_version_major):
if (nodejs_min_version_minor > nodejs_cur_version_minor):
@ -192,7 +214,7 @@ def check_nodejs():
isNeedReinstall = True
if (True == isNeedReinstall):
print('Installed Node.js version must be 14.14 to 14.x')
print('Installed Node.js version must be 18 or higher.')
if (host_platform == 'windows'):
dependence.append_uninstall('Node.js')
dependence.append_install('Node.js')
@ -461,8 +483,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
mysqlPath += 'bin'
return mysqlPath
def get_mysqlLoginSrting():
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
def get_mysqlLoginString():
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
def get_mysqlServersInfo():
arrInfo = []
@ -489,14 +511,14 @@ def get_mysqlServersInfo():
def check_mysqlServer():
base.print_info('Check MySQL Server')
dependence = CDependencies()
mysqlLoginSrt = get_mysqlLoginSrting()
mysqlLoginSrt = get_mysqlLoginString()
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
if (host_platform != 'windows'):
result = os.system(mysqlLoginSrt + ' -e "exit"')
if (result == 0):
connectionResult = base.run_command(connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
print('MySQL configuration is valid')
dependence.sqlPath = 'mysql'
return dependence
@ -513,7 +535,7 @@ def check_mysqlServer():
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
print(mysql_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -537,23 +559,43 @@ def check_mysqlServer():
return dependence
def check_MySQLConfig(mysqlPath = ''):
result = True
mysqlLoginSrt = get_mysqlLoginSrting()
mysqlLoginSrt = get_mysqlLoginString()
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
print('Database onlyoffice not found')
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
print('Database "' + config.option("db-name") + '" not found')
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
if (not result):
return False
print('Creating ' + config.option("db-name") + ' tables ...')
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
print('Password encryption is not valid')
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
return result
def execMySQLScript(mysql_path_to_bin, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginSrting()
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
mysqlLoginSrt = get_mysqlLoginString()
print('CREATE DATABASE ' + dbName + ';')
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
print('failed CREATE DATABASE ' + dbName + ';')
return False
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# return False
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# return False
return True
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginString()
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
if (code != 0):
print('Execution failed!')
return False
@ -562,7 +604,7 @@ def execMySQLScript(mysql_path_to_bin, scriptPath):
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
print('Setting MySQL password encrypting...')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
if (code != 0):
print('Setting password encryption failed!')
return False
@ -589,7 +631,7 @@ def get_postrgre_path_to_bin(postgrePath = ''):
def get_postgreLoginSrting(userName):
if (host_platform == 'windows'):
return 'psql -U' + userName + ' '
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U' + userName + ' -hlocalhost '
def get_postgreSQLInfoByFlag(flag):
arrInfo = []
@ -625,7 +667,7 @@ def check_postgreSQL():
result = os.system(postgreLoginSrt + ' -c "\q"')
connectionResult = base.run_command(connectionString)['stdout']
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
if (result != 0 or connectionResult.find(config.option("db-port")) == -1):
print('Valid PostgreSQL not found!')
dependence.append_install('PostgreSQL')
dependence.append_uninstall('PostgreSQL')
@ -635,7 +677,7 @@ def check_postgreSQL():
return dependence
arrInfo = get_postgreSQLInfo()
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
base.set_env('PGPASSWORD', config.option("db-pass"))
for info in arrInfo:
if (base.is_dir(info['Location']) == False):
continue
@ -643,7 +685,7 @@ def check_postgreSQL():
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
if (connectionResult.find(config.option("db-port")) != -1):
print(postgre_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -661,12 +703,12 @@ def check_postgreSQL():
def check_postgreConfig(postgrePath = ''):
result = True
if (host_platform == 'windows'):
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
base.set_env('PGPASSWORD', config.option("db-pass"))
rootUser = install_params['PostgreSQL']['root']
dbUser = install_params['PostgreSQL']['dbUser']
dbName = install_params['PostgreSQL']['dbName']
dbPass = install_params['PostgreSQL']['dbPass']
dbUser = config.option("db-user")
dbName = config.option("db-name")
dbPass = config.option("db-pass")
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
postgreLoginRoot = get_postgreLoginSrting(rootUser)
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
@ -683,7 +725,7 @@ def check_postgreConfig(postgrePath = ''):
base.print_info('Creating ' + dbName + ' user...')
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find(config.option("db-name")) == -1):
print('Database ' + dbName + ' not found')
base.print_info('Creating ' + dbName + ' database...')
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
@ -862,13 +904,13 @@ def install_gruntcli():
def install_mysqlserver():
if (host_platform == 'windows'):
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
elif (host_platform == 'linux'):
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
code = os.system('sudo apt-get -y install zsh htop') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
return os.system('yes | sudo apt install mysql-server') and code
return 1
@ -890,7 +932,7 @@ def install_postgresql():
file_name = "install.exe"
base.download(download_url, file_name)
base.print_info("Install PostgreSQL...")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
else:
base.print_info("Install PostgreSQL...")
install_command = 'sudo apt install postgresql -y'
@ -901,12 +943,12 @@ def install_postgresql():
if (host_platform == 'windows'):
base.delete_file(file_name)
else:
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
return code
def install_nodejs():
os.system('curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -')
os.system('curl -sSL https://deb.nodesource.com/setup_18.x | sudo -E bash -')
base.print_info("Install node.js...")
install_command = 'yes | sudo apt install nodejs'
print(install_command)
@ -915,7 +957,7 @@ def install_nodejs():
downloads_list = {
'Windows': {
'Git': 'https://github.com/git-for-windows/git/releases/download/v2.29.0.windows.1/Git-2.29.0-64-bit.exe',
'Node.js': 'https://nodejs.org/download/release/v14.17.6/node-v14.17.6-x64.msi',
'Node.js': 'https://nodejs.org/dist/v18.17.1/node-v18.17.1-x64.msi',
'Java': 'https://aka.ms/download-jdk/microsoft-jdk-11.0.18-windows-x64.msi',
'RabbitMQ': 'https://github.com/rabbitmq/rabbitmq-server/releases/download/v3.8.9/rabbitmq-server-3.8.9.exe',
'Erlang': 'http://erlang.org/download/otp_win64_23.1.exe',
@ -952,18 +994,11 @@ install_params = {
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
'Git': '/VERYSILENT /NORESTART',
'MySQLServer': {
'port': '3306',
'user': 'root',
'pass': 'onlyoffice',
'version': '8.0.21'
},
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
'PostgreSQL': {
'root': 'postgres',
'dbPort': '5432',
'dbName': 'onlyoffice',
'dbUser': 'onlyoffice',
'dbPass': 'onlyoffice'
'root': 'postgres'
}
}
uninstall_params = {

View File

@ -10,11 +10,15 @@ import config_server as develop_config_server
base_dir = base.get_script_dir(__file__)
def make():
if ("1" != config.option("develop")):
return
if not dependence.check_dependencies():
exit(1)
def build_docker_server():
dependence.check__docker_dependencies()
build_develop_server()
def build_docker_sdk_web_apps(dir):
dependence.check__docker_dependencies()
build_js.build_js_develop(dir)
def build_develop_server():
build_server.build_server_develop()
build_js.build_js_develop(base_dir + "/../../..")
develop_config_server.make()
@ -22,5 +26,12 @@ def make():
branding_develop_script_dir = base_dir + "/../../../" + config.option("branding") + "/build_tools/scripts"
if base.is_file(branding_develop_script_dir + "/develop.py"):
base.cmd_in_dir(branding_develop_script_dir, "python", ["develop.py"], True)
def make():
if ("1" != config.option("develop")):
return
if not dependence.check_dependencies():
exit(1)
build_develop_server()
exit(0)

View File

@ -6,6 +6,9 @@ import os
import base
import dependence
import traceback
import develop
base_dir = base.get_script_dir(__file__)
def install_module(path):
base.print_info('Install: ' + path)
@ -41,56 +44,98 @@ def start_linux_services():
os.system('sudo service rabbitmq-server restart')
def run_integration_example():
base.cmd_in_dir('../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
if base.is_exist(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs'):
base.cmd_in_dir(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
def start_linux_services():
base.print_info('Restart MySQL Server')
def update_config(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
def make_start():
base.configure_common_apps()
platform = base.host_platform()
if ("windows" == platform):
dependence.check_pythonPath()
dependence.check_gitPath()
restart_win_rabbit()
elif ("mac" == platform):
start_mac_services()
elif ("linux" == platform):
start_linux_services()
def make_configure(args):
base.print_info('Build modules')
update_config(args)
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
def make_install():
platform = base.host_platform()
run_integration_example()
base.create_dir(base_dir + '/../../../server/App_Data')
install_module(base_dir + '/../../../server/DocService')
install_module(base_dir + '/../../../server/Common')
install_module(base_dir + '/../../../server/FileConverter')
def make_run():
platform = base.host_platform()
base.set_env('NODE_ENV', 'development-' + platform)
base.set_env('NODE_CONFIG_DIR', '../Common/config')
if ("mac" == platform):
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
elif ("linux" == platform):
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
run_module(base_dir + '/../../../server/DocService', ['sources/server.js'])
#run_module(base_dir + '/../../../server/DocService', ['sources/gc.js'])
run_module(base_dir + '/../../../server/FileConverter', ['sources/convertermaster.js'])
#run_module(base_dir + '/../../../server/SpellChecker', ['sources/server.js'])
def run_docker_server(args = []):
try:
make_start()
develop.build_docker_server()
make_install()
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)
except KeyboardInterrupt:
pass
except:
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
def run_docker_sdk_web_apps(dir):
try:
develop.build_docker_sdk_web_apps(dir)
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)
except KeyboardInterrupt:
pass
except:
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
def make(args = []):
try:
base.configure_common_apps()
platform = base.host_platform()
if ("windows" == platform):
dependence.check_pythonPath()
dependence.check_gitPath()
restart_win_rabbit()
elif ("mac" == platform):
start_mac_services()
elif ("linux" == platform):
start_linux_services()
make_start()
make_configure(args)
make_install()
make_run()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
base.print_info('Build modules')
if ("linux" == platform):
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
base.cmd_in_dir('../../', 'python', ['make.py'])
run_integration_example()
base.create_dir('../../../server/App_Data')
install_module('../../../server/DocService')
install_module('../../../server/Common')
install_module('../../../server/FileConverter')
base.set_env('NODE_ENV', 'development-' + platform)
base.set_env('NODE_CONFIG_DIR', '../Common/config')
if ("mac" == platform):
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
elif ("linux" == platform):
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
run_module('../../../server/DocService', ['sources/server.js'])
# run_module('../../../server/DocService', ['sources/gc.js'])
run_module('../../../server/FileConverter', ['sources/convertermaster.js'])
# run_module('../../../server/SpellChecker', ['sources/server.js'])
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)

View File

@ -0,0 +1,162 @@
# license_checker
## Overview
**license_checker** allow you to automatically check
licenses inside specified code files.
## How to use
### Running
**Note**: Pyhton 3.9 and above required
(otherwise `TypeError: 'type' object is not subscriptable`)
* Linux
```bash
python3 license_checker.py
```
* Windows
```bash
python license_checker.py
```
## How to configure
The checker settings are specified in the `config.json`.
The path to the license template is indicated there.
### How to specify a license template
The license template is a plain text
file where the license text is indicated
as you would like to see the license at
the beginning of the file.
### How to configure `config.json`
#### Сonfig parameters
* `basePath` specifies which folder the
paths will be relative to.
**For example:**
```json
"basePath": "../../../"
```
* `reportFolder` specifies in which folder to
save text files with reports.
**For example:**
```json
"reportFolder": "build_tools/scripts/license_checker/reports"
```
* `printChecking` specifies whether to output
information about which file is
being checked to the console.
**For example:**
```json
"printChecking": false
```
* `printReports` specifies whether to output
reports to the console.
**For example:**
```json
"printReports": false
```
* `fix` specifies which categories of reports
should be repaired automatically.
Possible array values:
`"OUTDATED"`,
`"NO_LICENSE"`,
`"INVALID_LICENSE"`,
`"LEN_MISMATCH"`.
**For example:**
```json
"fix": ["OUTDATED", "NO_LICENSE"],
```
Automatically repair files where the license is outdated or not found.
* `configs` license check and repair configurations.
* `dir` folder to check.
**For example:**
```json
"dir": "sdkjs"
```
* `fileExtensions` file extensions to check.
**For example:**
```json
"fileExtensions": [".js"]
```
* `licensePath` specifies the path to the license template.
**For example:**
```json
"licensePath": "header.license"
```
* `ignoreListDir` folder paths to ignore.
**For example:**
```json
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
"sdkjs/configs",
"sdkjs/common/AllFonts.js",
"sdkjs/slide/themes/themes.js"
]
```
* `ignoreListDirName` folder names to ignore.
**For example:**
```json
"ignoreListDirName": [
"node_modules",
"vendor"
]
```
* `ignoreListFile` file paths to ignore.
**For example:**
```json
"ignoreListFile": [
"sdkjs/develop/awesomeFileToIgnore.js",
]
```
* `allowListFile` file paths to allow. It is needed if you ignore the directory, but there is a file in it that needs to be checked.
**For example:**
```json
"ignoreListDir": [
"sdkjs/develop"
],
"allowListFile": [
"sdkjs/develop/awesomeFileToAllow.js",
]
```
Any number of configurations can be
specified, they can overlap
if we need to check
files in the same folder in different ways.

View File

@ -0,0 +1,202 @@
{
"basePath": "../../../",
"reportFolder": "build_tools/scripts/license_checker/reports",
"printChecking": false,
"printReports": false,
"fix": ["OUTDATED"],
"configs": [
{
"dir": "core",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
"licensePath": "header.license",
"ignoreListDir": [
"core/build",
"core/Common/cfcpp/test",
"core/Common/js",
"core/DesktopEditor/agg-2.4",
"core/DesktopEditor/cximage",
"core/DesktopEditor/freetype_names/freetype-2.5.3",
"core/DesktopEditor/freetype-2.5.2",
"core/DesktopEditor/freetype-2.10.4",
"core/DesktopEditor/raster/JBig2",
"core/DesktopEditor/raster/Jp2",
"core/DesktopEditor/xml/libxml2",
"core/DesktopEditor/xmlsec",
"core/DjVuFile/libdjvu",
"core/DjVuFile/wasm",
"core/EpubFile",
"core/OOXML/PPTXFormat/Limit/pri",
"core/Fb2File",
"core/HtmlFile2",
"core/HtmlFile2",
"core/OdfFile/Common/utf8cpp",
"core/OfficeUtils/js/emsdk",
"core/OfficeUtils/src/zlib-1.2.11",
"core/PdfFile/lib",
"core/UnicodeConverter/icubuilds-mac",
"core/UnicodeConverter/icubuilds-win32"
],
"ignoreListDirName": [
"node_modules",
"vendor",
"3dParty"
],
"ignoreListFile": [
"core/Test/CoAuthoring/settings.js",
"core/OdfFile/Projects/Linux/precompiled.h",
"core/MsBinaryFile/Projects/XlsFormatLib/Linux/precompiled.h"
],
"allowListFile": [
"core/DesktopEditor/freetype_names/FontMaps/FontMaps.cpp",
"core/Common/3dParty/openssl/test/main.cpp ",
"core/Common/3dParty/openssl/common/common_openssl.h",
"core/Common/3dParty/openssl/common/common_openssl.cpp"
]
},
{
"dir": "core-ext",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"ignoreListDir": [
"core-ext/AutoTester",
"core-ext/cell_android",
"core-ext/cell_android",
"core-ext/desktop-sdk-private",
"core-ext/docbuilder",
"core-ext/Registration",
"core-ext/slide_android",
"core-ext/test",
"core-ext/word_android",
"core-ext/word_ios"
],
"ignoreListFile": [
"core-ext/native_base/json.hpp",
"core-ext/native_base/android_base/libeditors/src/main/cpp/workaround/swab/swab.h"
]
},
{
"dir": "sdkjs",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
"sdkjs/configs"
],
"ignoreListDirName": [
"node_modules",
"vendor"
],
"ignoreListFile": [
"sdkjs/common/externs/jquery-3.2.js",
"sdkjs/common/externs/socket.io.js",
"sdkjs/common/Native/jquery_native.js",
"sdkjs/common/AllFonts.js",
"sdkjs/slide/themes/themes.js"
]
},
{
"dir": "sdkjs-forms",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "sdkjs-ooxml",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "web-apps",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor",
"search"
],
"ignoreListDir": [
"web-apps/apps/common/mobile",
"web-apps/apps/common/main/lib/mods",
"web-apps/apps/documenteditor/mobile",
"web-apps/apps/spreadsheeteditor/mobile",
"web-apps/apps/presentationeditor/mobile",
"web-apps/build/plugins/grunt-inline"
],
"ignoreListFile": [
"web-apps/apps/api/documents/api.js",
"web-apps/apps/common/main/lib/core/application.js",
"web-apps/apps/common/main/lib/core/keymaster.js",
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
]
},
{
"dir": "web-apps-mobile",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "server",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDir": [
"server/FileConverter/bin"
],
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-lockstorage",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-license",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-license-key",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "editors-ios",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"ignoreListDirName": [
"vendor",
"Vendor",
"3dParty"
],
"allowListFile": [
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.h",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.m",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.h",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.m"
]
}
]
}

View File

@ -0,0 +1,31 @@
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/

View File

@ -0,0 +1,339 @@
import os
import re
import enum
import json
import codecs
CONFIG_PATH = 'config.json'
class ErrorType(enum.Enum):
INVALID_LICENSE = 1
NO_LICENSE = 2
OUTDATED = 3
LEN_MISMATCH = 4
FIX_TYPES = {
'OUTDATED': ErrorType.OUTDATED,
'NO_LICENSE': ErrorType.NO_LICENSE,
'INVALID_LICENSE': ErrorType.INVALID_LICENSE,
'LEN_MISMATCH': ErrorType.LEN_MISMATCH
}
class Config(object):
    """
    License checker configuration.

    Attributes:
        dir: Directory to check.
        fileExtensions: File extensions to check.
        licensePath: Path to the license template file.
        allowListFile: File paths that are always checked.
        ignoreListDir: Ignored folder paths.
        ignoreListDirName: Ignored folder names.
        ignoreListFile: Ignored file paths.
    """

    def __init__(self,
                 dir: str,
                 fileExtensions: list[str],
                 licensePath: str = 'header.license',
                 allowListFile: list[str] = None,
                 ignoreListDir: list[str] = None,
                 ignoreListDirName: list[str] = None,
                 ignoreListFile: list[str] = None) -> None:
        self._dir = dir
        self._fileExtensions = fileExtensions
        # None sentinels instead of `= []` defaults: a literal-list default
        # is created once and shared by every Config instance (classic
        # mutable-default pitfall).
        self._allowListFile = allowListFile if allowListFile is not None else []
        self._ignoreListDir = ignoreListDir if ignoreListDir is not None else []
        self._ignoreListDirName = ignoreListDirName if ignoreListDirName is not None else []
        self._ignoreListFile = ignoreListFile if ignoreListFile is not None else []
        # Read the license template once; keep the raw lines plus the first
        # and last non-blank lines as the comment open/close markers.
        with open(licensePath, 'r', encoding="utf8") as file:
            lines = file.readlines()
        if not lines:
            raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is not it empty?')
        non_empty_lines = [s for s in lines if not s.isspace()]
        self._startMultiComm = non_empty_lines[0]
        self._endMultiComm = non_empty_lines[-1]
        self._license_lines = lines

    def getDir(self) -> str:
        return self._dir

    def getFileExtensions(self) -> list[str]:
        return self._fileExtensions

    def getStartMultiComm(self) -> str:
        # Opening comment marker, e.g. '/*\n'.
        return self._startMultiComm

    def getEndMultiComm(self) -> str:
        # Closing comment marker, e.g. ' */\n'.
        return self._endMultiComm

    def getLicense(self) -> list[str]:
        return self._license_lines

    def getAllowListFile(self) -> list[str]:
        return self._allowListFile

    def getIgnoreListDir(self) -> list[str]:
        return self._ignoreListDir

    def getIgnoreListDirName(self) -> list[str]:
        return self._ignoreListDirName

    def getIgnoreListFile(self) -> list[str]:
        return self._ignoreListFile
# --- Module-level configuration loading -----------------------------------
# Read the checker's settings from config.json (relative to the launch CWD).
with open(CONFIG_PATH, 'r') as j:
    _json: dict = json.load(j)
BASE_PATH: str = _json.get('basePath') or '../../../'
# Report directory path; resolved after the os.chdir(BASE_PATH) below.
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'
if (_json.get('fix')):
    try:
        # Translate the config's strings into ErrorType members.
        FIX: list[ErrorType] = list(map(lambda x: FIX_TYPES[x], _json.get('fix')))
    except KeyError:
        raise Exception(f'KeyError. "fix" cannot process value. It must be an array of strings. Check {CONFIG_PATH}. Possible array values: "OUTDATED", "NO_LICENSE", "INVALID_LICENSE", "LEN_MISMATCH"')
else:
    # Falsy sentinel (not a list): the main body skips automatic fixing when
    # FIX is falsy, while Fixer.fix() treats a falsy FIX as "fix every report".
    FIX = False
PRINT_CHECKING: bool = _json.get('printChecking')
PRINT_REPORTS: bool = _json.get('printReports')
CONFIGS: list[Config] = []
for i in _json.get('configs'):
    CONFIGS.append(Config(**i))
# From here on all relative paths are resolved from the repository root.
os.chdir(BASE_PATH)
class Error(object):
    """Wraps an ErrorType together with its human-readable description."""

    # Class-level constant: the message table never varies per instance, so
    # there is no reason to rebuild the dict in every __init__ call.
    _ERROR_MESSAGES = {
        ErrorType.INVALID_LICENSE: 'Detected license is invalid',
        ErrorType.NO_LICENSE: 'The license was not found',
        ErrorType.OUTDATED: 'Detected license is outdated',
        ErrorType.LEN_MISMATCH: 'Detected license length does not match pattern'
    }

    def __init__(self, errorType: ErrorType) -> None:
        self._errorType = errorType

    def getErrorType(self) -> ErrorType:
        return self._errorType

    def getErrorMessage(self) -> str:
        # .get preserves the original behavior of returning None for an
        # unknown error type instead of raising KeyError.
        return self._ERROR_MESSAGES.get(self._errorType)
class Report(object):
    """One finding: the offending file, the Error, and an optional detail."""

    def __init__(self, pathToFile: str, error: Error, message: str = '') -> None:
        self._pathToFile = pathToFile
        self._error = error
        self._message = message

    def getPathToFile(self) -> str:
        return self._pathToFile

    def getError(self) -> Error:
        return self._error

    def getMessage(self) -> str:
        return self._message

    def report(self) -> str:
        """Render this finding as a single human-readable line."""
        return '{}: {}. {}.'.format(self.getPathToFile(),
                                    self.getError().getErrorMessage(),
                                    self.getMessage())
class Checker(object):
    """Checks file contents against the configured license template and
    accumulates a Report per failing file."""

    def __init__(self, config: Config) -> None:
        self._config = config
        self._reports: list[Report] = []  # findings appended by checkFile()

    def getReports(self):
        return self._reports

    def _checkLine(self, line: str, prefix: str) -> bool:
        """Return True if `line` contains `prefix`.

        The prefix is left-trimmed first so licenses written without the
        usual leading spaces are still recognised.
        """
        # The original re.search(re.escape(prefix), line) is just a
        # substring test; `in` says so directly and skips the regex engine.
        return prefix.lstrip() in line

    def findLicense(self, lines: list[str]) -> list[str]:
        """Return the leading comment block of `lines`, or [] if none.

        Collects lines from the first one matching the template's opening
        comment marker up to and including the first one matching the
        closing marker. Blank lines are skipped; scanning stops at the
        first non-blank, non-comment line.
        """
        result = []
        isStarted = False
        for line in lines:
            if line == '\n': continue
            if (self._checkLine(line=line, prefix=self._config.getStartMultiComm())):
                result.append(line)
                isStarted = True
            elif (self._checkLine(line=line, prefix=self._config.getEndMultiComm())):
                # Closing marker: include it and stop.
                result.append(line)
                break
            elif (isStarted):
                result.append(line)
            else:
                # First non-blank line is not a comment opener: no license.
                break
        return result

    def _checkLicense(self, test: list[str], pathToFile: str) -> Report:
        """Compare a found comment block against the template.

        Returns a Report describing the mismatch, or None (implicitly)
        when the block matches the template exactly.
        """
        template = self._config.getLicense()
        if len(template) != len(test):
            return Report(pathToFile=pathToFile,
                          error=Error(errorType=ErrorType.LEN_MISMATCH),
                          message=f'Found {len(test)} lines, expected {len(template)}')
        invalidLinesCount = 0
        lastWrongLine = 0
        for i in range(len(template)):
            if (template[i] != test[i]):
                invalidLinesCount += 1
                lastWrongLine = i
        if (invalidLinesCount == 1):
            # A single wrong line is usually just an outdated copyright
            # year: compare the last 4-digit number on that line in both
            # the file and the template.
            r = r'\d\d\d\d'
            testDate = re.findall(r, test[lastWrongLine])
            licenseDate = re.findall(r, template[lastWrongLine])
            if not (testDate and licenseDate):
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.INVALID_LICENSE),
                              message=f'Something wrong...')
            testLastYear = int(testDate[-1])
            licenseLastYear = int(licenseDate[-1])
            if (testLastYear < licenseLastYear):
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.OUTDATED),
                              message=f'Found date {testLastYear}, expected {licenseLastYear}')
            else:
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.INVALID_LICENSE),
                              message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
        elif (invalidLinesCount > 0):
            return Report(pathToFile=pathToFile,
                          error=Error(errorType=ErrorType.INVALID_LICENSE),
                          message=f'Found {invalidLinesCount} wrong lines out of {len(template)}')

    def checkFile(self, pathToFile: str) -> None:
        """Check one file; append a Report to the list if it fails."""
        # utf-8-sig transparently strips a UTF-8 BOM when present.
        with open(pathToFile, 'r', encoding="utf-8-sig") as file:
            test = self.findLicense(lines=file.readlines())
        if test:
            result = self._checkLicense(test=test, pathToFile=pathToFile)
            if result:
                self._reports.append(result)
        else:
            self._reports.append(Report(pathToFile=pathToFile, error=Error(errorType=ErrorType.NO_LICENSE)))
        return
class Walker(object):
    """Walks one configured directory tree and feeds matching files to a
    Checker instance it owns."""

    def __init__(self, config: Config) -> None:
        self._config = config
        self._checker = Checker(config=self._config)

    def getChecker(self):
        # The Checker holds the reports collected by checkFiles().
        return self._checker

    def getConfig(self):
        return self._config

    def _getFiles(self) -> list[str]:
        """Collect paths of all files that should be license-checked.

        Allow-listed files (with a matching extension) are always included.
        Other files are included only when their directory matches neither
        the ignored-name nor ignored-path lists and the file itself is not
        in the ignore-file list.

        NOTE(review): the source arrived without indentation; the `else`
        after the allow-list loop is reconstructed as a for/else (which
        always runs, since that loop contains no break) — the alternative
        reading (if/else per file) would duplicate entries massively.
        Confirm against upstream.
        """
        result = []
        for address, dirs, files in os.walk(self._config.getDir()):
            for i in files:
                if (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getAllowListFile()))):
                    filename, file_extension = os.path.splitext(i)
                    if file_extension in self._config.getFileExtensions():
                        result.append(os.path.join(address, i))
            else:
                # Skip the directory when its path matches an ignored name.
                for i in self._config.getIgnoreListDirName():
                    if(re.search(re.escape(i), address)):
                        break
                else:
                    # Then skip it when it matches an ignored path.
                    for i in self._config.getIgnoreListDir():
                        if(re.search(re.escape(os.path.normpath(i)), address)):
                            break
                    else:
                        # Directory not ignored: take every non-ignored file
                        # with a matching extension.
                        for i in files:
                            if not (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getIgnoreListFile()))):
                                filename, file_extension = os.path.splitext(i)
                                if file_extension in self._config.getFileExtensions():
                                    result.append(os.path.join(address, i))
        return result

    def checkFiles(self) -> list[Report]:
        """Run the checker over every collected file; return its reports."""
        files = self._getFiles()
        for file in files:
            if (PRINT_CHECKING):
                print(f'Checking {file}...')
            try:
                self._checker.checkFile(file)
            except Exception as e:
                # Log the failing file but keep checking the rest.
                print(file)
                print(e)
        return self._checker.getReports()
class Fixer(object):
    """Rewrites files flagged by a Walker's Checker so they carry the
    current license template."""

    def __init__(self, walker: Walker) -> None:  # was wrongly annotated -> int
        self._walker = walker
        self._checker = self._walker.getChecker()
        self._config = self._walker.getConfig()

    def fix(self):
        """Fix every report selected by the module-level FIX filter.

        When FIX is falsy all reports are fixed; otherwise only reports
        whose error type is listed in FIX. Returns the number of files
        rewritten.
        """
        count = 0
        for report in self._checker.getReports():
            errorType = report.getError().getErrorType()
            # Equivalent to the original pair of conditions:
            # proceed iff (not FIX) or (errorType in FIX).
            if FIX and errorType not in FIX:
                continue
            if errorType == ErrorType.NO_LICENSE:
                self._addLicense(report.getPathToFile())
            else:
                self._fixLicense(report.getPathToFile())
            count += 1
        return count

    def _addLicense(self, pathToFile: str):
        """Prepend the license template (plus a blank line) to a file."""
        with open(pathToFile, 'r', encoding="utf8") as file:
            buffer = file.readlines()
        with open(pathToFile, 'w', encoding="utf8") as file:
            file.writelines(self._config.getLicense())
            file.write('\n')
            file.writelines(buffer)
        return

    def _fixLicense(self, pathToFile: str):
        """Replace a wrong or outdated license block with the template.

        Re-writes with utf-8-sig when the file starts with a UTF-8 BOM,
        preserving the BOM.
        """
        writeEncoding = "utf8"
        with open(pathToFile, 'r', encoding="utf8") as file:
            buffer = file.readlines()
        # Reading as plain utf8 leaves a BOM as '\ufeff' at the start.
        if buffer and buffer[0].startswith(codecs.decode(codecs.BOM_UTF8)):
            writeEncoding = "utf-8-sig"
        oldLicense = self._checker.findLicense(buffer)
        for line in oldLicense:
            buffer.remove(line)
        with open(pathToFile, 'w', encoding=writeEncoding) as file:
            file.writelines(self._config.getLicense())
            file.writelines(buffer)
        return
# Module-level accumulators filled by the script body at the bottom of the file.
walkers: list[Walker] = []
reports: list[Report] = []
def fix(walkers):
    """Run a Fixer over every walker and print how many files were fixed.

    Uses the module-level FIX filter and `reports` list for the summary
    messages only; the actual selection happens inside Fixer.fix().
    """
    count = 0
    if FIX:
        print('Fixing selected files...')
    else:
        print(f'Fixing all {len(reports)} files...')
    for walker in walkers:
        fixer = Fixer(walker=walker)
        count += fixer.fix()
    print(f'Fixed {count} files.')
def writeReports(reports: list[Report]) -> None:
    """Group the reports by error type and write one text file per type
    into REPORT_FOLDER (one rendered report per line)."""
    grouped: dict[str, list[Report]] = {err.name: [] for err in ErrorType}
    for rep in reports:
        grouped[rep.getError().getErrorType().name].append(rep)
    for err in ErrorType:
        with open(f'{REPORT_FOLDER}/{err.name}.txt', 'w', encoding="utf8") as f:
            f.writelines(rep.report() + '\n' for rep in grouped[err.name])
# --- Script body -----------------------------------------------------------
# Build one Walker per configuration entry.
for config in CONFIGS:
    walkers.append(Walker(config=config))
print('Checking files...')
# Gather every walker's reports into one flat list.
for walker in walkers:
    reports = reports + walker.checkFiles()
if reports:
    # NOTE(review): os.mkdir assumes REPORT_FOLDER's parent already exists;
    # os.makedirs would be safer if the layout ever changes.
    if not os.path.exists(REPORT_FOLDER):
        os.mkdir(REPORT_FOLDER)
    if PRINT_REPORTS:
        print('\n'.join(map(lambda report: report.report(), reports)))
    print(f'{len(reports)} invalid licenses were found.')
    print(f'Saving reports in {REPORT_FOLDER}')
    writeReports(reports=reports)
    # Automatic fixing only runs when config.json supplies a "fix" selection.
    if FIX:
        fix(walkers=walkers)
    # else:
    #     choice = str(input(f'Fix it automatically? [Y/N] ')).lower()
    #     if choice == 'y':
    #         fix(walkers=walkers)
else:
    print('All licenses are ok.')
# os.system('pause')

View File

@ -17,10 +17,7 @@ if utils.is_windows():
desktop_product_name = "Desktop Editors"
desktop_product_name_s = desktop_product_name.replace(" ","")
desktop_package_name = company_name + "-" + desktop_product_name_s
desktop_vcredist_list = ["2022"]
desktop_changes_dir = "desktop-apps/win-linux/package/windows/update/changes"
desktop_changes_url = "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/changes"
desktop_updates_url = "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/updates"
if utils.is_macos():
desktop_package_name = "ONLYOFFICE"
@ -33,6 +30,59 @@ if utils.is_macos():
builder_product_name = "Document Builder"
if utils.is_linux():
desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"]
builder_make_targets = ["deb", "rpm"] # tar
server_make_targets = ["deb", "rpm", "tar"]
builder_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "builder/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "builder/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "builder/linux/rhel/"
}
]
desktop_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "desktop/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "desktop/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "desktop/linux/rhel/"
},
{
"make": "rpm-suse",
"src": "rpm-suse/build/RPMS/*/*.rpm",
"dst": "desktop/linux/suse/"
}
]
server_make_targets = [
{
"make": "deb",
"src": "deb/*.deb",
"dst": "server/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "server/linux/rhel/"
},
{
"make": "tar",
"src": "*.tar*",
"dst": "server/linux/snap/"
}
]

View File

@ -9,39 +9,24 @@ def make():
utils.log_h1("BUILDER")
if utils.is_windows():
make_windows()
elif utils.is_macos():
make_macos()
elif utils.is_linux():
make_linux()
else:
utils.log("Unsupported host OS")
return
def aws_s3_upload(files, key, ptype=None):
if not files:
return False
def s3_upload(files, dst):
if not files: return False
ret = True
key = "builder/" + key
for file in files:
if not utils.is_file(file):
utils.log_err("file not exist: " + file)
ret &= False
continue
args = ["aws"]
if hasattr(branding, "s3_endpoint_url"):
args += ["--endpoint-url=" + branding.s3_endpoint_url]
args += [
"s3", "cp", "--no-progress", "--acl", "public-read",
"--metadata", "md5=" + utils.get_md5(file),
file, "s3://" + branding.s3_bucket + "/" + key
]
if common.os_family == "windows":
upload = utils.cmd(*args, verbose=True)
else:
upload = utils.sh(" ".join(args), verbose=True)
for f in files:
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
if upload:
utils.add_deploy_data(key)
utils.log("URL: " + branding.s3_base_url + "/" + key)
ret &= upload
if upload and ptype is not None:
full_key = key
if full_key.endswith("/"): full_key += utils.get_basename(file)
utils.add_deploy_data("builder", ptype, file, full_key)
return ret
def make_windows():
@ -85,7 +70,7 @@ def make_zip():
if common.deploy and ret:
utils.log_h2("builder zip deploy")
ret = aws_s3_upload(["build\\" + zip_file], "win/generic/", "Portable")
ret = s3_upload(["build\\" + zip_file], "builder/win/generic/")
utils.set_summary("builder zip deploy", ret)
return
@ -114,15 +99,45 @@ def make_inno():
if common.deploy and ret:
utils.log_h2("builder inno deploy")
ret = aws_s3_upload(["build\\" + inno_file], "win/inno/", "Installer")
ret = s3_upload(["build\\" + inno_file], "builder/win/inno/")
utils.set_summary("builder inno deploy", ret)
return
def make_macos():
    """Build the macOS Document Builder tarball and optionally deploy it.

    NOTE(review): indentation reconstructed from an unindented diff view —
    confirm against upstream before relying on the exact structure.
    """
    company = branding.company_name.lower()
    product = branding.builder_product_name.replace(" ","").lower()
    source_dir = "build_tools/out/%s/%s/%s" % (common.prefix, company, product)
    # Map the build platform id to the arch suffix used in the artifact name.
    arch_list = {
        "darwin_x86_64": "x86_64",
        "darwin_arm64": "arm64"
    }
    suffix = arch_list[common.platform]
    # Written one level above source_dir (leading "../").
    builder_tar = "../%s-%s-%s-%s-%s.tar.xz" % \
        (company, product, common.version, common.build, suffix)
    utils.set_cwd(source_dir)
    if common.clean:
        utils.log_h2("builder clean")
        utils.delete_files("../*.tar*")
    utils.log_h2("builder build")
    # `creates=` presumably lets utils.sh skip the command when the artifact
    # already exists — TODO confirm against utils implementation.
    ret = utils.sh("tar --xz -cvf %s *" % builder_tar, creates=builder_tar, verbose=True)
    utils.set_summary("builder build", ret)
    if common.deploy and ret:
        utils.log_h2("builder deploy")
        ret = s3_upload([builder_tar], "builder/mac/generic/")
        utils.set_summary("builder deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return
def make_linux():
utils.set_cwd("document-builder-package")
utils.log_h2("builder build")
make_args = branding.builder_make_targets
make_args = [t["make"] for t in branding.builder_make_targets]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -130,40 +145,11 @@ def make_linux():
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("builder build", ret)
rpm_arch = "x86_64"
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
if common.deploy:
utils.log_h2("builder deploy")
if ret:
if "tar" in branding.builder_make_targets:
utils.log_h2("builder tar deploy")
ret = aws_s3_upload(
utils.glob_path("tar/*.tar.gz"),
"linux/generic/", "Portable"
)
utils.set_summary("builder tar deploy", ret)
if "deb" in branding.builder_make_targets:
utils.log_h2("builder deb deploy")
ret = aws_s3_upload(
utils.glob_path("deb/*.deb"),
"linux/debian/", "Debian"
)
utils.set_summary("builder deb deploy", ret)
if "rpm" in branding.builder_make_targets:
utils.log_h2("builder rpm deploy")
ret = aws_s3_upload(
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"linux/rhel/", "CentOS"
)
utils.set_summary("builder rpm deploy", ret)
else:
if "tar" in branding.builder_make_targets:
utils.set_summary("builder tar deploy", False)
if "deb" in branding.builder_make_targets:
utils.set_summary("builder deb deploy", False)
if "rpm" in branding.builder_make_targets:
utils.set_summary("builder rpm deploy", False)
for t in branding.builder_make_targets:
utils.log_h2("builder " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("builder " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
return

View File

@ -1,19 +1,5 @@
#!/usr/bin/env python
platformTitles = {
"windows_x64": "Windows x64",
"windows_x86": "Windows x86",
"windows_x64_xp": "Windows x64 XP",
"windows_x86_xp": "Windows x86 XP",
"darwin_x86_64": "macOS x86_64",
"darwin_arm64": "macOS arm64",
"darwin_x86_64_v8": "macOS x86_64 V8",
"linux_x86_64": "Linux x86_64",
"linux_aarch64": "Linux aarch64",
"linux_x86_64_cef": "Linux x86_64 cef107",
"android": "Android",
}
platformPrefixes = {
"windows_x64": "win_64",
"windows_x86": "win_32",
@ -29,35 +15,3 @@ platformPrefixes = {
out_dir = "build_tools/out"
tsa_server = "http://timestamp.digicert.com"
vcredist_links = {
"2022": {
"x64": {
"url": "https://aka.ms/vs/17/release/vc_redist.x64.exe",
"md5": "077f0abdc2a3881d5c6c774af821f787"
},
"x86": {
"url": "https://aka.ms/vs/17/release/vc_redist.x86.exe",
"md5": "ae427c1329c3b211a6d09f8d9506eb74"
}
},
"2015": {
"x64": {
"url": "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x64.exe",
"md5": "27b141aacc2777a82bb3fa9f6e5e5c1c"
},
"x86": {
"url": "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x86.exe",
"md5": "1a15e6606bac9647e7ad3caa543377cf"
}
},
"2013": {
"x64": {
"url": "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x64.exe",
"md5": "96b61b8e069832e6b809f24ea74567ba"
},
"x86": {
"url": "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x86.exe",
"md5": "0fc525b6b7b96a87523daa7a0013c69d"
}
}
}

View File

@ -26,8 +26,8 @@ def make_core():
repo = repos[common.platform]
branch = utils.get_env("BRANCH_NAME")
core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company))
dest_version = "%s/core/%s/%s/%s/" % (repo["repo"], branch, repo["version"], repo["arch"])
dest_latest = "%s/core/%s/%s/%s/" % (repo["repo"], branch, "latest", repo["arch"])
dest_version = "%s/core/%s/%s/%s" % (repo["repo"], branch, repo["version"], repo["arch"])
dest_latest = "%s/core/%s/%s/%s" % (repo["repo"], branch, "latest", repo["arch"])
if branch is None:
utils.log_err("BRANCH_NAME variable is undefined")
@ -39,62 +39,67 @@ def make_core():
return
utils.log_h2("core deploy")
args = ["aws", "s3", "cp", "--acl", "public-read", "--no-progress",
"--metadata", "md5=" + utils.get_md5(core_7z),
core_7z, "s3://" + branding.s3_bucket + "/" + dest_version + "core.7z"]
if common.os_family == "windows":
ret = utils.cmd(*args, verbose=True)
else:
ret = utils.sh(" ".join(args), verbose=True)
ret = utils.s3_upload(
core_7z,
"s3://" + branding.s3_bucket + "/" + dest_version + "/core.7z")
if ret:
utils.add_deploy_data("core", "Archive", core_7z, dest_version + "core.7z")
args = ["aws", "s3", "sync", "--delete",
"--acl", "public-read", "--no-progress",
"s3://" + branding.s3_bucket + "/" + dest_version,
"s3://" + branding.s3_bucket + "/" + dest_latest]
if common.os_family == "windows":
ret &= utils.cmd(*args, verbose=True)
else:
ret &= utils.sh(" ".join(args), verbose=True)
utils.log("URL: " + branding.s3_base_url + "/" + dest_version + "/core.7z")
utils.add_deploy_data(dest_version + "/core.7z")
ret = utils.s3_sync(
"s3://" + branding.s3_bucket + "/" + dest_version + "/",
"s3://" + branding.s3_bucket + "/" + dest_latest + "/",
delete=True)
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest + "/core.7z")
utils.set_summary("core deploy", ret)
return
def deploy_closure_maps(license):
def deploy_closuremaps_sdkjs(license):
if not common.deploy: return
utils.log_h1("CLOSURE MAPS")
utils.set_cwd(utils.get_path("sdkjs/build/maps"))
utils.log_h1("SDKJS CLOSURE MAPS")
branch = utils.get_env("BRANCH_NAME")
maps = utils.glob_path("*.js.map")
if branch is None:
utils.log_err("BRANCH_NAME variable is undefined")
utils.set_summary("closure maps " + license + " deploy", False)
return
if not maps:
maps = utils.glob_path("sdkjs/build/maps/*.js.map")
if maps:
for m in maps: utils.log("- " + m)
else:
utils.log_err("files do not exist")
utils.set_summary("closure maps " + license + " deploy", False)
utils.set_summary("sdkjs closure maps %s deploy" % license, False)
return
utils.log_h2("closure maps " + license + " deploy")
dest = "closure-maps/%s/%s/%s" % (common.version, common.build, license)
utils.log_h2("sdkjs closure maps %s deploy" % license)
ret = True
for file in maps:
args = ["aws"]
if hasattr(branding, "s3_endpoint_url"):
args += ["--endpoint-url=" + branding.s3_endpoint_url]
args += [
"s3", "cp", "--no-progress", "--metadata", "md5=" + utils.get_md5(file),
file, "s3://" + branding.s3_bucket + "/" + dest + "/"
]
if common.os_family == "windows":
upload = utils.cmd(*args, verbose=True)
else:
upload = utils.sh(" ".join(args), verbose=True)
for f in maps:
base = utils.get_basename(f)
key = "closure-maps/sdkjs/%s/%s/%s/%s" % (license, common.version, common.build, base)
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
ret &= upload
if upload:
utils.add_deploy_data("core", "Closure maps " + license, file, dest + "/" + file)
utils.set_summary("closure maps " + license + " deploy", ret)
utils.set_cwd(common.workspace_dir)
utils.log("URL: " + branding.s3_base_url + "/" + key)
utils.add_deploy_data(key)
utils.set_summary("sdkjs closure maps %s deploy" % license, ret)
return
def deploy_closuremaps_webapps(license):
    """Upload web-apps closure-compiler source maps to S3, keyed by license,
    version and build number.

    NOTE(review): indentation reconstructed from an unindented diff view —
    confirm against upstream before relying on the exact structure.
    """
    if not common.deploy: return
    utils.log_h1("WEB-APPS CLOSURE MAPS")
    # Source maps for both the desktop app bundles and the mobile dist.
    maps = utils.glob_path("web-apps/deploy/web-apps/apps/*/*/*.js.map") \
        + utils.glob_path("web-apps/deploy/web-apps/apps/*/mobile/dist/js/*.js.map")
    if maps:
        for m in maps: utils.log("- " + m)
    else:
        utils.log_err("files do not exist")
        utils.set_summary("web-apps closure maps %s deploy" % license, False)
        return
    utils.log_h2("web-apps closure maps %s deploy" % license)
    ret = True
    for f in maps:
        # Flatten the app-relative path into a single file name component.
        base = utils.get_relpath(f, "web-apps/deploy/web-apps/apps").replace("/", "_")
        key = "closure-maps/web-apps/%s/%s/%s/%s" % (license, common.version, common.build, base)
        upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
        ret &= upload
        if upload:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
            utils.add_deploy_data(key)
    utils.set_summary("web-apps closure maps %s deploy" % license, ret)
    return

View File

@ -2,10 +2,10 @@
# -*- coding: utf-8 -*-
import os
import re
import package_utils as utils
import package_common as common
import package_branding as branding
import config
def make():
utils.log_h1("DESKTOP")
@ -19,33 +19,16 @@ def make():
utils.log("Unsupported host OS")
return
def aws_s3_upload(files, key, ptype=None):
if not files:
return False
def s3_upload(files, dst):
if not files: return False
ret = True
key = "desktop/" + key
for file in files:
if not utils.is_file(file):
utils.log_err("file not exist: " + file)
ret &= False
continue
args = ["aws"]
if hasattr(branding, "s3_endpoint_url"):
args += ["--endpoint-url=" + branding.s3_endpoint_url]
args += [
"s3", "cp", "--no-progress", "--acl", "public-read",
"--metadata", "md5=" + utils.get_md5(file),
file, "s3://" + branding.s3_bucket + "/" + key
]
if common.os_family == "windows":
upload = utils.cmd(*args, verbose=True)
else:
upload = utils.sh(" ".join(args), verbose=True)
for f in files:
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
if upload:
utils.add_deploy_data(key)
utils.log("URL: " + branding.s3_base_url + "/" + key)
ret &= upload
if upload and ptype is not None:
full_key = key
if full_key.endswith("/"): full_key += utils.get_basename(file)
utils.add_deploy_data("desktop", ptype, file, full_key)
return ret
#
@ -53,302 +36,155 @@ def aws_s3_upload(files, key, ptype=None):
#
def make_windows():
global package_version, arch_list, source_dir, desktop_dir, viewer_dir, \
inno_file, inno_help_file, inno_sa_file, inno_update_file, advinst_file, zip_file
global package_name, package_version, arch, xp, suffix
utils.set_cwd("desktop-apps\\win-linux\\package\\windows")
package_name = branding.desktop_package_name
package_version = common.version + "." + common.build
arch_list = {
arch = {
"windows_x64": "x64",
"windows_x64_xp": "x64",
"windows_x86": "x86",
"windows_x86_xp": "x86"
}
suffix = arch_list[common.platform]
if common.platform.endswith("_xp"): suffix += "-xp"
zip_file = "%s-%s-%s.zip" % (package_name, package_version, suffix)
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
inno_help_file = "%s-Help-%s-%s.exe" % (package_name, package_version, suffix)
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
inno_update_file = "update\\editors_update_%s.exe" % suffix.replace("-","_")
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
}[common.platform]
xp = common.platform.endswith("_xp")
suffix = arch + ("-xp" if xp else "")
if common.clean:
utils.log_h2("desktop clean")
utils.delete_dir("build")
# utils.delete_dir("data\\vcredist")
utils.delete_dir("DesktopEditors-cache")
utils.delete_files("*.exe")
utils.delete_files("*.msi")
utils.delete_files("*.aic")
utils.delete_files("*.tmp")
utils.delete_files("*.zip")
utils.delete_files("update\\*.exe")
utils.delete_files("update\\*.xml")
utils.delete_files("update\\*.html")
utils.log_h2("copy arifacts")
source_dir = "%s\\build_tools\\out\\%s\\%s" \
% (common.workspace_dir, common.prefix, branding.company_name)
utils.create_dir("build")
desktop_dir = "build\\" + branding.desktop_product_name_s
utils.copy_dir(source_dir + "\\" + branding.desktop_product_name_s, desktop_dir)
if not branding.onlyoffice:
viewer_dir = "build\\" + branding.viewer_product_name_s
utils.copy_dir(source_dir + "\\" + branding.viewer_product_name_s, viewer_dir)
utils.delete_files("data\\*.exe")
make_prepare()
make_zip()
vcdl = True
vcdl &= download_vcredist("2013")
vcdl &= download_vcredist("2022")
if not vcdl:
utils.set_summary("desktop inno build", False)
utils.set_summary("desktop inno standalone build", False)
utils.set_summary("desktop inno update build", False)
utils.set_summary("desktop advinst build", False)
utils.set_cwd(common.workspace_dir)
return
make_inno()
if common.platform == "windows_x64":
make_update_files()
if common.platform in ["windows_x64", "windows_x86"]:
make_advinst()
make_advinst()
utils.set_cwd(common.workspace_dir)
return
def make_zip():
utils.log_h2("desktop zip build")
args = ["-DesktopPath", desktop_dir, "-OutFile", zip_file]
def make_prepare():
args = [
"-Version", package_version,
"-Arch", arch
]
if xp:
args += ["-Target", "xp"]
if common.sign:
args += ["-Sign", "-CertName", branding.cert_name]
if branding.onlyoffice and not common.platform.endswith("_xp"):
args += ["-ExcludeHelp"]
ret = utils.ps1(
"make_zip.ps1", args, creates=zip_file, verbose=True
)
args += ["-Sign"]
utils.log_h2("desktop prepare")
ret = utils.ps1("make.ps1", args, verbose=True)
utils.set_summary("desktop prepare", ret)
return
def make_zip():
zip_file = "%s-%s-%s.zip" % (package_name, package_version, suffix)
args = [
"-Version", package_version,
"-Arch", arch
]
if xp:
args += ["-Target", "xp"]
# if common.sign:
# args += ["-Sign"]
utils.log_h2("desktop zip build")
ret = utils.ps1("make_zip.ps1", args, verbose=True)
utils.set_summary("desktop zip build", ret)
if common.deploy and ret:
utils.log_h2("desktop zip deploy")
ret = aws_s3_upload([zip_file], "win/generic/", "Portable")
ret = s3_upload([zip_file], "desktop/win/generic/")
utils.set_summary("desktop zip deploy", ret)
return
def download_vcredist(year):
utils.log_h2("vcredist " + year + " download")
arch = arch_list[common.platform]
link = common.vcredist_links[year][arch]["url"]
md5 = common.vcredist_links[year][arch]["md5"]
vcredist_file = "data\\vcredist\\vcredist_%s_%s.exe" % (year, arch)
utils.log_h2(vcredist_file)
utils.create_dir(utils.get_dirname(vcredist_file))
ret = utils.download_file(link, vcredist_file, md5, verbose=True)
utils.set_summary("vcredist " + year + " download", ret)
return ret
def make_inno():
utils.log_h2("desktop inno build")
inno_arch_list = {
"windows_x64": "64",
"windows_x86": "32",
"windows_x64_xp": "64",
"windows_x86_xp": "32"
}
iscc_args = [
"/Qp",
"/DVERSION=" + package_version,
"/DsAppVersion=" + package_version,
"/DDEPLOY_PATH=" + desktop_dir,
"/DARCH=" + arch_list[common.platform],
"/D_ARCH=" + inno_arch_list[common.platform],
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
inno_update_file = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix)
update_wrapper = not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper)
args = [
"-Version", package_version,
"-Arch", arch
]
if branding.onlyoffice:
iscc_args.append("/D_ONLYOFFICE=1")
else:
iscc_args.append("/DsBrandingFolder=" + \
utils.get_abspath(common.workspace_dir + "\\" + common.branding + "\\desktop-apps"))
if common.platform.endswith("_xp"):
iscc_args.append("/D_WIN_XP=1")
if common.sign:
iscc_args.append("/DENABLE_SIGNING=1")
iscc_args.append("/Sbyparam=signtool.exe sign /a /v /n $q" + \
branding.cert_name + "$q /t " + common.tsa_server + " $f")
args = ["iscc"] + iscc_args + ["common.iss"]
ret = utils.cmd(*args, creates=inno_file, verbose=True)
args += ["-Sign"]
utils.log_h2("desktop inno build")
if xp:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp"], verbose=True)
else:
ret = utils.ps1("make_inno.ps1", args, verbose=True)
utils.set_summary("desktop inno build", ret)
if branding.onlyoffice and not common.platform.endswith("_xp"):
args = ["iscc"] + iscc_args + ["help.iss"]
ret = utils.cmd(*args, creates=inno_help_file, verbose=True)
utils.set_summary("desktop inno help build", ret)
args = ["iscc"] + iscc_args + ["/DEMBED_HELP", "/DsPackageEdition=Standalone", "common.iss"]
ret = utils.cmd(*args, creates=inno_sa_file, verbose=True)
if branding.onlyoffice and not xp:
utils.log_h2("desktop inno standalone")
ret = utils.ps1("make_inno.ps1", args + ["-Target", "standalone"], verbose=True)
utils.set_summary("desktop inno standalone build", ret)
if not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper):
args = ["iscc"] + iscc_args + ["/DTARGET_NAME=" + inno_file, "update_common.iss"]
ret = utils.cmd(*args, creates=inno_update_file, verbose=True)
if update_wrapper:
utils.log_h2("desktop inno update build")
if xp:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp_update"], verbose=True)
else:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "update"], verbose=True)
utils.set_summary("desktop inno update build", ret)
if common.deploy:
utils.log_h2("desktop inno deploy")
ret = aws_s3_upload([inno_file], "win/inno/","Installer")
ret = s3_upload([inno_file], "desktop/win/inno/")
utils.set_summary("desktop inno deploy", ret)
if branding.onlyoffice and not common.platform.endswith("_xp"):
utils.log_h2("desktop inno help deploy")
ret = aws_s3_upload([inno_help_file], "win/inno/","Installer")
utils.set_summary("desktop inno help deploy", ret)
if branding.onlyoffice and not xp:
utils.log_h2("desktop inno standalone deploy")
ret = aws_s3_upload([inno_sa_file], "win/inno/","Installer")
ret = s3_upload([inno_sa_file], "desktop/win/inno/")
utils.set_summary("desktop inno standalone deploy", ret)
utils.log_h2("desktop inno update deploy")
if utils.is_file(inno_update_file):
ret = aws_s3_upload(
[inno_update_file],
"win/inno/%s/%s/" % (common.version, common.build),
"Update"
)
ret = s3_upload([inno_update_file], "desktop/win/inno/")
elif utils.is_file(inno_file):
ret = aws_s3_upload(
[inno_file],
"win/inno/%s/%s/%s" % (common.version, common.build, utils.get_basename(inno_update_file)),
"Update"
)
ret = s3_upload([inno_file], "desktop/win/inno/" + inno_update_file)
else:
ret = False
utils.set_summary("desktop inno update deploy", ret)
return
def make_update_files():
utils.log_h2("desktop update files build")
changes_dir = common.workspace_dir + "\\" + utils.get_path(branding.desktop_changes_dir) + "\\" + common.version
if common.deploy:
utils.log_h2("desktop update files deploy")
ret = aws_s3_upload(
utils.glob_path(changes_dir + "\\*.html"),
"win/update/%s/%s/" % (common.version, common.build),
"Update"
)
utils.set_summary("desktop update files deploy", ret)
changes_dir = common.workspace_dir + "\\" \
+ utils.get_path(branding.desktop_changes_dir) + "\\" + common.version
if common.platform == "windows_x64" and \
common.deploy and \
utils.glob_path(changes_dir + "\\*.html"):
utils.log_h2("desktop changelog deploy")
ret = s3_upload(
utils.glob_path(changes_dir + "\\*.html"),
"desktop/win/update/%s/%s/" % (common.version, common.build))
utils.set_summary("desktop changelog deploy", ret)
return
def make_advinst():
utils.log_h2("desktop advinst build")
msi_build = {
"windows_x64": "MsiBuild64",
"windows_x86": "MsiBuild32"
}[common.platform]
branding_dir = "."
if not branding.onlyoffice:
branding_dir = common.workspace_dir + "\\" + common.branding + "\\desktop-apps\\win-linux\\package\\windows"
multimedia_dir = common.workspace_dir + "\\" + common.branding + "\\multimedia"
utils.copy_file(branding_dir + "\\dictionary.ail", "dictionary.ail")
utils.copy_dir_content(branding_dir + "\\data", "data", ".bmp")
utils.copy_dir_content(branding_dir + "\\data", "data", ".png")
utils.copy_dir_content(
branding_dir + "\\..\\..\\extras\\projicons\\res",
"..\\..\\extras\\projicons\\res",
".ico")
utils.copy_file(
branding_dir + "\\..\\..\\..\\common\\package\\license\\eula_" + common.branding + ".rtf",
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf")
utils.copy_file(
multimedia_dir + "\\imageviewer\\icons\\ico\\" + common.branding + ".ico",
"..\\..\\extras\\projicons\\res\\gallery.ico")
utils.copy_file(
multimedia_dir + "\\videoplayer\\icons\\" + common.branding + ".ico",
"..\\..\\extras\\projicons\\res\\media.ico")
utils.copy_file(
branding_dir + "\\data\\VisualElementsManifest.xml",
desktop_dir + "\\DesktopEditors.VisualElementsManifest.xml")
utils.create_dir(desktop_dir + "\\browser")
utils.copy_dir_content(
branding_dir + "\\data",
desktop_dir + "\\browser",
"visual_elements_icon")
utils.write_file(desktop_dir + "\\converter\\package.config", "package=msi")
aic_content = [";aic"]
if not common.sign:
aic_content += [
"ResetSig"
]
if branding.onlyoffice:
for path in utils.glob_path(desktop_dir + "\\editors\\web-apps\\apps\\*\\main\\resources\\help"):
utils.delete_dir(path)
aic_content += [
"DelFolder CUSTOM_PATH"
]
else:
aic_content += [
"SetProperty UpgradeCode=\"" + branding.desktop_upgrade_code + "\"",
"AddUpgradeCode {47EEF706-B0E4-4C43-944B-E5F914B92B79} \
-min_ver 7.1.1 -include_min_ver \
-max_ver 7.2.2 -include_max_ver \
-include_lang 1049 \
-property_name UPGRADE_2 -enable_migrate",
"DelLanguage 1029 -buildname " + msi_build,
"DelLanguage 1031 -buildname " + msi_build,
"DelLanguage 1041 -buildname " + msi_build,
"DelLanguage 1046 -buildname " + msi_build,
"DelLanguage 2070 -buildname " + msi_build,
"DelLanguage 1060 -buildname " + msi_build,
"DelLanguage 1036 -buildname " + msi_build,
"DelLanguage 3082 -buildname " + msi_build,
"DelLanguage 1033 -buildname " + msi_build,
"SetCurrentFeature ExtendedFeature",
"NewSync CUSTOM_PATH " + viewer_dir,
"UpdateFile CUSTOM_PATH\\ImageViewer.exe " + viewer_dir + "\\ImageViewer.exe",
"UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + viewer_dir + "\\VideoPlayer.exe",
"SetProperty ProductName=\"" + branding.desktop_product_name_full + "\"",
"SetProperty ASCC_REG_PREFIX=" + branding.ascc_reg_prefix
]
if common.platform == "windows_x86":
aic_content += [
"SetComponentAttribute -feature_name ExtendedFeature -unset -64bit_component"
]
if common.platform == "windows_x86":
aic_content += [
"SetComponentAttribute -feature_name MainFeature -unset -64bit_component",
"SetComponentAttribute -feature_name FileProgramAssociation -unset -64bit_component"
]
aic_content += [
"SetCurrentFeature MainFeature",
"NewSync APPDIR " + desktop_dir,
"UpdateFile APPDIR\\DesktopEditors.exe " + desktop_dir + "\\DesktopEditors.exe",
"UpdateFile APPDIR\\updatesvc.exe " + desktop_dir + "\\updatesvc.exe",
"SetVersion " + package_version,
"SetPackageName " + advinst_file + " -buildname " + msi_build,
"Rebuild -buildslist " + msi_build
if not common.platform in ["windows_x64", "windows_x86"]:
return
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
args = [
"-Version", package_version,
"-Arch", arch
]
utils.write_file("DesktopEditors.aic", "\r\n".join(aic_content), "utf-8-sig")
ret = utils.cmd("AdvancedInstaller.com", "/execute", \
"DesktopEditors.aip", "DesktopEditors.aic", verbose=True)
if common.sign:
args += ["-Sign"]
utils.log_h2("desktop advinst build")
ret = utils.ps1("make_advinst.ps1", args, verbose=True)
utils.set_summary("desktop advinst build", ret)
if common.deploy and ret:
utils.log_h2("desktop advinst deploy")
ret = aws_s3_upload([advinst_file], "win/advinst/", "Installer")
ret = s3_upload([advinst_file], "desktop/win/advinst/")
utils.set_summary("desktop advinst deploy", ret)
return
@ -358,7 +194,7 @@ def make_advinst():
def make_macos():
global package_name, build_dir, branding_dir, updates_dir, changes_dir, \
suffix, lane, scheme, app_version
suffix, lane, scheme, released_updates_dir
package_name = branding.desktop_package_name
build_dir = branding.desktop_build_dir
branding_dir = branding.desktop_branding_dir
@ -371,6 +207,7 @@ def make_macos():
}[common.platform]
lane = "release_" + suffix
scheme = package_name + "-" + suffix
sparkle_updates = False
utils.set_cwd(branding_dir)
@ -379,6 +216,7 @@ def make_macos():
utils.delete_dir(utils.get_env("HOME") + "/Library/Developer/Xcode/Archives")
utils.delete_dir(utils.get_env("HOME") + "/Library/Caches/Sparkle_generate_appcast")
utils.log_h2("build")
source_dir = "%s/build_tools/out/%s/%s" \
% (common.workspace_dir, common.prefix, branding.company_name)
if branding.onlyoffice:
@ -386,32 +224,40 @@ def make_macos():
+ "/desktopeditors/editors/web-apps/apps/*/main/resources/help"):
utils.delete_dir(path)
appcast_url = branding.sparkle_base_url + "/" + suffix + "/" + branding.desktop_package_name.lower() + ".xml"
release_bundle_version_string = utils.sh_output(
'curl -Ls ' + appcast_url + ' 2> /dev/null' \
+ ' | xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:shortVersionString\']/text()" -',
verbose=True).rstrip()
release_bundle_version = utils.sh_output(
'curl -Ls ' + appcast_url + ' 2> /dev/null' \
+ ' | xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:version\']/text()" -',
verbose=True).rstrip()
if utils.get_env("ARCHIVES_DIR"):
sparkle_updates = True
released_updates_dir = "%s/%s/_updates" % (utils.get_env("ARCHIVES_DIR"), scheme)
plistbuddy = "/usr/libexec/PlistBuddy"
plist_path = "%s/%s/ONLYOFFICE/Resources/%s-%s/Info.plist" \
% (common.workspace_dir, branding_dir, package_name, suffix)
app_version = common.version
bundle_version = str(int(release_bundle_version) + 1)
plist_path = "%s/%s/ONLYOFFICE/Resources/%s-%s/Info.plist" \
% (common.workspace_dir, branding.desktop_branding_dir, branding.desktop_package_name, suffix)
utils.sh('/usr/libexec/PlistBuddy -c "Set :CFBundleShortVersionString %s" %s' \
% (common.version, plist_path), verbose=True)
utils.sh('/usr/libexec/PlistBuddy -c "Set :CFBundleVersion %s" %s' \
% (bundle_version, plist_path), verbose=True)
utils.sh('/usr/libexec/PlistBuddy -c "Add :ASCWebappsHelpUrl string %s" %s' \
% ("https://download.onlyoffice.com/install/desktop/editors/help/v" + app_version + "/apps", plist_path), verbose=True)
appcast = utils.sh_output('%s -c "Print :SUFeedURL" %s' \
% (plistbuddy, plist_path), verbose=True).rstrip()
appcast = released_updates_dir + "/" + appcast[appcast.rfind("/")+1:]
utils.log("RELEASE=" + release_bundle_version_string + "(" + release_bundle_version + ")" \
+ "\nCURRENT=" + common.version + "(" + bundle_version + ")")
release_version_string = utils.sh_output(
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:shortVersionString\']/text()" ' + appcast,
verbose=True).rstrip()
release_version = utils.sh_output(
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:version\']/text()" ' + appcast,
verbose=True).rstrip()
bundle_version = str(int(release_version) + 1)
help_url = "https://download.onlyoffice.com/install/desktop/editors/help/v" + common.version + "/apps"
utils.sh('%s -c "Set :CFBundleShortVersionString %s" %s' \
% (plistbuddy, common.version, plist_path), verbose=True)
utils.sh('%s -c "Set :CFBundleVersion %s" %s' \
% (plistbuddy, bundle_version, plist_path), verbose=True)
utils.sh('%s -c "Set :ASCBundleBuildNumber %s" %s' \
% (plistbuddy, common.build, plist_path), verbose=True)
utils.sh('%s -c "Add :ASCWebappsHelpUrl string %s" %s' \
% (plistbuddy, help_url, plist_path), verbose=True)
utils.log("RELEASE=" + release_version_string + "(" + release_version + ")" \
+ "\nCURRENT=" + common.version + "(" + bundle_version + ")")
dmg = make_dmg()
if dmg:
if dmg and sparkle_updates:
make_sparkle_updates()
utils.set_cwd(common.workspace_dir)
@ -429,33 +275,28 @@ def make_dmg():
if common.deploy and dmg:
utils.log_h2("desktop dmg deploy")
ret = aws_s3_upload(
utils.glob_path("build/*.dmg"),
"mac/%s/%s/%s/" % (common.version, common.build, suffix),
"Disk Image"
)
ret = s3_upload(
utils.glob_path("build/*.dmg"),
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop dmg deploy", ret)
utils.log_h2("desktop zip deploy")
ret = aws_s3_upload(
["build/%s-%s.zip" % (scheme, app_version)],
"mac/%s/%s/%s/" % (common.version, common.build, suffix),
"Archive"
)
ret = s3_upload(
["build/%s-%s.zip" % (scheme, common.version)],
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop zip deploy", ret)
return dmg
def make_sparkle_updates():
utils.log_h2("desktop sparkle files build")
zip_filename = scheme + '-' + app_version
zip_filename = scheme + '-' + common.version
macos_zip = "build/" + zip_filename + ".zip"
updates_storage_dir = "%s/%s/_updates" % (utils.get_env('ARCHIVES_DIR'), scheme)
utils.create_dir(updates_dir)
utils.copy_file(macos_zip, updates_dir)
utils.copy_dir_content(updates_storage_dir, updates_dir, ".zip")
utils.copy_dir_content(released_updates_dir, updates_dir, ".zip")
for file in utils.glob_path(changes_dir + "/" + app_version + "/*.html"):
for file in utils.glob_path(changes_dir + "/" + common.version + "/*.html"):
filename = utils.get_basename(file).replace("changes", zip_filename)
utils.copy_file(file, updates_dir + "/" + filename)
@ -470,48 +311,14 @@ def make_sparkle_updates():
)
utils.set_summary("desktop sparkle files build", ret)
# utils.log_h3("edit sparkle appcast links")
# appcast_url = branding.sparkle_base_url + "/" + suffix
# appcast = "%s/%s.xml" % (updates_dir, package_name.lower())
# for lang, base in update_changes_list.items():
# if base == "ReleaseNotes":
# utils.replace_in_file(appcast,
# r'(<sparkle:releaseNotesLink>.+/).+(\.html</sparkle:releaseNotesLink>)',
# "\\1" + base + "\\2")
# else:
# utils.replace_in_file(appcast,
# r'(<sparkle:releaseNotesLink xml:lang="' + lang + r'">).+(\.html</sparkle:releaseNotesLink>)',
# "\\1" + base + "\\2")
utils.log("")
utils.log_h3("generate checksums")
utils.sh(
"md5 *.zip *.delta > md5sums.txt",
chdir="build/update", verbose=True
)
utils.sh(
"shasum -a 256 *.zip *.delta > sha256sums.txt",
chdir="build/update", verbose=True
)
if common.deploy:
utils.log_h2("desktop sparkle files deploy")
ret = aws_s3_upload(
utils.glob_path("build/update/*.delta") \
+ utils.glob_path("build/update/*.xml") \
+ utils.glob_path("build/update/*.html"),
"mac/%s/%s/%s/" % (common.version, common.build, suffix),
"Sparkle"
)
ret = s3_upload(
utils.glob_path("build/update/*.delta") \
+ utils.glob_path("build/update/*.xml") \
+ utils.glob_path("build/update/*.html"),
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop sparkle files deploy", ret)
utils.log_h2("desktop checksums deploy")
ret = aws_s3_upload(
utils.glob_path("build/update/*.txt"),
"mac/%s/%s/%s/" % (common.version, common.build, suffix),
"Checksums"
)
utils.set_summary("desktop checksums deploy", ret)
return
#
@ -522,7 +329,7 @@ def make_linux():
utils.set_cwd("desktop-apps/win-linux/package/linux")
utils.log_h2("desktop build")
make_args = branding.desktop_make_targets
make_args = [t["make"] for t in branding.desktop_make_targets]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -530,80 +337,11 @@ def make_linux():
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("desktop build", ret)
rpm_arch = "x86_64"
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
if common.deploy:
utils.log_h2("desktop deploy")
if ret:
utils.log_h2("desktop tar deploy")
if "tar" in branding.desktop_make_targets:
ret = aws_s3_upload(
utils.glob_path("tar/*.tar*"),
"linux/generic/", "Portable"
)
utils.set_summary("desktop tar deploy", ret)
if "deb" in branding.desktop_make_targets:
utils.log_h2("desktop deb deploy")
ret = aws_s3_upload(
utils.glob_path("deb/*.deb"),
"linux/debian/", "Debian"
)
utils.set_summary("desktop deb deploy", ret)
if "deb-astra" in branding.desktop_make_targets:
utils.log_h2("desktop deb-astra deploy")
ret = aws_s3_upload(
utils.glob_path("deb-astra/*.deb"),
"linux/astra/", "Astra Linux Special Edition"
)
utils.set_summary("desktop deb-astra deploy", ret)
if "rpm" in branding.desktop_make_targets:
utils.log_h2("desktop rpm deploy")
ret = aws_s3_upload(
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm") \
+ utils.glob_path("rpm/builddir/RPMS/noarch/*.rpm"),
"linux/rhel/", "CentOS"
)
utils.set_summary("desktop rpm deploy", ret)
if "suse-rpm" in branding.desktop_make_targets:
utils.log_h2("desktop suse-rpm deploy")
ret = aws_s3_upload(
utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm") \
+ utils.glob_path("suse-rpm/builddir/RPMS/noarch/*.rpm"),
"linux/suse/", "SUSE Linux"
)
utils.set_summary("desktop suse-rpm deploy", ret)
if "apt-rpm" in branding.desktop_make_targets:
utils.log_h2("desktop apt-rpm deploy")
ret = aws_s3_upload(
utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm") \
+ utils.glob_path("apt-rpm/builddir/RPMS/noarch/*.rpm"),
"linux/altlinux/", "ALT Linux"
)
utils.set_summary("desktop apt-rpm deploy", ret)
if "urpmi" in branding.desktop_make_targets:
utils.log_h2("desktop urpmi deploy")
ret = aws_s3_upload(
utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm") \
+ utils.glob_path("urpmi/builddir/RPMS/noarch/*.rpm"),
"linux/rosa/", "ROSA"
)
utils.set_summary("desktop urpmi deploy", ret)
else:
if "tar" in branding.desktop_make_targets:
utils.set_summary("desktop tar deploy", False)
if "deb" in branding.desktop_make_targets:
utils.set_summary("desktop deb deploy", False)
if "deb-astra" in branding.desktop_make_targets:
utils.set_summary("desktop deb-astra deploy", False)
if "rpm" in branding.desktop_make_targets:
utils.set_summary("desktop rpm deploy", False)
if "suse-rpm" in branding.desktop_make_targets:
utils.set_summary("desktop suse-rpm deploy", False)
if "apt-rpm" in branding.desktop_make_targets:
utils.set_summary("desktop apt-rpm deploy", False)
if "urpmi" in branding.desktop_make_targets:
utils.set_summary("desktop urpmi deploy", False)
for t in branding.desktop_make_targets:
utils.log_h2("desktop " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("desktop " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
return

View File

@ -15,31 +15,24 @@ def make():
def make_mobile():
utils.set_cwd("build_tools/out")
zip_file = "build-" + common.version + "-" + common.build + ".zip"
if common.clean:
utils.log_h2("mobile clean")
utils.sh("rm -rfv *.zip", verbose=True)
zip_file = "build-" + common.version + "-" + common.build + ".zip"
s3_key = "mobile/android/" + zip_file
utils.log_h2("mobile build")
ret = utils.sh("zip -r " + zip_file + " ./android* ./js", verbose=True)
ret = utils.sh("zip -r " + zip_file + " ./android ./ios", verbose=True)
utils.set_summary("mobile build", ret)
if common.deploy:
utils.log_h2("mobile deploy")
if not utils.is_file(zip_file):
utils.log_err("file not exist: " + zip_file)
ret = False
elif ret:
ret = utils.sh(
"aws s3 cp --acl public-read --no-progress " \
+ "--metadata md5=" + utils.get_md5(zip_file) + " " \
+ zip_file + " s3://" + branding.s3_bucket + "/" + s3_key,
verbose=True
)
if ret:
utils.add_deploy_data("mobile", "Android", zip_file, s3_key)
utils.log_h2("mobile deploy")
key = "mobile/android/" + zip_file
ret = utils.s3_upload(zip_file, "s3://" + branding.s3_bucket + "/" + key)
if ret:
utils.add_deploy_data(key)
utils.log("URL: " + branding.s3_base_url + "/" + key)
utils.set_summary("mobile deploy", ret)
utils.set_cwd(common.workspace_dir)

View File

@ -14,33 +14,16 @@ def make(edition):
utils.log("Unsupported host OS")
return
def aws_s3_upload(files, key, edition, ptype=None):
if not files:
return False
def s3_upload(files, dst):
if not files: return False
ret = True
key = "server/" + key
for file in files:
if not utils.is_file(file):
utils.log_err("file not exist: " + file)
ret &= False
continue
args = ["aws"]
if hasattr(branding, "s3_endpoint_url"):
args += ["--endpoint-url=" + branding.s3_endpoint_url]
args += [
"s3", "cp", "--no-progress", "--acl", "public-read",
"--metadata", "md5=" + utils.get_md5(file),
file, "s3://" + branding.s3_bucket + "/" + key
]
if common.os_family == "windows":
upload = utils.cmd(*args, verbose=True)
else:
upload = utils.sh(" ".join(args), verbose=True)
for f in files:
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
if upload:
utils.add_deploy_data(key)
utils.log("URL: " + branding.s3_base_url + "/" + key)
ret &= upload
if upload and ptype is not None:
full_key = key
if full_key.endswith("/"): full_key += utils.get_basename(file)
utils.add_deploy_data("server_" + edition, ptype, file, full_key)
return ret
def make_windows(edition):
@ -62,10 +45,7 @@ def make_windows(edition):
if common.deploy and ret:
utils.log_h2("server " + edition + " inno deploy")
ret = aws_s3_upload(
utils.glob_path("exe/*.exe"),
"win/inno/", edition, "Installer"
)
ret = s3_upload(utils.glob_path("exe/*.exe"), "server/win/inno/")
utils.set_summary("server " + edition + " inno deploy", ret)
utils.set_cwd(common.workspace_dir)
@ -81,7 +61,8 @@ def make_linux(edition):
utils.set_cwd("document-server-package")
utils.log_h2("server " + edition + " build")
make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name]
make_args = [t["make"] for t in branding.server_make_targets]
make_args += ["-e", "PRODUCT_NAME=" + product_name]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
@ -89,49 +70,11 @@ def make_linux(edition):
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("server " + edition + " build", ret)
rpm_arch = "x86_64"
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
if common.deploy:
utils.log_h2("server " + edition + " deploy")
if ret:
if "deb" in branding.server_make_targets:
utils.log_h2("server " + edition + " deb deploy")
ret = aws_s3_upload(
utils.glob_path("deb/*.deb"),
"linux/debian/", edition, "Debian"
)
utils.set_summary("server " + edition + " deb deploy", ret)
if "rpm" in branding.server_make_targets:
utils.log_h2("server " + edition + " rpm deploy")
ret = aws_s3_upload(
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"linux/rhel/", edition, "CentOS"
)
utils.set_summary("server " + edition + " rpm deploy", ret)
if "apt-rpm" in branding.server_make_targets:
utils.log_h2("server " + edition + " apt-rpm deploy")
ret = aws_s3_upload(
utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
"linux/altlinux/", edition, "ALT Linux"
)
utils.set_summary("server " + edition + " apt-rpm deploy", ret)
if "tar" in branding.server_make_targets:
utils.log_h2("server " + edition + " snap deploy")
ret = aws_s3_upload(
utils.glob_path("*.tar.gz"),
"linux/generic/", edition, "Snap"
)
utils.set_summary("server " + edition + " snap deploy", ret)
else:
if "deb" in branding.server_make_targets:
utils.set_summary("server " + edition + " deb deploy", False)
if "rpm" in branding.server_make_targets:
utils.set_summary("server " + edition + " rpm deploy", False)
if "apt-rpm" in branding.server_make_targets:
utils.set_summary("server " + edition + " apt-rpm deploy", False)
if "tar" in branding.server_make_targets:
utils.set_summary("server " + edition + " snap deploy", False)
for t in branding.server_make_targets:
utils.log_h2("server " + edition + " " + t["make"] + " deploy")
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
return

View File

@ -4,7 +4,6 @@
import codecs
import glob
import hashlib
import json
import os
import platform
import re
@ -73,6 +72,9 @@ def get_path(path):
return path.replace("/", "\\")
return path
def get_relpath(path, rel_path):
return os.path.relpath(get_path(path), get_path(rel_path))
def get_abspath(path):
return os.path.abspath(get_path(path))
@ -107,11 +109,25 @@ def glob_file(path):
return glob.glob(path)[0]
return
def get_md5(path):
def get_hash_sha256(path):
if os.path.exists(path):
md5_hash = hashlib.md5()
md5_hash.update(open(path, "rb").read())
return md5_hash.hexdigest()
h = hashlib.sha256()
h.update(open(path, "rb").read())
return h.hexdigest()
return
def get_hash_sha1(path):
if os.path.exists(path):
h = hashlib.sha1()
h.update(open(path, "rb").read())
return h.hexdigest()
return
def get_hash_md5(path):
if os.path.exists(path):
h = hashlib.md5()
h.update(open(path, "rb").read())
return h.hexdigest()
return
def create_dir(path, verbose=True):
@ -185,18 +201,12 @@ def copy_files(src, dst, override=True, verbose=True):
copy_files(file + "/*", dst + "/" + file_name, override)
return
def copy_dir(src, dst, override=True, verbose=True):
def copy_dir(src, dst, verbose=True):
if verbose:
log("- copy_dir:")
log(" src: " + src)
log(" dst: " + dst)
log(" override: " + str(override))
if is_dir(dst):
delete_dir(dst)
try:
shutil.copytree(get_path(src), get_path(dst))
except OSError as e:
log_err('directory not copied. Error: %s' % e)
shutil.copytree(src, dst)
return
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
@ -206,20 +216,18 @@ def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose
log(" dst: " + dst)
log(" include: " + filter_include)
log(" exclude: " + filter_exclude)
src_folder = src
if ("/" != src[-1:]):
src_folder += "/"
src_folder += "*"
for file in glob.glob(src_folder):
basename = os.path.basename(file)
if ("" != filter_include) and (-1 == basename.find(filter_include)):
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if ("" != filter_include) and (-1 == item.find(filter_include)):
continue
if ("" != filter_exclude) and (-1 != basename.find(filter_exclude)):
if ("" != filter_exclude) and (-1 != item.find(filter_exclude)):
continue
if is_file(file):
copy_file(file, dst, verbose=False)
elif is_dir(file):
copy_dir(file, dst + "/" + basename, verbose=False)
if os.path.isdir(s):
shutil.copytree(s, d)
else:
shutil.copy2(s, d)
log(item)
return
def delete_file(path, verbose=True):
@ -258,18 +266,9 @@ def set_summary(target, status):
common.summary.append({target: status})
return
def add_deploy_data(product, ptype, src, dst):
common.deploy_data.append({
"platform": common.platformTitles[common.platform],
"product": product,
"type": ptype,
# "local": get_path(src),
"size": get_file_size(get_path(src)),
"key": dst
})
file = open(get_path(common.workspace_dir + "/deploy.json"), 'w')
file.write(json.dumps(common.deploy_data, sort_keys=True, indent=4))
file.close()
def add_deploy_data(key):
with open(common.deploy_data, 'a+') as f:
f.write(key + "\n")
return
def cmd(*args, **kwargs):
@ -285,12 +284,12 @@ def cmd(*args, **kwargs):
return False
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
oldcwd = get_cwd()
set_cwd(kwargs["chdir"])
set_cwd(kwargs["chdir"], verbose=False)
ret = subprocess.call(
[i for i in args], stderr=subprocess.STDOUT, shell=True
) == 0
if kwargs.get("chdir") and oldcwd:
set_cwd(oldcwd)
set_cwd(oldcwd, verbose=False)
return ret
def cmd_output(*args, **kwargs):
@ -323,33 +322,11 @@ def ps1(file, args=[], **kwargs):
if kwargs.get("creates") and is_exist(kwargs["creates"]):
return True
ret = subprocess.call(
["powershell", "-File", file] + args, stderr=subprocess.STDOUT, shell=True
["powershell", "-ExecutionPolicy", "ByPass", "-File", file] + args,
stderr=subprocess.STDOUT, shell=True
) == 0
return ret
def download_file(url, path, md5, verbose=False):
if verbose:
log("- download_file:")
log(" url: " + path)
log(" path: " + url)
log(" md5: " + md5)
if is_file(path):
if get_md5(path) == md5:
log_err("file already exist (match checksum)")
return True
else:
log_err("wrong checksum (%s), delete" % md5)
os.remove(path)
ret = powershell(
"(New-Object System.Net.WebClient).DownloadFile('%s','%s')" % (url, path),
verbose=True
)
md5_new = get_md5(path)
if md5 != md5_new:
log_err("checksum didn't match (%s != %s)" % (md5, md5_new))
return False
return ret
def sh(command, **kwargs):
if kwargs.get("verbose"):
log("- sh:")
@ -363,18 +340,63 @@ def sh(command, **kwargs):
return False
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
oldcwd = get_cwd()
set_cwd(kwargs["chdir"])
set_cwd(kwargs["chdir"], verbose=False)
ret = subprocess.call(
command, stderr=subprocess.STDOUT, shell=True
) == 0
if kwargs.get("chdir") and oldcwd:
set_cwd(oldcwd)
set_cwd(oldcwd, verbose=False)
return ret
def sh_output(command, **kwargs):
if kwargs.get("verbose"):
log("- sh_output:")
log(" command: " + command)
return subprocess.check_output(
if kwargs.get("chdir"):
log(" chdir: " + kwargs["chdir"])
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
oldcwd = get_cwd()
set_cwd(kwargs["chdir"], verbose=False)
ret = subprocess.check_output(
command, stderr=subprocess.STDOUT, shell=True
).decode("utf-8")
log(ret)
if kwargs.get("chdir") and oldcwd:
set_cwd(oldcwd, verbose=False)
return ret
def s3_upload(src, dst, **kwargs):
    """Upload one local file to S3 via the aws CLI, attaching checksum metadata.

    src -- path to the local file; dst -- full s3:// destination URL.
    Optional kwargs: endpoint_url (custom S3 endpoint), acl (canned ACL).
    Returns True on success, False when the file is missing or the CLI fails.
    """
    if not is_file(src):
        log_err("file not exist: " + src)
        return False
    # Store all three digests as object metadata so downloads can be verified.
    checksums = ",".join([
        "sha256=" + get_hash_sha256(src),
        "sha1=" + get_hash_sha1(src),
        "md5=" + get_hash_md5(src),
    ])
    cli = ["aws"]
    endpoint = kwargs.get("endpoint_url")
    if endpoint:
        cli += ["--endpoint-url", endpoint]
    cli += ["s3", "cp", "--no-progress"]
    acl = kwargs.get("acl")
    if acl:
        cli += ["--acl", acl]
    cli += ["--metadata", checksums, src, dst]
    # Windows runs the argv form through cmd(); elsewhere the joined string goes to sh().
    if is_windows():
        return cmd(*cli, verbose=True)
    return sh(" ".join(cli), verbose=True)
def s3_sync(src, dst, **kwargs):
    """Mirror a directory to/from S3 with `aws s3 sync`.

    Optional kwargs: endpoint_url (custom S3 endpoint), acl (canned ACL),
    delete (remove destination objects absent from the source).
    Returns True when the aws CLI exits successfully.
    """
    cli = ["aws"]
    endpoint = kwargs.get("endpoint_url")
    if endpoint:
        cli += ["--endpoint-url", endpoint]
    cli += ["s3", "sync", "--no-progress"]
    acl = kwargs.get("acl")
    if acl:
        cli += ["--acl", acl]
    if kwargs.get("delete"):
        cli += ["--delete"]
    cli += [src, dst]
    # Same dispatch convention as s3_upload: argv on Windows, joined string elsewhere.
    if is_windows():
        return cmd(*cli, verbose=True)
    return sh(" ".join(cli), verbose=True)

147
scripts/qmake.py Normal file
View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
import os
import sys
__dir__name__ = os.path.dirname(__file__)
sys.path.append(__dir__name__ + '/core_common/modules/android')
import base
import config
import android_ndk
import multiprocessing
def get_make_file_suffix(platform):
    # Build the per-configuration Makefile name suffix for `platform`,
    # combining the platform id, an optional debug marker, and the branding name.
    suffix = platform
    if config.check_option("config", "debug"):
        suffix += "_debug_"
    # NOTE(review): indentation was lost in this rendered view; the branding
    # segment is read here as appended unconditionally (with "_debug_" acting
    # as the separator only in debug builds) — confirm nesting against the
    # upstream scripts/qmake.py.
    suffix += config.option("branding")
    return suffix
def get_j_num():
    """Return make's parallelism flag: ["-jN"] (N = CPU count), or [] when the
    "multiprocess" option is set to "0"."""
    if config.option("multiprocess") == "0":
        return []
    return ["-j" + str(multiprocessing.cpu_count())]
def check_support_platform(platform):
    """Return True when the Qt installation for `platform` provides a qmake
    binary (either `qmake` or `qmake.exe`)."""
    qmake_path = base.qt_setup(platform) + "/bin/qmake"
    return base.is_file(qmake_path) or base.is_file(qmake_path + ".exe")
def make(platform, project, qmake_config_addon=""):
    """Configure and build one qmake project for one platform.

    Runs qmake then make (or generates and runs an nmake .bat on Windows),
    honoring the "clean", "multiprocess", "qmake_addon" and "branding"
    config options. Mutates the process environment and cwd; both are
    restored before returning.
    NOTE(review): indentation was stripped in this rendered diff view; block
    nesting below is reconstructed from the logic (e.g. qmake is re-run inside
    the clean branch because distclean deletes the Makefile) — confirm against
    the upstream scripts/qmake.py.
    """
    # check platform
    if not check_support_platform(platform):
        print("THIS PLATFORM IS NOT SUPPORTED")
        return
    # Snapshot the environment; it is mutated heavily below and restored on exit.
    old_env = dict(os.environ)
    # qt
    qt_dir = base.qt_setup(platform)
    base.set_env("OS_DEPLOY", platform)
    # pro & makefile
    file_pro = os.path.abspath(project)
    pro_dir = os.path.dirname(file_pro)
    if (pro_dir.endswith("/.")):
        pro_dir = pro_dir[:-2]
    if (pro_dir.endswith("/")):
        pro_dir = pro_dir[:-1]
    # Makefile name embeds platform/debug/branding so configs can coexist.
    makefile_name = "Makefile." + get_make_file_suffix(platform)
    makefile = pro_dir + "/" + makefile_name
    stash_file = pro_dir + "/.qmake.stash"
    old_cur = os.getcwd()
    os.chdir(pro_dir)
    # Drop stale qmake artifacts so configuration starts from a clean state.
    if (base.is_file(stash_file)):
        base.delete_file(stash_file)
    if (base.is_file(makefile)):
        base.delete_file(makefile)
    base.set_env("DEST_MAKEFILE_NAME", "./" + makefile_name)
    # setup android env
    if (-1 != platform.find("android")):
        base.set_env("ANDROID_NDK_HOST", android_ndk.host["arch"])
        base.set_env("ANDROID_NDK_PLATFORM", "android-" + android_ndk.get_sdk_api())
        # Prepend Qt and NDK toolchain bin dirs so their tools win PATH lookup.
        base.set_env("PATH", qt_dir + "/bin:" + android_ndk.toolchain_dir() + "/bin:" + base.get_env("PATH"))
    # setup ios env
    if (-1 != platform.find("ios")):
        base.hack_xcode_ios()
    if base.is_file(makefile):
        base.delete_file(makefile)
    config_param = base.qt_config(platform)
    if ("" != qmake_config_addon):
        config_param += (" " + qmake_config_addon)
    # qmake ADDON
    qmake_addon = []
    if ("" != config.option("qmake_addon")):
        qmake_addon = config.option("qmake_addon").split()
    clean_params = ["clean", "-f", makefile]
    distclean_params = ["distclean", "-f", makefile]
    build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon
    qmake_app = qt_dir + "/bin/qmake"
    # non windows platform
    if not base.is_windows():
        base.cmd(qmake_app, build_params)
        base.correct_makefile_after_qmake(platform, makefile)
        if ("1" == config.option("clean")):
            # distclean removes the Makefile, so regenerate it before building.
            base.cmd_and_return_cwd("make", clean_params, True)
            base.cmd_and_return_cwd("make", distclean_params, True)
            base.cmd(qmake_app, build_params)
            base.correct_makefile_after_qmake(platform, makefile)
        base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num())
    else:
        # Windows: build a .bat that sets up MSVC (vcvarsall), runs qmake, then nmake.
        config_params_array = base.qt_config_as_param(config_param)
        config_params_string = ""
        for item in config_params_array:
            config_params_string += (" \"" + item + "\"")
        # NOTE(review): this qmake_addon_string value is dead code — it is
        # unconditionally overwritten a few lines below.
        qmake_addon_string = " ".join(qmake_addon)
        if ("" != qmake_addon_string):
            qmake_addon_string = " " + qmake_addon_string
        qmake_bat = []
        qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
        qmake_addon_string = ""
        if ("" != config.option("qmake_addon")):
            qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
        qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
        if ("1" == config.option("clean")):
            # Mirror of the POSIX branch: re-run qmake after distclean.
            qmake_bat.append("call nmake " + " ".join(clean_params))
            qmake_bat.append("call nmake " + " ".join(distclean_params))
            qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
        if ("0" != config.option("multiprocess")):
            # MSVC parallel compilation flag, passed via the CL environment variable.
            qmake_bat.append("set CL=/MP")
        qmake_bat.append("call nmake -f " + makefile)
        base.run_as_bat(qmake_bat)
    # Cleanup: drop the qmake stash, restore cwd and the original environment.
    if (base.is_file(stash_file)):
        base.delete_file(stash_file)
    os.chdir(old_cur)
    os.environ.clear()
    os.environ.update(old_env)
    return
def make_all_platforms(project, qmake_config_addon=""):
    """Build `project` for every platform listed in the "platform" option,
    skipping platforms not present in config.platforms."""
    requested = config.option("platform").split()
    for target in requested:
        if target not in config.platforms:
            continue
        print("------------------------------------------")
        print("BUILD_PLATFORM: " + target)
        print("------------------------------------------")
        make(target, project, qmake_config_addon)
    return

View File

@ -2,6 +2,8 @@
import os
import shutil
import re
import argparse
def readFile(path):
with open(path, "r", errors='replace') as file:
filedata = file.read()
@ -112,6 +114,8 @@ class EditorApi(object):
line = line.replace("}", "")
lineWithoutSpaces = line.replace(" ", "")
if not is_found_function and 0 == line.find("function "):
if -1 == decoration.find("@constructor"):
return
codeCorrect += (line + addon_for_func + "\n")
is_found_function = True
if not is_found_function and -1 != line.find(".prototype."):
@ -177,7 +181,7 @@ class EditorApi(object):
def generate(self):
for file in self.files:
file_content = readFile(file)
file_content = readFile(f'{sdkjs_dir}/{file}')
arrRecords = file_content.split("/**")
arrRecords = arrRecords[1:-1]
for record in arrRecords:
@ -185,8 +189,8 @@ class EditorApi(object):
self.numfile += 1
correctContent = ''.join(self.records)
correctContent += "\n"
os.mkdir('deploy/api_builder/' + self.folder)
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
os.mkdir(args.destination + self.folder)
writeFile(args.destination + self.folder + "/api.js", correctContent)
return
def convert_to_interface(arrFiles, sEditorType):
@ -195,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType):
editor.generate()
return
old_cur = os.getcwd()
os.chdir("../../../sdkjs")
if True == os.path.isdir('deploy/api_builder'):
shutil.rmtree('deploy/api_builder', ignore_errors=True)
os.mkdir('deploy/api_builder')
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)
sdkjs_dir = "../../../sdkjs"
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # the positional argument is optional
        # fix: the original default mixed literal backslashes ("\s", "\c", ...)
        # with forward slashes, which breaks the path on non-Windows systems;
        # use forward slashes throughout
        default="../../../onlyoffice.github.io/sdkjs-plugins/content/macros/libs/"
    )
    args = parser.parse_args()
    old_cur = os.getcwd()
    # Recreate the destination from scratch
    if os.path.isdir(args.destination):
        shutil.rmtree(args.destination, ignore_errors=True)
    # fix: makedirs also creates missing parent directories (mkdir would fail)
    os.makedirs(args.destination, exist_ok=True)
    convert_to_interface(["word/apiBuilder.js"], "word")
    convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
    convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
    os.chdir(old_cur)

View File

@ -0,0 +1,57 @@
# Documentation Generation Guide
This guide explains how to generate documentation for Onlyoffice API using the provided Python scripts, `generate_docs_json.py` and `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors.
## Prerequisites
1. **Node.js and npm**: Ensure you have Node.js and npm installed on your machine. You can download them from [Node.js official website](https://nodejs.org/).
2. **jsdoc**: The scripts use `jsdoc` to generate documentation. Install it using npm:
```bash
npm install
```
## Scripts Overview
### `generate_docs_json.py`
This script generates JSON documentation based on the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/sdkjs/deploy/api_builder/json`.
### `generate_docs_md.py`
This script generates Markdown documentation from the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `Onlyoffice/office-js-api`.
## Example
To generate JSON documentation, specifying the output path:
```bash
python generate_docs_json.py /path/to/save/json
```
To generate Markdown documentation and specify a custom output path:
```bash
python generate_docs_md.py /path/to/save/markdown
```
## Notes
- Make sure to have all necessary permissions to run these scripts and write to the specified directories.
- The output directories will be created if they do not exist.

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,216 @@
exports.handlers = {
processingComplete: function(e) {
// array for filtered doclets
let filteredDoclets = [];
const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
const classesDocletsMap = {}; // doclets for classes write at the end
let passedClasses = []; // passed classes for current editor
// Remove dublicates doclets
const latestDoclets = {};
e.doclets.forEach(doclet => {
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
const shouldAddMethod =
doclet.kind !== 'member' &&
(!doclet.longname || doclet.longname.search('private') === -1) &&
doclet.scope !== 'inner' && hasTypeofEditorsTag;
if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
latestDoclets[doclet.longname] = doclet;
}
});
e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));
// check available classess for current editor
for (let i = 0; i < e.doclets.length; i++) {
const doclet = e.doclets[i];
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
const shouldAdd =
doclet.kind !== 'member' &&
(!doclet.longname || doclet.longname.search('private') === -1) &&
doclet.scope !== 'inner' &&
(!isMethod || hasTypeofEditorsTag);
if (shouldAdd) {
if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
passedClasses.push(cleanName(doclet.memberof));
}
}
else if (doclet.kind == 'class') {
classesDocletsMap[cleanName(doclet.name)] = doclet;
}
}
// remove unavailave classes in current editor
passedClasses = passedClasses.filter(className => {
const doclet = classesDocletsMap[className];
if (!doclet) {
return true;
}
const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
// class is passes if there is no editor tag or the current editor is among the tags
const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
return isPassed;
});
for (let i = 0; i < e.doclets.length; i++) {
const doclet = e.doclets[i];
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
const shouldAddMethod =
doclet.kind !== 'member' &&
(!doclet.longname || doclet.longname.search('private') === -1) &&
doclet.scope !== 'inner' && hasTypeofEditorsTag;
if (shouldAddMethod) {
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
if (false == passedClasses.includes(cleanName(doclet.memberof))) {
continue;
}
// We leave only the necessary fields
doclet.memberof = cleanName(doclet.memberof);
doclet.longname = cleanName(doclet.longname);
doclet.name = cleanName(doclet.name);
const filteredDoclet = {
comment: doclet.comment,
description: doclet.description,
memberof: cleanName(doclet.memberof),
params: doclet.params ? doclet.params.map(param => ({
type: param.type ? {
names: param.type.names,
parsedType: param.type.parsedType
} : param.type,
name: param.name,
description: param.description,
optional: param.optional,
defaultvalue: param.defaultvalue
})) : doclet.params,
returns: doclet.returns ? doclet.returns.map(returnObj => ({
type: {
names: returnObj.type.names,
parsedType: returnObj.type.parsedType
}
})) : doclet.returns,
name: doclet.name,
longname: cleanName(doclet.longname),
kind: doclet.kind,
scope: doclet.scope,
type: doclet.type ? {
names: doclet.type.names,
parsedType: doclet.type.parsedType
} : doclet.type,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
see: doclet.see
};
// Add the filtered doclet to the array
filteredDoclets.push(filteredDoclet);
}
else if (doclet.kind == 'class') {
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
if (false == passedClasses.includes(cleanName(doclet.name))) {
continue;
}
const filteredDoclet = {
comment: doclet.comment,
description: doclet.description,
name: cleanName(doclet.name),
longname: cleanName(doclet.longname),
kind: doclet.kind,
scope: "global",
augments: doclet.augments || undefined,
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
see: doclet.see || undefined
};
filteredDoclets.push(filteredDoclet);
}
else if (doclet.kind == 'typedef') {
const filteredDoclet = {
comment: doclet.comment,
description: doclet.description,
name: cleanName(doclet.name),
longname: cleanName(doclet.longname),
kind: doclet.kind,
scope: "global",
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
see: doclet.see,
type: doclet.type ? {
names: doclet.type.names,
parsedType: doclet.type.parsedType
} : doclet.type
};
filteredDoclets.push(filteredDoclet);
}
}
// Replace doclets with a filtered array
e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
}
};

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,136 @@
import os
import subprocess
import json
import argparse
import re
# jsdoc configuration files, one per editor type
configs = [
    "./config/word.json",
    "./config/cell.json",
    "./config/slide.json",
    "./config/forms.json"
]
# Maps editor name to the value used in @typeofeditors jsdoc tags
editors_maps = {
    "word": "CDE",
    "cell": "CSE",
    "slide": "CPE",
    "forms": "CFE"
}
def generate(output_dir, md=False):
    """Generate per-editor JSON doclets into output_dir via jsdoc, then inline
    the example files referenced by each doclet's @see tag.

    Examples whose file cannot be found are listed in missing_examples.txt.
    When md is False a "Try it" section is appended to each description.
    """
    missing_examples_file = f'{output_dir}/missing_examples.txt'
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Recreate missing_examples.txt file
    with open(missing_examples_file, 'w', encoding='utf-8') as f:
        f.write('')
    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # NOTE(review): `set VAR=... &&` is cmd.exe syntax — this appears to
        # assume a Windows shell; confirm before running elsewhere.
        command = f"set EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)
    # Append examples to JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Modify JSON data
        for doclet in data:
            if 'see' in doclet:
                if doclet['see'] is not None:
                    # Forms examples live under the Word editor path
                    if editor_name == 'forms':
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
                    else:
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())
                    file_path = '../../../../' + doclet['see'][0]
                    if os.path.exists(file_path):
                        with open(file_path, 'r', encoding='utf-8') as see_file:
                            example_content = see_file.read()
                        # Extract the first line as a comment if it exists
                        lines = example_content.split('\n')
                        if lines[0].startswith('//'):
                            comment = lines[0] + '\n'
                            code_content = '\n'.join(lines[1:])
                        else:
                            comment = ''
                            code_content = example_content
                        # Format content for doclet['example']
                        doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
                        if md == False:
                            # NOTE(review): assumes doclet['description'] is not None here — confirm
                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name.title()}"}}\n{code_content}\n```'
                    else:
                        # Record missing examples in missing_examples.txt
                        with open(missing_examples_file, 'a', encoding='utf-8') as missing_file:
                            missing_file.write(f"{file_path}\n")
        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)
    print("Documentation generation completed.")
def remove_builder_lines(text):
    """Return `text` without the lines that start (after whitespace) with "builder."."""
    kept = [line for line in text.splitlines()
            if not line.strip().startswith("builder.")]
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JS comment markers (// and /* */) while keeping the comment text."""
    without_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    without_block_markers = re.sub(r'/\*\s*|\s*\*/', '', without_line_markers, flags=re.DOTALL)
    return without_block_markers.strip()
def get_current_branch(path):
    """Return the current git branch name of the repository at `path`.

    Prints a diagnostic and returns None when git fails or cannot be run.
    """
    try:
        completed = subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
            cwd=path,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True
        )
        if completed.returncode != 0:
            print(f"Error: {completed.stderr}")
            return None
        return completed.stdout.strip()
    except Exception as e:
        print(f"Exception: {e}")
        return None
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # the positional argument is optional
        default="../../../../document-builder-declarations/document-builder"  # default output root
    )
    args = parser.parse_args()
    # Output is grouped per sdkjs branch: <destination>/<branch_name>
    branch_name = get_current_branch("../../../../sdkjs")
    if branch_name:
        args.destination = f"{args.destination}/{branch_name}"
    generate(args.destination)

View File

@ -0,0 +1,266 @@
import os
import json
import re
import shutil
import argparse
import generate_docs_json
# Editors to generate markdown documentation for
editors = [
    "word",
    "cell",
    "slide",
    "forms"
]
def load_json(file_path):
    """Read a UTF-8 encoded JSON file and return the parsed data."""
    with open(file_path, 'r', encoding='utf-8') as handle:
        return json.load(handle)
def write_markdown_file(file_path, content):
    """Write `content` to `file_path` using UTF-8 encoding."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(content)
def remove_js_comments(text):
    """Strip JS comment markers (// and /* */) while keeping the comment text."""
    no_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    no_block_markers = re.sub(r'/\*\s*|\s*\*/', '', no_line_markers, flags=re.DOTALL)
    return no_block_markers.strip()
def correct_description(string):
    """Convert the HTML-ish markup used in jsdoc descriptions to markdown.

    <b>...</b> becomes **...** and <note>...</note> becomes a 💡 prefix.
    None maps to a placeholder description.
    """
    if string is None:
        return 'No description provided.'
    bolded = string.replace('<b>', '**').replace('</b>', '**')
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', bolded, flags=re.DOTALL)
def correct_default_value(value, enumerations, classes):
    """Render a jsdoc defaultvalue as a markdown string (with type links).

    Returns '' for None; booleans render as "true"/"false"; everything else
    is stringified and passed through generate_data_types_markdown.
    """
    if value is None:
        return ''
    # fix: use `is True/False` — `== True` also matched the integers 1 and 0,
    # rendering numeric defaults as booleans
    if value is True:
        value = "true"
    elif value is False:
        value = "false"
    else:
        value = str(value)
    return generate_data_types_markdown([value], enumerations, classes)
def remove_line_breaks(string):
    """Remove every CR and LF character from the string."""
    return string.replace('\r', '').replace('\n', '')
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """Join type names with ' &#124;' and link known enumeration/class names.

    `root` is the relative prefix from the generated page to the docs root.
    Generic elements like Array.<Foo> are linked via the <...> replacement.
    """
    param_types_md = ' &#124;'.join(types)
    for enum in enumerations:
        if enum['name'] in types:
            param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}]({root}Enumeration/{enum['name']}.md)")
    for cls in classes:
        if cls in types:
            param_types_md = param_types_md.replace(cls, f"[{cls}]({root}{cls}/{cls}.md)")

    def replace_with_links(match):
        element = match.group(1).strip()
        base_type = element.split('.')[0]  # only the first part before the dot, if any
        # fix: honor the `root` parameter — these links were hardcoded to
        # '../../', producing broken links whenever a different root was passed
        if any(enum['name'] == base_type for enum in enumerations):
            return f"<[{element}]({root}Enumeration/{base_type}.md)>"
        elif base_type in classes:
            return f"<[{element}]({root}{base_type}/{base_type}.md)>"
        return f"<{element}>"

    return re.sub(r'<([^<>]+)>', replace_with_links, param_types_md)
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Build the markdown page for a class: header, properties table, method index."""
    parts = [f"# {class_name}\n\nRepresents the {class_name} class.\n\n"]
    parts.append(generate_properties_markdown(properties, enumerations, classes, '../'))
    parts.append("## Methods\n\n")
    parts.extend(f"- [{method['name']}](./Methods/{method['name']}.md)\n" for method in methods)
    return ''.join(parts)
def generate_method_markdown(method, enumerations, classes):
    """Render one method doclet as a markdown page: description, syntax,
    parameter table, return type and an optional example block."""
    method_name = method['name']
    description = method.get('description', 'No description provided.')
    description = correct_description(description)
    params = method.get('params', [])
    returns = method.get('returns', [])
    example = method.get('example', '')
    memberof = method.get('memberof', '')
    content = f"# {method_name}\n\n{description}\n\n"
    # Syntax section
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\nexpression.{method_name}({param_list});\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"
    content += "## Parameters\n\n"
    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            # line breaks would break the markdown table row
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)
            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"
    content += "\n## Returns\n\n"
    if returns:
        return_type = ', '.join(returns[0].get('type', {}).get('names', [])) if returns[0].get('type') else 'Unknown'
        # Check for enumerations and classes in return type and add links if they exist
        return_type_md = generate_data_types_markdown([return_type], enumerations, classes)
        content += return_type_md
    else:
        content += "This method doesn't return any data."
    if example:
        # Separate comment and code, and remove comment symbols
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
    return content
def generate_properties_markdown(properties, enumerations, classes, root='../../'):
    """Render a markdown table of properties; empty string when there are none."""
    if properties is None:
        return ''
    rows = [
        "## Properties\n\n",
        "| Name | Type | Description |\n",
        "| ---- | ---- | ----------- |\n",
    ]
    for prop in properties:
        description = remove_line_breaks(correct_description(prop.get('description', 'No description provided.')))
        types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes, root)
        rows.append(f"| {prop['name']} | {types_md} | {description} |\n")
    rows.append("\n")
    return ''.join(rows)
def generate_enumeration_markdown(enumeration, enumerations, classes):
    """Render a typedef doclet: a union becomes a value list, an object
    becomes a properties table, anything else a plain type reference."""
    enum_name = enumeration['name']
    description = enumeration.get('description', 'No description provided.')
    description = correct_description(description)
    example = enumeration.get('example', '')
    content = f"# {enum_name}\n\n{description}\n\n"
    if 'TypeUnion' == enumeration['type']['parsedType']['type']:
        content += "## Type\n\nEnumeration\n\n"
        content += "## Values\n\n"
        elements = enumeration['type']['parsedType']['elements']
        for element in elements:
            # NOTE(review): assumes every non-null element carries 'name' — confirm
            element_name = element['name'] if element['type'] != 'NullLiteral' else 'null'
            # Check if element is in enumerations or classes before adding link
            if any(enum['name'] == element_name for enum in enumerations):
                content += f"- [{element_name}](../../Enumeration/{element_name}.md)\n"
            elif element_name in classes:
                content += f"- [{element_name}](../../{element_name}/{element_name}.md)\n"
            else:
                content += f"- {element_name}\n"
    elif enumeration['properties'] is not None:
        content += "## Type\n\nObject\n\n"
        content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
    else:
        content += "## Type\n\n"
        types = enumeration['type']['names']
        for t in types:
            t = generate_data_types_markdown([t], enumerations, classes)
            content += t + "\n\n"
    if example:
        # Separate comment and code, and remove comment symbols
        comment, code = example.split('```js', 1)
        comment = remove_js_comments(comment)
        content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
    return content
def process_doclets(data, output_dir):
    """Split doclets into classes, methods and typedef enumerations, then
    write one markdown file per class, method and enumeration under output_dir."""
    classes = {}        # class name -> list of method doclets
    classes_props = {}  # class name -> properties list (or None)
    enumerations = []
    for doclet in data:
        if doclet['kind'] == 'class':
            class_name = doclet['name']
            classes[class_name] = []
            classes_props[class_name] = doclet.get('properties', None)
        elif doclet['kind'] == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                if class_name not in classes:
                    classes[class_name] = []
                classes[class_name].append(doclet)
        elif doclet['kind'] == 'typedef':
            enumerations.append(doclet)
    # Process classes
    for class_name, methods in classes.items():
        class_dir = os.path.join(output_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)
        # Write class file
        # NOTE(review): classes_props[class_name] raises KeyError for a class
        # created only via a method's memberof (no class doclet) — confirm the
        # jsdoc plugin guarantees a class doclet always exists.
        class_content = generate_class_markdown(class_name, methods, classes_props[class_name], enumerations, classes)
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)
        # Write method files
        for method in methods:
            method_content = generate_method_markdown(method, enumerations, classes)
            write_markdown_file(os.path.join(methods_dir, f"{method['name']}.md"), method_content)
    # Process enumerations
    enum_dir = os.path.join(output_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)
    for enum in enumerations:
        enum_content = generate_enumeration_markdown(enum, enumerations, classes)
        write_markdown_file(os.path.join(enum_dir, f"{enum['name']}.md"), enum_content)
def generate(output_dir):
    """Generate markdown docs: build per-editor JSON doclets into a temporary
    folder, render each editor's pages, then delete the temporary data."""
    print('Generating Markdown documentation...')
    generate_docs_json.generate(output_dir + 'tmp_json', md=True)
    for editor_name in editors:
        input_file = os.path.join(output_dir + 'tmp_json', editor_name + ".json")
        # NOTE(review): this creates a Title-case directory (e.g. "Word") while
        # process_doclets below writes into the lower-case one ("word") —
        # confirm which casing is intended.
        os.makedirs(output_dir + f'/{editor_name.title()}', exist_ok=True)
        data = load_json(input_file)
        process_doclets(data, output_dir + f'/{editor_name}')
    shutil.rmtree(output_dir + 'tmp_json')
    print('Done')
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # the positional argument is optional
        default="../../../../office-js-api/"  # default output root
    )
    args = parser.parse_args()
    generate(args.destination)

View File

@ -0,0 +1,7 @@
{
"dependencies": {
"jsdoc-to-markdown": "7.1.1",
"dmd": "6.1.0",
"handlebars": "4.7.7"
}
}

172
scripts/sln.py Normal file
View File

@ -0,0 +1,172 @@
#!/usr/bin/env python
import sys
sys.path.append('scripts')
import config
import json
import os
is_log = False
def is_exist_in_array(projects, proj):
    """Return True when proj is present in projects.

    Kept as a named helper (rather than inlining `in`) for readability at
    the many call sites below.
    """
    # idiom fix: replaces a hand-rolled linear search with `in`
    return proj in projects
def adjust_project_params(params):
    """Expand platform alias entries ("win", "!win", "win_xp", "linux", "mac",
    "android" and their negations) into the concrete platform names from
    config.platforms.

    Returns a new list; the input list is not mutated (the original aliased
    the argument and grew it in place via `+=`).
    """
    # group concrete platform names by alias
    groups = {"win": [], "win_xp": [], "linux": [], "mac": [], "android": []}
    for platform in config.platforms:
        if 0 == platform.find("win"):
            groups["win"].append(platform)
            if -1 != platform.find("xp"):
                groups["win_xp"].append(platform)
        elif 0 == platform.find("linux"):
            groups["linux"].append(platform)
        elif 0 == platform.find("mac"):
            groups["mac"].append(platform)
        elif 0 == platform.find("android"):
            groups["android"].append(platform)
    ret_params = list(params)
    # preserve the original expansion order: positive then negated, per alias
    for alias in ("win", "win_xp", "linux", "mac", "android"):
        if alias in params:
            ret_params += groups[alias]
        if ("!" + alias) in params:
            ret_params += ["!" + x for x in groups[alias]]
    return ret_params
def get_projects(pro_json_path, platform):
    """Resolve the list of .pro project paths to build for `platform` from a
    sln.json-style file.

    Records may be aliases of other modules, and may carry a "[...]" prefix of
    platform/config constraints (possibly negated with "!"). The configured
    "module", "config" and "features" options filter the result.
    """
    json_path = os.path.abspath(pro_json_path)
    data = json.load(open(json_path))
    # project paths are relative to "root" (default: one level up from the json)
    root_dir_json = "../"
    if ("root" in data):
        root_dir_json = data["root"]
    root_dir = os.path.dirname(json_path)
    if ("/" != root_dir[-1] and "\\" != root_dir[-1]):
        root_dir += "/"
    root_dir += root_dir_json
    result = []
    modules = config.option("module").split(" ")
    for module in modules:
        if (module == ""):
            continue
        if not module in data:
            continue
        # expand aliases: a record naming another module pulls in its records
        records_src = data[module]
        records = []
        for rec in records_src:
            if rec in data:
                records += data[rec]
            else:
                records.append(rec)
        for rec in records:
            params = []
            record = rec
            # parse the optional "[p1,p2,...]" constraint prefix
            if (0 == rec.find("[")):
                pos = rec.find("]")
                if (-1 == pos):
                    continue
                record = rec[pos+1:]
                # NOTE(review): `header` is computed but never used — confirm it can be removed
                header = rec[1:pos].replace(" ", "")
                params_tmp = rec[1:pos].split(",")
                for par in params_tmp:
                    if (par != ""):
                        params.append(par)
                params = adjust_project_params(params)
            if is_exist_in_array(result, record):
                continue
            if is_log:
                print("params: " + ",".join(params))
                print("file: " + record)
            # an explicit "!platform" constraint excludes this record
            if is_exist_in_array(params, "!" + platform):
                continue
            platform_records = []
            platform_records += config.platforms
            platform_records += ["win", "win_xp", "linux", "mac", "android"]
            # if one platform constraint exists => the needed platform must be listed
            is_needed_platform_exist = False
            for pl in platform_records:
                if is_exist_in_array(params, pl):
                    is_needed_platform_exist = True;
                    break
            # if one config constraint exists => all needed configs must be listed
            is_needed_config_exist = False
            for item in params:
                if (0 == item.find("!")):
                    continue
                if is_exist_in_array(platform_records, item):
                    continue
                is_needed_config_exist = True
                break;
            if is_needed_platform_exist:
                if not is_exist_in_array(params, platform):
                    continue
            config_params = config.option("config").split(" ") + config.option("features").split(" ")
            config_params = [x for x in config_params if x]
            is_append = True
            for conf in config_params:
                # a "!config" constraint drops the record when that config is active
                if is_exist_in_array(params, "!" + conf):
                    is_append = False
                    break
                if is_needed_config_exist and not is_exist_in_array(params, conf):
                    is_append = False
                    break
            if is_append:
                result.append(root_dir + record)
    if is_log:
        print(result)
    return result
# test example
if __name__ == '__main__':
    # Manual smoke test: parse the build config, enable verbose logging and
    # list the projects selected for win_64.
    config.parse()
    is_log = True
    projects = get_projects("./../sln.json", "win_64")

101
sln.json Normal file
View File

@ -0,0 +1,101 @@
{
"root" : "../",
"core" : [
"core/Common/3dParty/cryptopp/project/cryptopp.pro",
"core/Common/cfcpp/cfcpp.pro",
"core/UnicodeConverter/UnicodeConverter.pro",
"core/Common/kernel.pro",
"core/Common/Network/network.pro",
"core/DesktopEditor/graphics/pro/graphics.pro",
"core/PdfFile/PdfFile.pro",
"core/DjVuFile/DjVuFile.pro",
"core/XpsFile/XpsFile.pro",
"core/HtmlFile2/HtmlFile2.pro",
"core/Fb2File/Fb2File.pro",
"core/EpubFile/CEpubFile.pro",
"core/HtmlRenderer/htmlrenderer.pro",
"core/DocxRenderer/DocxRenderer.pro",
"core/DesktopEditor/doctrenderer/doctrenderer.pro",
"core/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder_func_lib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro",
"[!no_x2t]core/TxtFile/Projects/Linux/TxtXmlFormatLib.pro",
"[!no_x2t]core/RtfFile/Projects/Linux/RtfFormatLib.pro",
"[!no_x2t]core/OdfFile/Projects/Linux/OdfFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/BinDocument/BinDocument.pro",
"[!no_x2t]core/X2tConverter/build/Qt/X2tConverter.pro",
"[win,linux,mac]core/DesktopEditor/AllFontsGen/AllFontsGen.pro",
"[win,linux,mac]core/DesktopEditor/allthemesgen/allthemesgen.pro",
"[win,linux,mac]core/DesktopEditor/doctrenderer/app_builder/docbuilder.pro",
"[win,linux,mac]core/DesktopEditor/pluginsmanager/pluginsmanager.pro",
"[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro",
"[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro"
],
"builder" : [
"core"
],
"server" : [
"core"
],
"multimedia" : [
"[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro"
],
"spell" : [
"[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
],
"desktop" : [
"core",
"spell",
"multimedia",
"core/DesktopEditor/xmlsec/src/ooxmlsignature.pro",
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro",
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro",
"[win,linux]desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro",
"[win,linux]desktop-apps/win-linux/ASCDocumentEditor.pro",
"[win]desktop-apps/win-linux/extras/projicons/ProjIcons.pro",
"[win,!win_xp]desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro"
],
"mobile" : [
"core"
],
"osign" : [
"[win,linux,mac]core/DesktopEditor/xmlsec/src/osign/lib/osign.pro"
]
}

View File

@ -0,0 +1,4 @@
{
"browser" : "chrome",
"browserUrl" : "C:/Program Files/Google/Chrome/Application/chrome.exe"
}

View File

@ -0,0 +1,4 @@
{
"browser" : "firefox",
"browserUrl" : "C:/Program Files/Mozilla Firefox/firefox.exe"
}

View File

@ -0,0 +1,9 @@
#!/usr/bin/env python
# Installs puppeteer for the browser test harness.
import sys
sys.path.append('../../scripts')
import base
import os
# Skip the bundled Chromium download: run.py points puppeteer at a locally
# installed browser via PUPPETEER_EXECUTABLE_PATH.
os.environ["PUPPETEER_SKIP_CHROMIUM_DOWNLOAD"] = "true"
base.cmd("npm", ["i", "puppeteer"])

64
tests/puppeteer/run.py Normal file
View File

@ -0,0 +1,64 @@
#!/usr/bin/env python
import sys
sys.path.append('../../scripts')
import base
import os
import glob
import json
def get_tests_in_dir(directory):
    """Recursively collect the *.js test files under `directory`."""
    files = []
    for file in glob.glob(directory + "/*.js"):
        if base.is_file(file):
            files.append(file)
        # fix: was a bare `is_dir(file)` which raised NameError at runtime —
        # the helper lives in the `base` module
        elif base.is_dir(file):
            files += get_tests_in_dir(file)
    return files
# usage: run.py path_to_config [path_to_test]
params = sys.argv[1:]
if (0 == len(params)):
    print("use: run.py path_to_config [path_to_test]")
    exit(0)
config_path = params[0]
test_file = "./tests"
if (1 < len(params)):
    test_file = params[1]
tests_array = [test_file]
if base.is_dir(test_file):
    tests_array = get_tests_in_dir(test_file)
# read browser configuration (browser product and executable path)
config_content = "{}"
with open(config_path, "r") as config_path_loader:
    config_content = config_path_loader.read()
print(config_content)
config = json.loads(config_content)
os.environ["PUPPETEER_SKIP_CHROMIUM_DOWNLOAD"] = "true"
if "browser" in config:
    print("browser: " + config["browser"])
    os.environ["PUPPETEER_PRODUCT"] = config["browser"]
if "browserUrl" in config:
    print("browserUrl: " + config["browserUrl"])
    os.environ["PUPPETEER_EXECUTABLE_PATH"] = config["browserUrl"]
# working directories for browser cache and downloads
if not base.is_dir("./work_directory"):
    base.create_dir("./work_directory")
    base.create_dir("./work_directory/cache")
    base.create_dir("./work_directory/downloads")
for test in tests_array:
    print("run test: " + test)
    # splice the test code into a copy of tester.js and run it with node
    run_file = test + ".runned.js"
    base.copy_file("./tester.js", run_file)
    test_content = base.readFile(test)
    # normalize so every Tester. call is awaited exactly once, whether or not
    # the test author wrote `await` themselves
    test_content = test_content.replace("await Tester.", "Tester.")
    test_content = test_content.replace("Tester.", "await Tester.")
    base.replaceInFile(run_file, "\"%%CODE%%\"", test_content)
    base.cmd("node", [run_file])
    base.delete_file(run_file)

171
tests/puppeteer/tester.js Normal file
View File

@ -0,0 +1,171 @@
const puppeteer = require('puppeteer')
const pathfs = require('path')
const fs = require('fs');
// Browser-automation harness for editor autotests, built on puppeteer.
// A single instance is exposed as the global `Tester` (bottom of this file);
// run.py splices the test body into this file and prefixes every `Tester.`
// call with `await`.
function TesterImpl()
{
	// Puppeteer handles, populated by load().
	this.browser = null;
	this.page = null;
	// Viewport geometry.
	this.width = 1500;
	this.height = 800;
	this.pixelRatio = 1;
	// Work directories created by run.py before the test starts.
	this.cacheDir = pathfs.resolve("./work_directory/cache");
	this.downloadsDir = pathfs.resolve("./work_directory/downloads");
	// Gives every downloadFile() call its own temporary directory.
	this.downloadCounter = 0;
	// Launch the browser and navigate to `url` ("&autotest=enabled" is
	// appended). Returns the opened page.
	this.load = async function(url)
	{
		// Window-chrome allowance so the outer window fits the viewport;
		// the values look empirical -- TODO confirm.
		const head = { x: 100, y: 200 };
		this.browser = await puppeteer.launch({
			headless: false,
			product: process.env["PUPPETEER_PRODUCT"],
			args: [
				"--disable-infobars",
				`--window-size=${this.width+head.x},${this.height+head.y}`,
				"--disk-cache-dir=" + this.cacheDir
			],
			defaultViewport : {width: this.width, height: this.height, deviceScaleFactor : this.pixelRatio }
		});
		this.page = await this.browser.newPage();
		await this.page.setViewport({ width: this.width, height: this.height });
		// Firefox gets an explicit wait condition; Chromium uses defaults.
		let waitObject = (process.env["PUPPETEER_PRODUCT"] === "firefox") ? { waitUntil: "networkidle0", timeout: 15000 } : {};
		await this.page.goto(url + "&autotest=enabled", waitObject);
		console.log("[tester] pageLoaded");
		return this.page;
	};
	// Close the browser; waits for autosave first unless nosleep === true.
	this.close = async function(nosleep)
	{
		if (true !== nosleep)
			await this.waitAutosave();
		await this.browser.close();
	};
	// Resolve after `ms` milliseconds.
	this.sleep = async function(ms)
	{
		return await new Promise(resolve => setTimeout(resolve, ms));
	};
	// Give the editor time to finish loading (fixed delay for now).
	this.waitEditor = async function()
	{
		// TODO: wait first onEndRecalculate
		await this.sleep(5000);
		console.log("[tester] editorReady");
	};
	// Fixed delay assumed long enough for an autosave cycle -- TODO confirm.
	this.waitAutosave = async function()
	{
		await this.sleep(5000);
	};
	// Evaluate `code` in the top-level page context.
	this.evaluateInMainFrame = async function(code)
	{
		return await this.page.evaluate(code);
	};
	// Evaluate `code` inside the editor iframe ("frameEditor");
	// returns undefined when the frame is not found.
	this.evaluateInEditorFrame = async function(code)
	{
		// NOTE(review): page.frames() returns an array, so this `await` is a
		// no-op -- confirm against the puppeteer version in use.
		const frame = await this.page.frames().find(frame => frame.name() === 'frameEditor');
		if (!frame)
			return;
		return await frame.evaluate(code);
	};
	// Click the DOM element with the given id inside the editor frame.
	this.click = async function(id)
	{
		let res = await this.evaluateInEditorFrame("document.getElementById(\"" + id + "\").click(); \"[tester] clicked: " + id + "\"");
		//console.log(res);
		await this.sleep(200);
		return res;
	};
	// Click at page coordinates (x, y) with the real mouse.
	this.mouseClick = async function(x, y, options)
	{
		let res = await this.page.mouse.click(x, y, options);
		await this.sleep(200);
		return res;
	};
	// Evaluate arbitrary JS in the editor frame, then settle briefly.
	this.eval = async function(code)
	{
		let res = await this.evaluateInEditorFrame(code);
		await this.sleep(200);
		return res;
	};
	// Press (and hold) a key.
	this.keyDown = async function(key)
	{
		// https://pptr.dev/api/puppeteer.keyinput
		let res = await this.page.keyboard.down(key);
		await this.sleep(200);
		return res;
	};
	// Release a held key.
	this.keyUp = async function(key)
	{
		// https://pptr.dev/api/puppeteer.keyinput
		let res = await this.page.keyboard.up(key);
		await this.sleep(200);
		return res;
	};
	// Key down immediately followed by key up.
	this.keyClick = async function(key)
	{
		// https://pptr.dev/api/puppeteer.keyinput
		let res = await this.page.keyboard.down(key);
		res = await this.page.keyboard.up(key);
		await this.sleep(200);
		return res;
	};
	// Single key press via puppeteer's press().
	this.keyPress = async function(key)
	{
		// https://pptr.dev/api/puppeteer.keyinput
		let res = await this.page.keyboard.press(key);
		await this.sleep(200);
		return res;
	};
	// Type a string of text character by character.
	this.input = async function(text)
	{
		let res = await this.page.keyboard.type(text);
		await this.sleep(200);
		return res;
	};
	// Download the open document as `format` (e.g. "docx") and copy the
	// result to `path`. Drives the editor's File > Download-as UI.
	this.downloadFile = async function(format, path)
	{
		// Fresh tmp dir per call so the single produced file is easy to find.
		const tmpDir = pathfs.resolve(this.downloadsDir, "./tmp" + this.downloadCounter++);
		fs.mkdirSync(tmpDir);
		// emulate download
		const client = await this.page.target().createCDPSession();
		await client.send("Page.setDownloadBehavior", {
			behavior: "allow",
			downloadPath: tmpDir
		});
		// File tab -> format button -> back to the document.
		await this.evaluateInEditorFrame("document.querySelectorAll('[data-layout-name=\"toolbar-file\"]')[0].click();");
		await this.sleep(200);
		await this.evaluateInEditorFrame("document.getElementsByClassName(\"svg-format-" + format + "\")[0].click();");
		await this.sleep(200);
		await this.evaluateInEditorFrame("document.getElementById(\"fm-btn-return\").click();");
		// Fixed wait for the download to complete -- TODO confirm sufficiency.
		await this.sleep(2000);
		const files = fs.readdirSync(tmpDir);
		fs.copyFileSync(pathfs.resolve(tmpDir, "./" + files[0]), pathfs.resolve(path));
		fs.rmSync(tmpDir, { recursive: true, force: true });
	};
}
const Tester = new TesterImpl;
// "%%CODE%%" is replaced by the test body (run.py awaits every Tester call).
// Bug fix: a try/catch around a non-awaited async IIFE can never observe its
// rejections -- async errors surface as a rejected promise, not a synchronous
// throw. Attach a .catch() handler to the returned promise instead.
(async () => {
	"%%CODE%%"
})().catch((err) => {
	console.error(err);
});

View File

@ -0,0 +1,27 @@
// Example autotest script. run.py prefixes every "Tester." call with
// "await" before execution, so the calls below may omit it.
Tester.load("path_to_file");
Tester.waitEditor();
// down Enter
Tester.keyClick("Enter");
// type text
Tester.input("Hello World!");
Tester.keyPress("ArrowLeft");
// select the previous five characters with Shift+ArrowLeft
Tester.keyDown("Shift");
for (let i = 0; i < 5; i++)
	Tester.keyPress("ArrowLeft");
Tester.keyUp("Shift");
// bold
Tester.click("id-toolbar-btn-bold");
// italic
Tester.mouseClick(115, 105);
// if needed
Tester.waitAutosave();
Tester.downloadFile("docx", "./work_directory/new.docx")
Tester.downloadFile("odt", "./work_directory/new.odt")
Tester.close(true);

View File

@ -1,10 +1,10 @@
## Overview
# Overview
**change_autor.py** is a tool for changing the author and last-modified fields of all documents in a folder.
**change_autor.py** is a tool for changing the author and last-modified fields of all documents in a folder.
## How to use
1. Place the files to be changed in a folder, e.g. **input**.
1. Place the files to be changed in a folder, e.g. **input**.
2. Create a folder in which the modified files will be stored, e.g. **output**.
3. Call the file *change_autor.py* as shown below.
@ -17,10 +17,11 @@ ________________________
## How to use
1. Place the files to be changed in a folder, e.g. **input**.
1. Place the files to be changed in a folder, e.g. **input**.
2. Create a folder in which the converted files will be stored, e.g. **output**.
3. Call the file *convert_directory.py* as shown below.
```bash
convert_directory.py path_to_builder_directory path_to_input_folder path_to_output_folder format_ext
convert_directory.py path_to_builder_directory
path_to_input_folder path_to_output_folder format_ext
```

View File

@ -7,6 +7,9 @@ import os
import glob
import shutil
sys.stdin.reconfigure(encoding='utf-8')
sys.stdout.reconfigure(encoding='utf-8')
params = sys.argv[1:]
if (3 > len(params)):
@ -20,8 +23,14 @@ directory_input = params[0].replace("\\", "/")
directory_output = params[1].replace("\\", "/")
author_name = params[2]
if not os.path.exists(directory_output):
os.mkdir(directory_output)
input_files = []
count = 1
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
print(count, file)
count += 1
input_files.append(file.replace("\\", "/"))
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
@ -30,7 +39,7 @@ def change_author_name(file_dist, output_file, author_name):
app = "7za" if ("mac" == base.host_platform()) else "7z"
base.cmd_exe(app, ["x", "-y", file_dist, "-o" + temp_dir, "docProps\\core.xml", "-r"])
with open(temp_dir + "/docProps/core.xml", 'r') as file:
with open(temp_dir + "/docProps/core.xml", 'r', encoding='utf-8') as file:
data = file.read()
creator_open = "<dc:creator>"
@ -67,7 +76,7 @@ def change_author_name(file_dist, output_file, author_name):
else:
data = data[:last_tag_pos] + lastModified_open + author_name + lastModified_close + data[last_tag_pos:]
with open(temp_dir + "/docProps/core.xml", 'w') as file:
with open(temp_dir + "/docProps/core.xml", 'w', encoding='utf-8') as file:
file.write(data)
shutil.copyfile(file_dist, output_file)
@ -80,7 +89,12 @@ for input_file in input_files:
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + input_file.split(".")[-1]
change_author_name(input_file, output_file, author_name)
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]).replace(' ', '_') + u"." + input_file.split(".")[-1]
try:
change_author_name(input_file, output_file, author_name)
except:
print("Error in converting document: ", input_file)
continue
base.delete_dir(temp_dir)
output_cur += 1

View File

@ -0,0 +1,35 @@
#!/usr/bin/env python
# Converts every file under an input directory via the x2t "test" mode.
# use: convert_directory.py path_to_builder_directory path_to_sdkjs_directory
#      editor_type path_to_input_files_directory path_to_output_files_directory
import sys
sys.path.append('../../scripts')
import base
import os
import glob
import convert_common
params = sys.argv[1:]
if (5 > len(params)):
    print("use: convert_directory.py path_to_builder_directory path_to_sdkjs_directory editor_type path_to_input_files_directory path_to_output_files_directory")
    exit(0)
base.configure_common_apps()
# Normalise all paths to forward slashes (unused local `cur_path` removed).
directory_x2t = params[0].replace("\\", "/")
directory_sdkjs = params[1].replace("\\", "/")
editor_type = params[2].replace("\\", "/")
directory_input = params[3].replace("\\", "/")
directory_output = params[4].replace("\\", "/")
# Robustness fix: create the output directory when it does not exist
# (the sibling change-author script already does this).
if not os.path.exists(directory_output):
    os.makedirs(directory_output)
# Walk the whole input tree recursively.
input_files = [os.path.join(dirpath, f)
    for dirpath, dirnames, files in os.walk(directory_input)
    for f in files]
output_len = len(input_files)
output_cur = 1
for input_file in input_files:
    print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
    output_file = os.path.join(directory_output, os.path.basename(input_file))
    base.cmd_in_dir(directory_x2t, "test", [directory_sdkjs, editor_type, input_file, output_file], True)
    output_cur += 1

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append('../../scripts')
@ -51,19 +52,43 @@ if base.is_file(directory_fonts_local + "/AllFonts.js"):
directory_fonts = directory_fonts_local
# ---------------------------------------------------
json_params = "{'spreadsheetLayout':{'fitToWidth':1,'fitToHeight':1},"
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}}"
json_params = "{"
json_params += "'spreadsheetLayout':{"
# True for fit, False for 100%
isScaleSheetToPage = False
json_fit_text = "0"
if isScaleSheetToPage:
json_fit_text = "1"
json_params += "'fitToWidth':" + json_fit_text + ",'fitToHeight':" + json_fit_text + ","
if True:
json_params += "'orientation':'landscape',"
page_margins = "'pageMargins':{'bottom':10,'footer':5,'header':5,'left':5,'right':5,'top':10}"
page_setup = "'pageSetup':{'orientation':1,'width':210,'height':297,'paperUnits':0,'scale':100,'printArea':false,'horizontalDpi':600,'verticalDpi':600,'usePrinterDefaults':true,'fitToHeight':0,'fitToWidth':0}"
json_params += "'sheetsProps':{'0':{'headings':false,'printTitlesWidth':null,'printTitlesHeight':null," + page_margins + "," + page_setup + "}}},"
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}"
json_params += "}"
json_params = json_params.replace("'", "&quot;")
output_len = len(input_files)
output_cur = 1
for input_file in input_files:
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0])
output_file_tmp = os.path.join(output_dir, "temp")
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0].strip())
xml_convert = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
xml_convert += u"<TaskQueueDataConvert>"
xml_convert += (u"<m_sFileFrom>" + input_file + u"</m_sFileFrom>")
xml_convert += (u"<m_sFileTo>" + output_file + u".zip</m_sFileTo>")
xml_convert += (u"<m_sFileTo>" + output_file_tmp + u".zip</m_sFileTo>")
xml_convert += u"<m_nFormatTo>1029</m_nFormatTo>"
xml_convert += (u"<m_sAllFontsPath>" + directory_fonts + u"/AllFonts.js</m_sAllFontsPath>")
xml_convert += (u"<m_sFontDir>" + directory_fonts + u"</m_sFontDir>")
@ -83,8 +108,9 @@ for input_file in input_files:
base.cmd_in_dir(directory_x2t, "x2t", [temp_dir + "/to.xml"], True)
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
base.extract_unicode(output_file + u".zip", output_file)
base.delete_file(output_dir + "/" + os.path.splitext(os.path.basename(input_file))[0] + ".zip")
base.extract_unicode(output_file_tmp + u".zip", output_file_tmp)
base.move_dir(str(output_file_tmp), str(output_file))
base.delete_file(output_file_tmp + u".zip")
output_cur += 1
base.delete_dir(temp_dir)

View File

@ -0,0 +1,38 @@
FROM arm32v7/ubuntu:16.04
# basic dependencies
RUN apt-get -y update && \
    apt-get -y install wget xz-utils
# qt source
RUN mkdir /source && cd /source && \
    wget -q https://download.qt.io/new_archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz && \
    tar -xf qt-everywhere-opensource-src-5.9.9.tar.xz
# build dependencies
# Fix: run `apt-get update` in the same layer as this install. Relying on the
# package index from the first RUN breaks when that layer is served from the
# build cache with a stale (or since-pruned) index.
RUN apt-get -y update && \
    apt-get -y install \
    build-essential \
    glib-2.0-dev \
    libglu1-mesa-dev \
    libgtk-3-dev \
    libpulse-dev \
    libasound2-dev \
    libatspi2.0-dev \
    libcups2-dev \
    libdbus-1-dev \
    libicu-dev \
    libgstreamer1.0-dev \
    libgstreamer-plugins-base1.0-dev \
    libx11-xcb-dev \
    libxcb* \
    libxi-dev \
    libxrender-dev \
    libxss-dev
# increase or decrease CORES value to change the number of parallel jobs while building qt
ENV CORES=4
# Build at container run time (not image build time) so the result lands in
# the /build volume mounted by build_qt.py.
CMD cd /source/qt-everywhere-opensource-src-5.9.9 && \
    ./configure -opensource -confirm-license -release -shared -accessibility -prefix /build -qt-zlib -qt-libpng -qt-libjpeg -qt-xcb -qt-pcre -no-sql-sqlite -no-qml-debug -gstreamer 1.0 -nomake examples -nomake tests -skip qtenginio -skip qtlocation -skip qtserialport -skip qtsensors -skip qtxmlpatterns -skip qt3d -skip qtwebview -skip qtwebengine && \
    make -j$CORES && \
    make install

View File

@ -0,0 +1,38 @@
FROM arm64v8/ubuntu:16.04
# basic dependencies
RUN apt-get -y update && \
    apt-get -y install wget xz-utils
# qt source
RUN mkdir /source && cd /source && \
    wget -q https://download.qt.io/new_archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz && \
    tar -xf qt-everywhere-opensource-src-5.9.9.tar.xz
# build dependencies
# Fix: run `apt-get update` in the same layer as this install. Relying on the
# package index from the first RUN breaks when that layer is served from the
# build cache with a stale (or since-pruned) index.
RUN apt-get -y update && \
    apt-get -y install \
    build-essential \
    glib-2.0-dev \
    libglu1-mesa-dev \
    libgtk-3-dev \
    libpulse-dev \
    libasound2-dev \
    libatspi2.0-dev \
    libcups2-dev \
    libdbus-1-dev \
    libicu-dev \
    libgstreamer1.0-dev \
    libgstreamer-plugins-base1.0-dev \
    libx11-xcb-dev \
    libxcb* \
    libxi-dev \
    libxrender-dev \
    libxss-dev
# increase or decrease CORES value to change the number of parallel jobs while building qt
ENV CORES=4
# Build at container run time (not image build time) so the result lands in
# the /build volume mounted by build_qt.py.
CMD cd /source/qt-everywhere-opensource-src-5.9.9 && \
    ./configure -opensource -confirm-license -release -shared -accessibility -prefix /build -qt-zlib -qt-libpng -qt-libjpeg -qt-xcb -qt-pcre -no-sql-sqlite -no-qml-debug -gstreamer 1.0 -nomake examples -nomake tests -skip qtenginio -skip qtlocation -skip qtserialport -skip qtsensors -skip qtxmlpatterns -skip qt3d -skip qtwebview -skip qtwebengine && \
    make -j$CORES && \
    make install

30
tools/linux/arm/build_qt.py Executable file
View File

@ -0,0 +1,30 @@
#!/usr/bin/env python
import sys
import os
import argparse
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__name__ + '/../../../scripts')
import base
def docker_build(image_name, dockerfile_dir, build_dir):
    # Build the docker image from `dockerfile_dir`, run it once with
    # `build_dir` mounted at /build (where the container writes its output),
    # then remove the image again.
    steps = [
        ["build", "-t", image_name, dockerfile_dir],
        ["run", "--rm", "-v", build_dir + ":/build", image_name],
        ["image", "rm", image_name],
    ]
    for docker_args in steps:
        base.cmd("docker", docker_args)
    return
if __name__ == "__main__":
    # Build Qt for an ARM target inside docker; artifacts land in build_dir.
    parser = argparse.ArgumentParser(description='Build qt for linux arm architecture')
    parser.add_argument('build_dir', help='the path to build directory (directory may not exist)')
    parser.add_argument('-a', '--arch', action='store', help='target architecture (arm32 or arm64)', choices=['arm32', 'arm64'], required=True)
    args = parser.parse_args()
    build_dir = args.build_dir
    # Always start from an empty build directory.
    if base.is_dir(build_dir):
        base.delete_dir(build_dir)
    base.create_dir(build_dir)
    abs_build_path = os.path.abspath(build_dir)
    arch = args.arch
    # The Dockerfile lives in ./arm32 or ./arm64 next to this script.
    docker_build('qt-' + arch, __dir__name__ + "/" + arch, abs_build_path)

View File

@ -54,17 +54,10 @@ def install_deps():
print("Installed Node.js version: " + str(nodejs_cur_version_major) + "." + str(nodejs_cur_version_minor))
except:
nodejs_cur = 1
if (nodejs_cur < 14000):
print("Node.js version cannot be less 14")
if (nodejs_cur < 16000):
print("Node.js version cannot be less 16")
print("Reinstall")
if (base.is_dir("./node_js_setup_14.x")):
base.delete_dir("./node_js_setup_14.x")
base.cmd("sudo", ["apt-get", "remove", "--purge", "-y", "nodejs"])
base.download("https://deb.nodesource.com/setup_14.x", "./node_js_setup_14.x")
base.cmd('curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -')
base.cmd("sudo", ["bash", "./node_js_setup_14.x"])
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
base.cmd("sudo", ["npm", "install", "-g", "npm@6"])
base.run_as_bat(["curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - &&sudo apt-get install -y nodejs"])
else:
print("OK")
base.cmd("sudo", ["apt-get", "-y", "install", "npm", "yarn"], True)

BIN
tools/linux/python3.tar.gz Normal file

Binary file not shown.

445
tools/mac/toolchain.prf Normal file
View File

@ -0,0 +1,445 @@
# Abort qmake with a formatted multi-line error.
#   $$1 - leading message line
#   $$2 - verbatim payload (e.g. compiler output), framed by separator lines
#   $$3 - trailing hint line
defineTest(qtToolchainError) {
    msg = \
        $$1 \
        "===================" \
        $$2 \
        "===================" \
        $$3
    error($$join(msg, $$escape_expand(\\n)))
}
# Report a failure to run the (host or target) compiler $$1; $$2 is the
# output the failed invocation produced.
defineTest(qtCompilerError) {
    !cross_compile: \
        what =
    else: host_build: \
        what = " host"
    else: \
        what = " target"
    qtToolchainError("Cannot run$$what compiler '$$1'. Output:", $$2, \
        "Maybe you forgot to setup the environment?")
}
# When building host tools during a cross-build, stash probe results under
# QMAKE_HOST_CXX.*; otherwise under QMAKE_CXX.* (separate cache namespaces).
cross_compile:host_build: \
    target_prefix = QMAKE_HOST_CXX
else: \
    target_prefix = QMAKE_CXX
#
# Determine and cache the compiler version
#
# Preprocess data/macros.cpp with the MSVC-style compiler $$1 (extra flags in
# $$2) and return the resulting lines describing the predefined macros.
defineReplace(qtVariablesFromMSVC) {
    ret = $$system("$$1 -nologo -E $$2 $$system_quote($$PWD/data/macros.cpp) 2>NUL", lines, ec)
    !equals(ec, 0): qtCompilerError($$1, $$ret)
    return($$ret)
}
# Same as qtVariablesFromMSVC, but for GCC-style drivers: preprocess
# data/macros.cpp with $$1 and return the produced macro-description lines.
defineReplace(qtVariablesFromGCC) {
    ret = $$system("$$1 -E $$system_quote($$PWD/data/macros.cpp) \
        2>$$QMAKE_SYSTEM_NULL_DEVICE", lines, ec)
    !equals(ec, 0): qtCompilerError($$1, $$ret)
    return($$ret)
}
# Probe the compiler once for its predefined macros and stash each value in
# the qmake cache under $${target_prefix}.<NAME>; later runs load the cached
# values instead of re-running the compiler.
isEmpty($${target_prefix}.COMPILER_MACROS) {
    msvc {
        clang_cl {
            # We need to obtain the cl.exe version first
            vars = $$qtVariablesFromMSVC(cl)
            for (v, vars) {
                isEmpty(v)|contains(v, $${LITERAL_HASH}.*): next()
                eval($$v)
            }
            isEmpty(QMAKE_MSC_FULL_VER): error("Could not determine the Visual Studio version")
            # Derive clang-cl's -fms-compatibility-version from cl's version.
            QMAKE_CFLAGS_MSVC_COMPAT = $$replace(QMAKE_MSC_FULL_VER, "(..)(..)(.*)", \
                "-fms-compatibility-version=\\1.\\2.\\3")
            cache($${target_prefix}.QMAKE_CFLAGS_MSVC_COMPAT, set stash, QMAKE_CFLAGS_MSVC_COMPAT)
            $${target_prefix}.COMPILER_MACROS += QMAKE_CFLAGS_MSVC_COMPAT
            vars = $$qtVariablesFromMSVC($$QMAKE_CXX, $$QMAKE_CFLAGS_MSVC_COMPAT)
        } else {
            vars = $$qtVariablesFromMSVC($$QMAKE_CXX)
        }
    } else: gcc|ghs {
        vars = $$qtVariablesFromGCC($$QMAKE_CXX)
    }
    for (v, vars) {
        !contains(v, "[A-Z_]+ = .*"): next()
        # Set both <varname> for the outer scope ...
        eval($$v)
        v ~= s/ .*//
        isEmpty($$v): error("Compiler produced empty value for $${v}.")
        # ... and save QMAKE_(HOST_)?CXX.<varname> in the cache.
        cache($${target_prefix}.$$v, set stash, $$v)
        $${target_prefix}.COMPILER_MACROS += $$v
    }
    cache($${target_prefix}.COMPILER_MACROS, set stash)
} else {
    # load from the cache
    for (i, $${target_prefix}.COMPILER_MACROS): \
        $$i = $$eval($${target_prefix}.$$i)
}
# Populate QMAKE_COMPILER_DEFINES and some compatibility variables.
# The $$format_number() calls strip leading zeros to avoid misinterpretation as octal.
QMAKE_COMPILER_DEFINES += __cplusplus=$$QT_COMPILER_STDCXX
!isEmpty(QMAKE_MSC_VER): \
    QMAKE_COMPILER_DEFINES += _MSC_VER=$$QMAKE_MSC_VER _MSC_FULL_VER=$$QMAKE_MSC_FULL_VER
!isEmpty(QMAKE_ICC_VER): \
    QMAKE_COMPILER_DEFINES += __INTEL_COMPILER=$$QMAKE_ICC_VER __INTEL_COMPILER_UPDATE=$$QMAKE_ICC_UPDATE_VER
!isEmpty(QMAKE_APPLE_CC): \
    QMAKE_COMPILER_DEFINES += __APPLE_CC__=$$QMAKE_APPLE_CC
!isEmpty(QMAKE_APPLE_CLANG_MAJOR_VERSION): \
    QMAKE_COMPILER_DEFINES += __clang__ \
        __clang_major__=$$QMAKE_APPLE_CLANG_MAJOR_VERSION \
        __clang_minor__=$$QMAKE_APPLE_CLANG_MINOR_VERSION \
        __clang_patchlevel__=$$QMAKE_APPLE_CLANG_PATCH_VERSION
!isEmpty(QMAKE_CLANG_MAJOR_VERSION): \
    QMAKE_COMPILER_DEFINES += __clang__ \
        __clang_major__=$$QMAKE_CLANG_MAJOR_VERSION \
        __clang_minor__=$$QMAKE_CLANG_MINOR_VERSION \
        __clang_patchlevel__=$$QMAKE_CLANG_PATCH_VERSION
!isEmpty(QMAKE_GCC_MAJOR_VERSION): \
    QMAKE_COMPILER_DEFINES += \
        __GNUC__=$$QMAKE_GCC_MAJOR_VERSION \
        __GNUC_MINOR__=$$QMAKE_GCC_MINOR_VERSION \
        __GNUC_PATCHLEVEL__=$$QMAKE_GCC_PATCH_VERSION
!isEmpty(QMAKE_GHS_VERSION): \
    QMAKE_COMPILER_DEFINES += __ghs__ __GHS_VERSION_NUMBER=$$QMAKE_GHS_VERSION
# QMAKE_CFLAGS_MSVC_COMPAT is only non-empty for clang-cl (set above).
QMAKE_CFLAGS += $$QMAKE_CFLAGS_MSVC_COMPAT
QMAKE_CXXFLAGS += $$QMAKE_CFLAGS_MSVC_COMPAT
clang_cl|intel_icl {
    include(../common/msvc-based-version.conf)
} else: msvc {
    include(../common/msvc-version.conf)
}
#
# Determine and cache the default search paths
#
# Repeatedly expand $(EXPORT_name) makefile-style references in $$1 with the
# value of the qmake variable `name`, until no reference remains.
defineReplace(qtMakeExpand) {
    out = "$$1"
    for(ever) {
        m = $$replace(out, ".*\\$\\(EXPORT_([^)]+)\\).*", \\1)
        equals(m, $$out): \
            return($$out)
        out = $$replace(out, "\\$\\(EXPORT_$$m\\)", $$eval($$m))
    }
}
# Split a platform-native path list ($$1) on QMAKE_DIRLIST_SEP and return the
# cleaned entries.
defineReplace(qtSplitPathList) {
    paths = $$split(1, $$QMAKE_DIRLIST_SEP)
    ret =
    for (p, paths): \
        ret += $$clean_path($$p)
    return($$ret)
}
# Join the paths in $$1 into a single nmake-safe list: native separators,
# '#' and '$' escaped for nmake, entries joined with QMAKE_DIRLIST_SEP.
defineReplace(qtNmakePathList) {
    paths =
    for (p, 1): \
        paths += $$shell_path($$p)
    paths ~= s,$${LITERAL_HASH},^$${LITERAL_HASH},g
    paths ~= s,\\$,\$\$,g
    return($$join(paths, $$QMAKE_DIRLIST_SEP))
}
# Normalise the MSVC target architecture name (amd64 / x86 / arm / arm64) and
# mark ARM64 desktop builds as cross-builds (msvc_cross).
msvc {
    arch = $$lower($$VCPROJ_ARCH)
    equals(arch, x64): \ # may be "win32" or undefined
        arch = amd64
    else: !equals(arch, arm):!equals(arch, arm64): \ # may be "win32" or undefined
        arch = x86
    # Consider only ARM64 desktop builds to be cross-builds -
    # the host is assumed to be Intel and capable of running the target
    # executables (so building for x64 on x86 will break).
    equals(arch, arm64): \
        CONFIG += msvc_cross
}
# Determine the compiler's default include/library search paths (and, for
# msvc_cross, PATH) once, then stash them in the cache; subsequent qmake runs
# load the cached values in the else branch at the bottom.
isEmpty($${target_prefix}.INCDIRS) {
    #
    # Get default include and library paths from compiler
    #
    wasm {
        # wasm compiler does not work here, just use defaults
    } else: gcc {
        cmd_suffix = "<$$QMAKE_SYSTEM_NULL_DEVICE >$$QMAKE_SYSTEM_NULL_DEVICE"
        equals(QMAKE_HOST.os, Windows): \
            cmd_prefix = "set LC_ALL=C&"
        else: \
            cmd_prefix = "LC_ALL=C"
        cxx_flags = $$QMAKE_CXXFLAGS
        # Manually inject the sysroot for Apple Platforms because its resolution
        # normally does not happen until default_post.prf. This is especially
        # important for moc to gain the correct default include directory list.
        # While technically incorrect but without any likely practical effect,
        # UIKit simulator platforms will see the device SDK's sysroot in
        # QMAKE_DEFAULT_*DIRS, because they're handled in a single build pass.
        darwin {
            uikit {
                # Clang doesn't automatically pick up the architecture, just because
                # we're passing the iOS sysroot below, and we will end up building the
                # test for the host architecture, resulting in linker errors when
                # linking against the iOS libraries. We work around this by passing
                # the architecture explicitly.
                cxx_flags += -arch $$first(QMAKE_APPLE_DEVICE_ARCHS)
            }
            uikit:macx-xcode: \
                cxx_flags += -isysroot $$sdk_path_device.value
            else: \
                cxx_flags += -isysroot $$QMAKE_MAC_SDK_PATH
        }
        rim_qcc: \
            # Need the cc1plus and ld command lines to pick up the paths
            cxx_flags += $$QMAKE_LFLAGS_SHLIB -o $$QMAKE_SYSTEM_NULL_DEVICE -v
        else: darwin:clang: \
            # Need to link to pick up library paths
            cxx_flags += -g0 $$QMAKE_LFLAGS_SHLIB -o /dev/null -v -Wl,-v
        else: \
            # Just preprocess, might not pick up library paths
            cxx_flags += -E -v
        # Run the compiler in verbose mode and scrape its search paths.
        output = $$system("$$cmd_prefix $$QMAKE_CXX $$qtMakeExpand($$cxx_flags) -xc++ - 2>&1 $$cmd_suffix", lines, ec)
        !equals(ec, 0): qtCompilerError($$QMAKE_CXX, $$output)
        rim_qcc {
            # QNX qcc: parse the cc1plus (-isystem) and ld (-L) command lines.
            for (line, output) {
                contains(line, "^[^ ]*cc1plus .*") {
                    take_next = false
                    for (parameter, $$list($$line)) {
                        $$take_next {
                            QMAKE_DEFAULT_INCDIRS += $$clean_path($$parameter)
                            take_next = false
                        } else: equals(parameter, "-isystem") {
                            take_next = true
                        }
                    }
                } else: contains(line, "^[^ ]*-ld .*") {
                    for (parameter, $$list($$line)) {
                        contains(parameter, "^-L.*") {
                            parameter ~= s/^-L//
                            QMAKE_DEFAULT_LIBDIRS += $$clean_path($$parameter)
                        }
                    }
                }
            }
        } else {
            # GCC/Clang: state machine over the verbose output's search-path
            # sections.
            add_includes = false
            add_libraries = false
            for (line, output) {
                line ~= s/^[ \\t]*// # remove leading spaces
                contains(line, "LIBRARY_PATH=.*") {
                    line ~= s/^LIBRARY_PATH=// # remove leading LIBRARY_PATH=
                    equals(QMAKE_HOST.os, Windows): \
                        paths = $$split(line, ;)
                    else: \
                        paths = $$split(line, $$QMAKE_DIRLIST_SEP)
                    for (path, paths): \
                        QMAKE_DEFAULT_LIBDIRS += $$clean_path($$path)
                } else: contains(line, "Library search paths:") {
                    add_libraries = true
                } else: contains(line, "$${LITERAL_HASH}include <.*") { # #include <...> search starts here:
                    add_includes = true
                } else: contains(line, "End of search.*") {
                    add_includes = false
                } else: $$add_libraries {
                    # We assume all library search paths are absolute
                    !contains(line, "^/.*") {
                        add_libraries = false
                        next()
                    }
                    QMAKE_DEFAULT_LIBDIRS += $$clean_path($$line)
                } else: $$add_includes {
                    !contains(line, ".* \\(framework directory\\)"): \
                        QMAKE_DEFAULT_INCDIRS += $$clean_path($$line)
                }
            }
        }
        if(!darwin:clang)|intel_icc {
            # Clang on a non-Apple system (that is, a system without ld64 -- say, with GNU ld
            # or gold under Linux) will not print any library search path. Need to use another
            # invocation with different options (which in turn doesn't print include search
            # paths, so it can't just be used in place of the above code).
            # What's more, -print-search-dirs can't be used on clang on Apple because it
            # won't print all the library paths (only the clang-internal ones).
            output = $$system("$$cmd_prefix $$QMAKE_LINK $$QMAKE_LFLAGS -print-search-dirs", lines, ec)
            !equals(ec, 0): qtCompilerError($$QMAKE_LINK, $$output)
            for (line, output) {
                contains(line, "^libraries: .*") {
                    line ~= s,^libraries: ,,
                    equals(QMAKE_HOST.os, Windows) {
                        # clang (7.x) on Windows uses the wrong path list separator ...
                        line ~= s,:(?![/\\\\]),;,
                        paths = $$split(line, ;)
                    } else {
                        paths = $$split(line, $$QMAKE_DIRLIST_SEP)
                    }
                    for (path, paths): \
                        QMAKE_DEFAULT_LIBDIRS += $$clean_path($$replace(path, ^=, $$[SYSROOT]))
                }
            }
        }
        isEmpty(QMAKE_DEFAULT_INCDIRS): \
            !integrity: \
                error("failed to parse default include paths from compiler output")
        isEmpty(QMAKE_DEFAULT_LIBDIRS): \
            !integrity:!darwin: \
                error("failed to parse default library paths from compiler output")
        QMAKE_DEFAULT_LIBDIRS = $$unique(QMAKE_DEFAULT_LIBDIRS)
    } else: ghs {
        # Green Hills: scrape the ecom (include) and elxr (lib) driver lines.
        cmd = $$QMAKE_CXX $$QMAKE_CXXFLAGS -$${LITERAL_HASH} -o /tmp/fake_output /tmp/fake_input.cpp
        output = $$system("$$cmd", blob, ec)
        !equals(ec, 0): qtCompilerError($$QMAKE_CXX, $$output)
        output ~= s/\\\\\\n {8}//g
        output = $$split(output, $$escape_expand(\\n))
        for (line, output) {
            contains(line, "^[^ ]+/ecom[^ ]+ .* /tmp/fake_input\\.cpp") {
                for (parameter, $$list($$line)) {
                    contains(parameter, "^(-I|--include_no_mmd=|--sys_include=).*") {
                        parameter ~= s/^(-I|--include_no_mmd=|--sys_include=)//
                        QMAKE_DEFAULT_INCDIRS += $$clean_path($$parameter)
                    }
                }
            } else: contains(line, "^[^ ]+/elxr .*") {
                for (parameter, $$list($$line)) {
                    contains(parameter, "^-L.*") {
                        parameter ~= s/^-L//
                        QMAKE_DEFAULT_LIBDIRS += $$clean_path($$parameter)
                    }
                }
            }
        }
    } else: msvc_cross {
        # Use a batch file, because %VAR% in the system() call expands to
        # the pre-script-call value, and !VAR! cannot be enabled outside
        # a batch file without invoking another shell instance.
        cmd = $$system_quote($$system_path($$PWD/data/dumpvcvars.bat))
        hostArch = $$QMAKE_HOST.arch
        equals(hostArch, x86_64): \
            hostArch = amd64
        !equals(arch, $$hostArch): \
            arch = $${hostArch}_$$arch
        isEmpty(MSVC_VER): \
            error("Mkspec does not specify MSVC_VER. Cannot continue.")
        versionAtLeast(MSVC_VER, 15.0) {
            dir = $$(VSINSTALLDIR)
            isEmpty(dir) {
                version_parts = $$split(MSVC_VER, .)
                MSVC_NEXT_MAJOR = $$num_add($$first(version_parts), 1)
                vswhere = "$$getenv(ProgramFiles\(x86\))/Microsoft Visual Studio/Installer/vswhere.exe"
                !exists($$vswhere): \
                    error("Could not find $$vswhere")
                vswhere = $$system_quote($$system_path($$vswhere))
                # -version parameter: A version range for instances to find. 15.0 will get all versions >= 15.0
                # Example: [15.0,16.0) will find versions 15.*.
                dir = $$system("$$vswhere -latest -version [$$MSVC_VER,$${MSVC_NEXT_MAJOR}.0] -property installationPath")
            }
            isEmpty(dir): \
                error("Failed to find the Visual Studio installation directory.")
            cmd += $$system_quote($$dir\\VC\\Auxiliary\\Build\\vcvarsall.bat) $$arch
        } else {
            dir = $$(VCINSTALLDIR)
            isEmpty(dir): \
                dir = $$read_registry(HKLM, \
                    "Software\\Microsoft\\VisualStudio\\$$MSVC_VER\\Setup\\VC\\ProductDir", 32)
            isEmpty(dir): \
                error("Failed to find the Visual C installation directory.")
            cmd += $$system_quote($$dir\\vcvarsall.bat) $$arch
        }
        isEmpty(WINSDK_VER): \
            error("Mkspec does not specify WINSDK_VER. Cannot continue.")
        # We prefer the environment variable, because that may work around
        # a broken registry entry after uninstalling a newer SDK.
        # However, we do that only if the major+minor SDK version matches
        # the one requested by the mkspec, as we might be building for a
        # newer target than the host.
        winsdk_ver = $$(WindowsSDKVersion)
        !isEmpty(winsdk_ver) {
            winsdk_ver ~= s,\\\\$,, # Work around SDK breakage.
            !equals(WINSDK_VER, $$replace(winsdk_ver, ^(\\d+\\.\\d+).*$, \\1)): \
                winsdk_ver =
        }
        !isEmpty(winsdk_ver) {
            cmd += $$winsdk_ver
        } else {
            winsdk_ver = $$read_registry(HKLM, \
                "Software\\Microsoft\\Microsoft SDKs\\Windows\\v$$WINSDK_VER\\ProductVersion", 32)
            isEmpty(winsdk_ver): \
                error("Windows SDK $$WINSDK_VER requested by mkspec is not installed. Cannot continue.")
            cmd += $${winsdk_ver}.0
        }
        output = $$system("$$cmd 2>&1", lines, ec)
        !equals(ec, 0): \
            qtToolchainError("SDK setup script failed. Output:", $$output, \
                "Command was: $$cmd")
        # Skip everything before the marker line emitted by dumpvcvars.bat,
        # then expect exactly three payload lines (INCLUDE, LIB, PATH).
        lines = $$output
        for(ever) {
            isEmpty(lines): \
                break()
            line = $$take_first(lines)
            equals(line, "=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+="): \
                break()
        }
        !count(lines, 3): \
            qtToolchainError("SDK setup script returned unexpected output:", $$output, \
                "Command was: $$cmd")
        # These contain only paths for the target.
        QMAKE_DEFAULT_INCDIRS = $$qtSplitPathList($$member(lines, 0))
        QMAKE_DEFAULT_LIBDIRS = $$qtSplitPathList($$member(lines, 1))
        # PATH is inherently for the host, and paths that are not shadowed
        # by vcvarsall.bat are assumed to contain only tools that work for
        # both host and target builds.
        QMAKE_DEFAULT_PATH = $$qtSplitPathList($$member(lines, 2))
        # We de-duplicate, because the script just prepends to the paths for
        # the host, some of which are identical to the ones for the target.
        QMAKE_DEFAULT_PATH = $$unique(QMAKE_DEFAULT_PATH)
    } else: msvc {
        # Native MSVC: the environment already carries the search paths.
        LIB = $$getenv("LIB")
        QMAKE_DEFAULT_LIBDIRS = $$split(LIB, $$QMAKE_DIRLIST_SEP)
        INCLUDE = $$getenv("INCLUDE")
        QMAKE_DEFAULT_INCDIRS = $$split(INCLUDE, $$QMAKE_DIRLIST_SEP)
    }
    # Conventional Unix fallbacks for native builds when probing found nothing.
    unix:!darwin:if(!cross_compile|host_build) {
        isEmpty(QMAKE_DEFAULT_INCDIRS): QMAKE_DEFAULT_INCDIRS = /usr/include /usr/local/include
        isEmpty(QMAKE_DEFAULT_LIBDIRS): QMAKE_DEFAULT_LIBDIRS = /lib /usr/lib
    }
    # cache() complains about undefined variables and doesn't persist empty ones.
    !isEmpty(QMAKE_DEFAULT_INCDIRS): \
        cache($${target_prefix}.INCDIRS, set stash, QMAKE_DEFAULT_INCDIRS)
    !isEmpty(QMAKE_DEFAULT_LIBDIRS): \
        cache($${target_prefix}.LIBDIRS, set stash, QMAKE_DEFAULT_LIBDIRS)
    !isEmpty(QMAKE_DEFAULT_PATH): \
        cache($${target_prefix}.PATH, set stash, QMAKE_DEFAULT_PATH)
} else {
    # Cached on an earlier run: just load the stashed values.
    QMAKE_DEFAULT_INCDIRS = $$eval($${target_prefix}.INCDIRS)
    QMAKE_DEFAULT_LIBDIRS = $$eval($${target_prefix}.LIBDIRS)
    QMAKE_DEFAULT_PATH = $$eval($${target_prefix}.PATH)
}
# For ARM64 cross-builds, export the discovered INCLUDE/LIB/PATH into the
# generated Makefiles (nmake-escaped) so the build tools see the right SDK.
msvc_cross {
    qmake_inc_exp.name = INCLUDE
    qmake_inc_exp.value = $$qtNmakePathList($$QMAKE_DEFAULT_INCDIRS)
    qmake_lib_exp.name = LIB
    qmake_lib_exp.value = $$qtNmakePathList($$QMAKE_DEFAULT_LIBDIRS)
    qmake_path_exp.name = PATH
    qmake_path_exp.value = $$qtNmakePathList($$QMAKE_DEFAULT_PATH)
    QMAKE_EXPORTED_VARIABLES += qmake_inc_exp qmake_lib_exp qmake_path_exp
}
unset(target_prefix)

View File

@ -1 +1 @@
7.4.1
8.1.1