Compare commits

...

787 Commits

Author SHA1 Message Date
02e3cbaa18 Increased minimum required version of Node.js to 22 2025-02-04 11:47:29 +03:00
370879f636 [jsdoc] Code block for expression 2025-01-31 17:18:30 +07:00
679afe1bc4 Fix generation on mac 2025-01-27 10:18:20 +03:00
8b5cfff24a [jsdoc] Remove output dirs before generation 2025-01-27 06:48:12 +00:00
27de97031e Merge pull request '[jsdoc] Fix paths in enums' (#39) from fix/js-doc into release/v8.3.0
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/39
2025-01-27 06:37:46 +00:00
8ee874da14 [jsdoc] Fix paths in enums 2025-01-27 13:23:58 +07:00
a3cb31291f [desktop] debug 2025-01-23 13:32:21 +03:00
6a43b86912 [desktop] for bug 62528 2025-01-23 11:15:01 +03:00
21bb535ee0 [jsdoc] Added plugins md docs generation 2025-01-22 12:49:26 +00:00
9ea948b825 [jsdoc] Fix generation 2025-01-22 12:49:26 +00:00
fe2fad9378 [desktop] refactoring 2025-01-22 13:00:50 +03:00
d566ffd9fa Fix mac builder packages build 2025-01-21 12:22:46 +03:00
370b23f38f Fix md syntax 2025-01-20 21:03:27 +03:00
253ee696be Merge pull request '[desktop] remove folders for IE from web-apps' (#35) from feature/desktop-clean-ie-folder into release/v8.3.0 2025-01-20 16:18:08 +00:00
e08c6f79bc [desktop] remove folders for IE from web-apps 2025-01-20 19:16:03 +03:00
4240319fef [develop] Print error in check_mysqlServer 2025-01-17 11:23:20 +03:00
e1aaa2415b Merge pull request 'Delete unnecessary files on builder deploy' (#34) from feature/minimize-builder-deploy into release/v8.3.0
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/34
2025-01-15 11:51:28 +00:00
e71eb56630 Fix builder packages build 2025-01-15 11:25:06 +03:00
38496f2971 Delete unnecessary files on builder deploy 2025-01-14 20:04:54 +04:00
d1c7d8d9f6 HWPFile added to build_tools 2025-01-13 14:06:16 +03:00
36fdfd672f Merge pull request 'Fix bug 72349' (#30) from fix/bug72349 into release/v8.3.0
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/30
2025-01-10 10:03:10 +00:00
55c0f61189 Fix bug 72349 2025-01-10 13:50:59 +04:00
38296bf292 Merge pull request '[jsdoc][plugins] Fixed reading common methods' (#28) from fix/build-jsdoc-plugins into release/v8.3.0
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/28
2024-12-22 13:34:46 +00:00
f0ba4564cc [jsdoc][plugins] Fixed reading common methods 2024-12-22 19:53:43 +07:00
21ec70214d fix Bug 70053 - Fix warning with JSONArgsRecommended 2024-12-20 10:47:39 +00:00
6d1a8376ba Merge pull request '[jsdoc] Fixes for build plugins api' (#23) from fix/build-jsdoc-plugins into release/v8.3.0
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/23
2024-12-18 07:42:02 +00:00
0ca83fe152 [jsdoc] Fixes for build plugins api 2024-12-16 18:44:35 +07:00
2301c407a2 Remove HtmlRenderer library 2024-12-10 14:33:10 +03:00
d7532d5b83 Merge branch hotfix/v8.2.2 into release/v8.3.0 2024-12-09 11:57:11 +00:00
c7d805f8df Up iwork version 2024-12-02 17:02:51 +03:00
d78ab30cdf Merge pull request 'Online-installer: add dev-channel support' (#21) from feature/online-installer-dev-channel into develop 2024-11-28 14:06:02 +00:00
c123f77195 [win] online-installer: add dev-channel support 2024-11-28 15:30:47 +02:00
78ee107e85 Fix build builder with branding onlyoffice 2024-11-28 14:57:00 +03:00
12c3310451 Up version 2024-11-27 20:01:36 +03:00
d525d8f603 Add change author for templates 2024-11-24 22:50:21 +03:00
337d1095dc Up iwork version 2024-11-22 17:51:36 +03:00
fab40cb6b3 Reformat code 2024-11-22 17:13:08 +03:00
f4cdc1aecd Merge pull request 'Update iwork build' (#20) from fix/iwork into develop
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/20
2024-11-22 13:45:29 +00:00
f702e3245a Update iwork build 2024-11-22 15:38:56 +03:00
d890ba4f43 Fix ds prerequisites build 2024-11-22 12:48:32 +03:00
d929ed411f Fix typo 2024-11-22 11:34:58 +03:00
55daa28d74 Merge pull request 'hotfix/v8.2.2' (#18) from hotfix/v8.2.2 into develop
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/18
2024-11-22 08:26:38 +00:00
2bab12aad1 Merge pull request 'feature/iwork' (#17) from feature/iwork into develop
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/17
2024-11-22 07:37:28 +00:00
80fb376132 Fix bug 71595 2024-11-21 17:29:22 +03:00
1d557f1065 Fix typo 2024-11-21 15:10:51 +03:00
30df3df8cf Setup qmake with portable data 2024-11-21 14:32:00 +03:00
02b4655a16 Refactoring 2024-11-20 16:07:21 +03:00
debf0158d4 iWork added to build_tools 2024-11-19 14:05:51 +03:00
0f730c1948 Write macos update changelog placeholder 2024-11-19 08:03:36 +00:00
fa7e324fe0 Add deps 2024-11-18 18:30:05 +03:00
e2313e6a3d fix Bug 71385 - Fix issue with DocsApi 2024-11-18 10:04:31 +00:00
2ce8c42323 Remove errors with setup system git 2024-11-18 11:57:30 +03:00
684e65adaa Skip errors from git config 2024-11-17 14:08:07 +03:00
a8fc3fb2f1 Add DocumentServer Prerequisites installer build 2024-11-15 18:10:04 +03:00
68bcdb2f88 Merge branch hotfix/v8.2.1 into master 2024-11-12 12:48:18 +00:00
af3627bccb Fix build 2024-11-11 16:50:32 +03:00
4cbe032363 Add online installer upload 2024-11-07 15:47:57 +03:00
5e4b3cf0d2 Fix build with modern compilers 2024-11-05 14:22:50 +03:00
593af1048b Remove windows changelog deploy 2024-11-02 12:44:00 +00:00
ae00ecb773 Fix web-apps closure maps deploy 2024-11-02 12:44:00 +00:00
da83e42172 Refactor packages deploy 2024-11-02 12:44:00 +00:00
2895d53f8e Fix mac old zip copy 2024-11-01 15:45:38 +03:00
10d1f22ec3 Merge pull request 'Deploy online-installer' (#10) from feature/deploy-online-installer into develop
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/10
2024-10-29 18:20:12 +00:00
4ed1e64a61 [win] online-installer: temporarily comment out upload stage 2024-10-29 18:44:18 +02:00
6402936285 [win] deploy online-installer 2024-10-29 14:59:03 +02:00
e01e5c145a Update scripts/sdkjs_common/jsdoc/get_latest_branch.py 2024-10-29 06:52:02 +00:00
56f6d82c8f Update scripts/sdkjs_common/jsdoc/get_latest_branch.py 2024-10-29 06:50:29 +00:00
3e79cf0c12 Add get latest branch 2024-10-29 05:57:06 +00:00
efc09657a8 Add get latest branch 2024-10-29 05:54:44 +00:00
64390c3e01 Fix build v8 on mac 2024-10-24 14:00:14 +03:00
513edb802d Merge branch release/v8.2.0 into develop 2024-10-21 11:46:47 +00:00
52c35b8e3c Merge branch release/v8.2.0 into master 2024-10-17 11:08:22 +00:00
cf1c25031c Update version 2024-10-17 12:45:40 +03:00
7b9f18867a Revert all templates to desktop for macos 2024-10-10 10:51:28 +03:00
0985b4dbe8 Add script for deploy templates 2024-10-10 02:45:16 +03:00
772fb721ae Add data for hard update hunspell module 2024-10-09 10:48:59 +03:00
1ef1c795c1 Merge pull request '[desktop] debug macOS app launch' (#4) from fix/mac-launch-with-templates into release/v8.2.0 2024-10-08 22:15:31 +00:00
6d956566c5 [desktop] debug 2024-10-09 01:13:47 +03:00
edec5bb25f [desktop] debug macOS app launch 2024-10-09 01:08:19 +03:00
3534f65f0e Remove empty items in plugins list 2024-10-04 18:10:31 +03:00
6fbea9c8a4 Fix build v8 on macos with xcode 16+ 2024-10-03 21:54:17 +03:00
18bba5da3d Disable bitcode by default 2024-10-03 13:49:58 +03:00
952270e1ba Fix rpaths 2024-10-02 13:51:17 +00:00
0c180e6ee5 Fix deploy automation API 2024-09-27 12:13:45 +00:00
fdd9c329b1 Fix deploy automation API 2024-09-26 16:51:44 +00:00
5b80459b37 Merge pull request 'feature/templates-for-desktop' (#1) from feature/templates-for-desktop into release/v8.2.0
Reviewed-on: https://git.onlyoffice.com/ONLYOFFICE/build_tools/pulls/1
2024-09-18 09:52:59 +00:00
1b646a6e00 [desktop] correct path 2024-09-18 12:51:45 +03:00
cf970efbec [desktop] copy templates to package 2024-09-18 12:32:18 +03:00
4020cdac69 Merge branch hotfix/v8.1.3 into release/v8.2.0 2024-09-13 10:35:16 +00:00
2415c2ffe8 Merge branch hotfix/v8.1.3 into develop 2024-09-13 09:46:47 +00:00
d41502ea19 Merge branch hotfix/v8.1.3 into master 2024-09-13 09:46:46 +00:00
f5d0ef4005 Fix macos bug 2024-09-11 11:21:30 +03:00
c4a89ecf61 Fix update 2024-09-10 14:23:17 +03:00
71eb25e561 Remove unused files 2024-09-10 09:58:13 +03:00
486a6683fd Add dependencies for doctrenderer 2024-09-09 06:38:13 +03:00
2175d8d87c Update version 2024-09-05 10:38:13 +03:00
f463bff49e Fixes for working by ssh 2024-09-03 15:09:37 +03:00
a817e2b046 Update generate_docs_plugins_json.py
delete generating missing files with json
2024-09-01 00:34:25 +03:00
3539e36bde Update generate_docs_json.py
delete generate missing examples with json files
2024-09-01 00:32:50 +03:00
6930a9ffe1 Update generate_docs_md.py 2024-08-31 12:58:41 +03:00
e0a44502b1 Update generate_docs_md.py 2024-08-31 12:49:30 +03:00
19e1bd5586 Update generate_docs_md.py 2024-08-31 10:27:23 +03:00
ea65ba02f1 Update generate_docs_md.py 2024-08-31 10:23:46 +03:00
8406e48009 Update generate_docs_md.py 2024-08-31 10:14:28 +03:00
a8f1d11cbc Update generate_docs_md.py 2024-08-31 01:48:55 +03:00
f245a4a9c6 Update generate_docs_md.py 2024-08-31 01:40:00 +03:00
597529a16d Fix crossbuild 2024-08-30 10:49:09 +03:00
9b9dba05c2 Update generate_docs_plugins_json.py
delete branch from path
2024-08-30 09:36:10 +03:00
2d0bbc824f Update generate_docs_json.py
delete branch name from path
2024-08-30 09:33:34 +03:00
fa523c673f Update generate_docs_plugins_json.py
change default
2024-08-30 09:29:54 +03:00
da1a4ba393 Update README.md
change default
2024-08-30 09:29:44 +03:00
e9c9712e52 Update generate_docs_json.py
change default path
2024-08-30 09:25:34 +03:00
78561ca659 Update README.md 2024-08-30 09:25:23 +03:00
1ad87383e3 Update README.md
fix Requirements and Installation
2024-08-30 08:31:45 +03:00
c29ac1549f Update README.md
fix default path
2024-08-30 08:26:58 +03:00
f09eeb19e5 Fix generation on linux 2024-08-29 23:19:21 +03:00
4b7b2c78a2 Merge pull request #871 from ONLYOFFICE/fix/jsdoc
[jsdoc] Fixed style
2024-08-29 00:31:34 -07:00
414af6bdb0 [jsdoc] Fixed style 2024-08-29 14:27:47 +07:00
df7288b275 Merge pull request #868 from ONLYOFFICE/feature/extend-apijs-load
[web-apps] copy api.js as api.js.tpl for server package
2024-08-28 14:39:25 +03:00
ce80953086 Merge pull request #870 from ONLYOFFICE/fix/jsdoc
[jsdoc] Fixed creating data types.
2024-08-28 04:05:29 -07:00
d1344dab71 [jsdoc] Fixed creating data types. 2024-08-28 18:00:53 +07:00
4f2ba4ae76 Merge pull request #869 from ONLYOFFICE/fix/jsdocs
[jsdoc] Fixed output path
2024-08-28 03:32:34 -07:00
6bd525c3b4 [jsdoc] Fixed output path 2024-08-28 17:29:12 +07:00
341671a612 Fix typo 2024-08-28 13:10:29 +03:00
9161aa1556 Add generate snapshots 2024-08-28 13:06:12 +03:00
70e9fbabce [web-apps] copy api.js as api.js.tpl for server package 2024-08-27 21:45:15 +03:00
a2c00deba2 Add function for portable test utilities 2024-08-26 14:20:14 +03:00
9b4ef9d1d7 [web-apps] rename check translation script 2024-08-23 12:52:28 +03:00
3baee0c14e Move pro to builder module 2024-08-22 14:07:05 +03:00
0508bf43d1 Merge pull request #867 from ONLYOFFICE/feature/webapps-fix-translations
[web-apps] check translation for 'main' apps before build
2024-08-20 22:15:14 +03:00
bd279d1ad7 Merge pull request #861 from ONLYOFFICE/feature/docbuilder-java
Build and deploy docbuilder Java wrapper
2024-08-19 04:51:08 -07:00
4d55a66307 Merge pull request #865 from ONLYOFFICE/fix/jsdoc
[jsdoc] Replacing line breaks with spaces
2024-08-16 06:09:23 -07:00
9481e01581 [jsdoc] Replacing line breaks with spaces 2024-08-16 20:04:51 +07:00
fe91bf9620 Merge pull request #864 from ONLYOFFICE/fix/jsdocs
[jsdoc] Fixed paths generation
2024-08-16 05:30:20 -07:00
d812ba379b [jsdoc] Fixed paths generation 2024-08-16 19:28:56 +07:00
e1cc7f3c83 Fix libraries not loading on mac 2024-08-14 19:24:34 +04:00
f50d5d2cd1 Fix path problems on mac 2024-08-14 15:54:07 +04:00
b3987b0ad5 Move build of Java wrapper to build_sln.py 2024-08-13 20:42:54 +04:00
243946a189 Merge pull request #863 from ONLYOFFICE/release/v8.2.0
Merge branch release/v8.2.0 into develop
2024-08-13 07:46:12 -07:00
63fbbc5603 Add missed library to deploy 2024-08-13 17:26:14 +03:00
fcb857df69 [web-apps] check translation for 'main' apps before build 2024-08-13 11:19:18 +03:00
dabbc31c09 Handling complex dependencies in project file 2024-08-13 10:38:33 +03:00
997bfa3dd5 Fix typo 2024-08-13 09:14:23 +03:00
50eca8aab5 Fix build 2024-08-13 07:45:29 +03:00
6e4a2e4d5e Add dictionariestester to core deploy 2024-08-13 00:25:56 +03:00
40e9938885 Add test for dicts & spellmodule to core 2024-08-13 00:25:06 +03:00
5bc8ca2266 Build and deploy JAR 2024-08-12 18:42:18 +04:00
4cdbfbfb86 Deploy JNI helper dynamic library 2024-08-12 18:24:51 +04:00
01575d1f2e Fix core and builder archive deploy (#860)
* Refactoring script parameters

* Add builder 7z deploy

* Refactoring core 7z deploy

* Small fix
2024-08-12 16:53:56 +03:00
8f75c75b80 Merge pull request #859 from ONLYOFFICE/fix/builder-docs
[jsdoc][bu] Removed example field from json docs
2024-08-07 04:24:42 -07:00
ebc084f9ea [jsdoc][bu] Removed example field from json docs 2024-08-07 18:24:02 +07:00
626efaf5cf Merge pull request #858 from ONLYOFFICE/fix/jsdoc
[jsdoc][plugins] Added examples field to json.
2024-08-07 03:50:29 -07:00
096ce99588 [jsdoc][plugins] Added examples field to json. 2024-08-07 17:45:40 +07:00
9ce103b31b Add returncode in runcommand function 2024-08-06 17:54:39 +03:00
13cbd84b58 Change documentType for pdf 2024-08-06 14:24:12 +03:00
a8912dff41 Refactoring 2024-08-06 13:46:42 +03:00
8b773614ba Fix builder rpm package deploy (#857) 2024-08-06 11:45:54 +03:00
d04f04f382 Merge pull request #856 from ONLYOFFICE/fix/plugins-docs
Jsdocs api plugins generation script
2024-08-05 03:35:02 -07:00
9a44dae4f9 Jsdocs api plugins generation script 2024-08-05 17:28:06 +07:00
07665dd93e Merge pull request #855 from ONLYOFFICE/fix/jsdoc
Fixed jsdoc md generation
2024-08-02 07:35:32 -07:00
eeca17e78b Fixed jsdoc md generation 2024-08-02 21:30:29 +07:00
f91264bc94 Merge pull request #854 from ONLYOFFICE/fix/docs-generation
[jsdoc] Fixed api docs generation
2024-08-02 06:34:17 -07:00
0983e67f21 [jsdoc] Fixed api docs generation 2024-08-02 20:31:11 +07:00
8e7db87554 Refactoring linux packages deploy (#853)
* Fix deploy desktop editors linux packages (#842)

* Small fix

* Fix make targets

* Small fix

* Refactoring linux packages deploy
2024-07-31 14:18:27 +03:00
9d000b2284 Merge pull request #852 from ONLYOFFICE/fix/generation-path
Added branch name to dist path for jsdoc json generation
2024-07-29 04:07:03 -07:00
e29fd0ca09 Added branch name to dist path for jsdoc json generation 2024-07-29 18:06:17 +07:00
dcfde5b5e7 Refactoring 2024-07-29 13:57:17 +03:00
871750d6ae Merge pull request #851 from ONLYOFFICE/fix/jsdoc
Fixed comments
2024-07-29 01:40:16 -07:00
d6b5dc0830 Fixed comments 2024-07-29 15:35:26 +07:00
e99a3e8978 Merge pull request #850 from ONLYOFFICE/fix/jsdoc
Fixed path in jsdoc generation script
2024-07-29 00:17:11 -07:00
13db6d3155 Fixed path in jsdoc generation script 2024-07-29 14:15:46 +07:00
f8845d4fc5 Merge pull request #849 from ONLYOFFICE/fix/jsdoc
Fixed docs generation scripts
2024-07-28 23:28:44 -07:00
efcfb00239 Fixed docs generation scripts 2024-07-29 13:27:41 +07:00
1727313e54 Merge pull request #848 from ONLYOFFICE/fix/jsdoc
Fixed getting doclets for docs generation
2024-07-26 08:55:03 -07:00
f6d55d07c1 Fixed getting doclets for docs generation 2024-07-26 22:51:35 +07:00
331bbadaad Merge pull request #847 from ONLYOFFICE/fix/documentation
Build json docs fixes
2024-07-26 06:47:02 -07:00
f012c604b8 Build json docs fixes 2024-07-26 20:42:09 +07:00
a8f6b0c599 Merge pull request #846 from ONLYOFFICE/feature/documentation
Added documentation generation scripts
2024-07-26 06:24:06 -07:00
e46d73869c Added documentation generation scripts 2024-07-26 20:20:55 +07:00
6bf413a008 Merge branch hotfix/v8.1.1 into release/v8.2.0 2024-07-26 08:02:47 +00:00
10b7f63f9f Merge branch hotfix/v8.1.1 into develop 2024-07-26 08:02:44 +00:00
f2dff2d173 Merge branch hotfix/v8.1.1 into master 2024-07-26 08:02:40 +00:00
963c3bf212 Merge pull request #801 from ONLYOFFICE/feature/split_functions
Split functions
2024-07-25 04:05:41 +01:00
f7071569d9 Merge remote-tracking branch 'remotes/origin/release/v8.2.0' into feature/split_functions
# Conflicts:
#	scripts/develop/run_server.py
2024-07-25 03:56:14 +01:00
4e5eadbf82 For bug 68924 2024-07-22 00:07:45 +03:00
113e2e7821 Fixed builder interface script generation 2024-07-19 15:00:50 +07:00
21c8c699dd [desktop] for bug 62528 2024-07-18 14:15:51 +03:00
db36b7dc40 [develop] Fix mysql check. "SHOW DATABASES" returns lowercase result 2024-07-17 14:56:25 +03:00
38522989d3 [develop] Add db-name config option 2024-07-16 19:26:19 +03:00
aa49605ac4 Merge branch hotfix/v8.1.1 into master 2024-07-15 11:38:25 +00:00
3af65bf276 Version up 2024-07-12 11:12:32 +03:00
0a51c3bdea Fix bug 46933 2024-07-11 19:32:44 +03:00
ba6c3a8f38 Fix bug 68571 2024-07-07 23:43:03 +03:00
66e196b5ec [develop] Remove confusion with working dir in readme 2024-07-03 17:37:59 +03:00
d4a49d7137 Update github actions (#832) 2024-07-02 16:57:51 +03:00
1cca8af54f Merge branch 'develop' of https://github.com/ONLYOFFICE/build_tools into feature/split_functions 2024-07-02 12:15:59 +03:00
7e925fd931 Merge pull request #830 from ONLYOFFICE/feature/libvlc-linux
Correct RPATHs for libvlc build
2024-06-29 11:32:38 -07:00
45448171d4 Correct rpaths for libvlc build 2024-06-28 20:25:21 +04:00
64ae3d9029 Merge branch release/v8.1.0 into develop 2024-06-26 10:39:48 +00:00
edccac17f6 Merge pull request #827 from ONLYOFFICE/fix/readme
Fix/readme
2024-06-24 12:46:54 +03:00
1d36cad17e [develop] Clarify cwd for docker run command 2024-06-19 11:54:16 +03:00
08e6d5ba53 [docs] Fix PostgreSQL database creation commands in readme 2024-06-19 11:26:02 +03:00
6505ee1b35 Merge branch release/v8.1.0 into master 2024-06-19 08:19:22 +00:00
709612090a Refactoring 2024-06-14 17:17:00 +03:00
1af5c373e4 Refactoring desktop packages build (#824) 2024-06-14 17:10:49 +03:00
8181d187dd Fix previous commit 2024-06-14 17:04:27 +03:00
4b448e3305 Add new options for spreadsheets convertation 2024-06-14 16:40:18 +03:00
fd579511ae Update hard-coded version to v8.1.0 2024-06-11 13:21:19 +00:00
e166237e5d Fix previous commit 2024-06-04 23:29:20 +03:00
b934429e41 Rename modules for standard libs correctly work 2024-06-04 23:27:09 +03:00
d61c1da666 Resolve conflicts when importing modules 2024-06-04 09:21:34 +03:00
8f633771d9 Merge pull request #821 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-06-03 17:17:02 +03:00
684f478c54 Fix mac builder upload path (#820) 2024-06-03 16:29:31 +03:00
cb0099d746 Refactoring 2024-05-27 13:08:05 +03:00
a72ead91dc [develop] Add no-cache option to readme 2024-05-23 01:25:48 +03:00
fd7c3c6cf3 Merge pull request #815 from ONLYOFFICE/release/v8.1.0
Merge release/v8.1.0 into develop
2024-05-22 12:03:32 +03:00
5ef8abacfa Update vcredist checksums (#813) 2024-05-22 11:52:04 +03:00
a01221ffc6 Merge pull request #811 from ONLYOFFICE/fix/docbuilder-python-deploy
Fix docbuilder python deploy on linux and mac
2024-05-21 18:12:17 +03:00
cbd4ab2e15 Fix for linux 2024-05-21 19:00:19 +04:00
e70152b85b Correct deploy rpath for linux 2024-05-21 18:30:58 +04:00
8a9c9a587e Fix rpath for mac 2024-05-21 17:38:31 +04:00
29c15d9acd Fix typo 2024-05-19 12:03:08 +03:00
bf6773f666 Add python wrapper for builder 2024-05-19 11:23:05 +03:00
bba0ff87da Add script for qt arm builds 2024-05-16 13:31:32 +03:00
c9de5278ea Fix build 2024-05-16 12:10:50 +03:00
6f5a791a1f Add hack for android debug builds 2024-05-06 15:18:54 +03:00
1e7a720e74 Disable unused modules 2024-05-03 12:31:04 +03:00
10a7080928 Merge pull request #806 from ONLYOFFICE/feature/build-qt-arm
Qt build for linux arm
2024-04-26 19:09:57 +03:00
7349c64253 Add arm32 build 2024-04-26 19:18:48 +04:00
88649507c7 Fix problem with cores 2024-04-26 14:08:27 +04:00
cc503473f9 Add dockerfile and python script 2024-04-25 22:07:46 +04:00
10fcec1dd8 [license_checker] Update web-apps config 2024-04-24 17:15:47 +03:00
0679c0f6d7 [license_checker] Allow different license templates 2024-04-24 14:23:51 +03:00
a1a69bdbab Add build-only-branding param 2024-04-15 14:41:05 +03:00
da02b358e2 [develop] Check private repo existence(server-lockstorage) 2024-04-10 01:47:31 +03:00
60dcea6ff4 Fix creation xcframeworks if destination exist 2024-04-09 23:34:37 +03:00
b5796d5e6c Add v1 plugins engine to local server 2024-04-02 21:57:00 +03:00
6338fd58c3 Split functions 2024-03-19 17:39:28 +03:00
39b6841557 Fix build 2024-03-18 12:05:26 +03:00
f3a20e8e59 Merge pull request #800 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-15 10:45:45 +03:00
830df65573 Add mobile module support 2024-03-15 10:44:58 +03:00
2aeb9e1315 Fix build on linux 2024-03-14 21:55:49 +03:00
696c48c251 Merge pull request #799 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-14 20:08:54 +03:00
dcf02e7e93 Fix mobile package 2024-03-14 20:03:40 +03:00
581091591b Fix typo 2024-03-14 19:39:49 +03:00
0e6f1a064d Change project type to json 2024-03-14 16:45:40 +03:00
70975098e2 Add web-apps js maps deploy (#796)
* Exclude js maps

* Add web-apps js maps deploy
2024-03-14 14:10:56 +03:00
5b27f9843f test commit 2024-03-14 09:38:37 +03:00
71e29a6599 Fix windows build 2024-03-14 09:38:17 +03:00
6fd43a4b18 Add support short names for ndk 2024-03-14 09:19:09 +03:00
11f207fbe2 Fix typo 2024-03-14 09:12:38 +03:00
6559d589dd Merge pull request #798 from ONLYOFFICE/release/v8.1.0
Release/v8.1.0
2024-03-13 18:33:44 +03:00
b7e9acc242 Merge pull request #797 from ONLYOFFICE/fix/8.0.2
Fix/8.0.2
2024-03-13 18:33:13 +03:00
bfd1cd0555 Fix build with old ndk 2024-03-13 17:14:01 +03:00
590dffdb78 Revert ndk version 2024-03-13 11:24:49 +03:00
0205dd6853 Refactoring 2024-03-10 21:53:45 +03:00
cd03a42c1b Fix packages build (#793) 2024-03-05 11:16:34 +03:00
c1a8d181d2 Fix desktop package build (#792) 2024-03-01 19:04:24 +03:00
a17d5e04bb Remove unused dependency 2024-02-27 14:40:42 +03:00
e719ae24f0 Merge branch hotfix/v8.0.1 into master 2024-02-26 07:32:18 +00:00
b4922e6899 Merge pull request #788 from ONLYOFFICE/hotfix/v8.0.1
Hotfix/v8.0.1
2024-02-07 12:46:01 +03:00
d8c2505fb8 Fix xp build without path env 2024-02-07 12:44:24 +03:00
02426e413f Switch python2 version to version from bootstrap 2024-02-07 12:17:40 +03:00
bd05971ebb Patch python script on windows 2024-02-06 23:16:45 +03:00
4e12692325 Fix build 2024-02-06 20:02:22 +03:00
f7ea69acc9 Update VCRedist (#787) 2024-02-06 17:44:15 +03:00
3640cea64d Update hard-coded version to v8.0.1 2024-02-06 14:27:51 +00:00
f5ac8ac39d Merge branch release/v8.0.0 into develop 2024-02-05 08:38:40 +00:00
f801e77208 Merge branch release/v8.0.0 into master 2024-01-30 11:23:27 +00:00
2a8c5ea9eb Disable drawio by default 2024-01-26 14:17:16 +03:00
181a42e344 Fix xp plugins (desktop) 2024-01-22 12:14:58 +03:00
a0511ca3ac Fix build js for native 2024-01-20 21:24:02 +03:00
0b48f3a67f Refactoring build native scripts 2024-01-18 17:05:17 +03:00
15727e83cc [desktop] add noconnect.html to package 2023-12-26 19:17:58 +03:00
7d06432a76 Fix vcredist download (#777) 2023-12-20 18:46:46 +03:00
761c47e26d Add fonts to desktop package 2023-12-11 21:25:53 +03:00
edc6a38dfb Fix typo 2023-12-11 21:25:20 +03:00
2b79e127c4 Fix native build 2023-12-09 23:43:22 +03:00
449875d5b8 Fix msi icon paths (#775) 2023-12-08 15:18:30 +03:00
bbdb9e0107 Merge pull request #774 from ONLYOFFICE/release/v8.0.0
Release/v8.0.0
2023-12-07 22:28:19 +03:00
0a613734f7 Fix build 2023-12-07 22:15:12 +03:00
ff2aa0434a Fix android build 2023-12-07 14:51:19 +03:00
2fa22ca2b3 Fix build 2023-12-07 13:31:08 +03:00
25473c1b5c Merge branch 'release/v8.0.0' of https://github.com/ONLYOFFICE/build_tools into release/v8.0.0 2023-12-07 12:08:33 +03:00
7c087e20b7 Fix build 2023-12-07 12:08:17 +03:00
7250b59f19 Update hard-coded version to v8.0.0 2023-12-07 07:29:35 +00:00
e54e7ad6ec Merge pull request #773 from ONLYOFFICE/release/v7.6.0
Change fetching icu (github deprecated svn)
2023-12-06 18:52:21 +03:00
4a2fd9fb72 Fix aws s3 artifacts upload (#772) 2023-12-06 18:33:28 +03:00
afd5f2b3be Change fetching icu (github deprecated svn) 2023-12-06 12:55:07 +03:00
d468b93e9f Merge pull request #769 from ONLYOFFICE/fix/license-checker-readme
[license_checker] update Readme for allowListFile
2023-11-28 16:37:33 +03:00
188ad0057f Merge pull request #770 from ONLYOFFICE/release/v7.6.0
Release/v7.6.0
2023-11-28 10:58:33 +03:00
bde91e3dbf [license_checker] update Readme for allowListFile 2023-11-25 22:08:51 +03:00
3e9b233ecb [license] For new repo server-license-key 2023-11-21 00:08:52 +03:00
1f6a3010b5 Add method for auto-check modules 2023-11-17 15:08:47 +03:00
fa15db70c9 Merge branch release/v7.6.0 into develop (#765)
* Update hard-coded version to v7.6.0

* Fix vcredist [2] (#763)

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2023-11-16 17:32:35 +03:00
b74c359523 Fix vcredist [2] (#763) 2023-11-16 16:08:28 +03:00
6d7e67820a Update hard-coded version to v7.6.0 2023-11-15 14:52:29 +00:00
3d884963a7 Merge pull request #762 from ONLYOFFICE/release/v7.6.0
Merge branch release/v7.6.0 into develop
2023-11-15 17:08:32 +03:00
265cac6474 Fix vcredist download (#761) 2023-11-15 17:03:30 +03:00
96ff18b45c Merge branch hotfix/v7.5.1 into release/v7.6.0 2023-11-15 12:46:13 +00:00
8eb2d689fd Merge branch hotfix/v7.5.1 into develop 2023-11-15 12:46:10 +00:00
d2888db960 [desktop] for bug 65074 2023-11-14 23:07:15 +03:00
5d7de5a7ba [deploy] Build and deploy server without copying and grunt module 2023-11-14 18:42:21 +03:00
4d3e9c39b1 Fix msi build (#758) 2023-11-12 21:11:59 +03:00
a2639afd7a Merge branch hotfix/v7.5.1 into master 2023-10-31 14:39:47 +00:00
6d8f89deba Fix typo 2023-10-27 10:37:52 +03:00
b3a2493767 Update hard-coded version to v7.5.1 2023-10-26 07:59:57 +00:00
5d3cbbe194 Merge pull request #752 from ONLYOFFICE/fix/develop-instruction
[develop] Fix develop readme instruction
2023-10-25 23:05:04 +03:00
fd2e480e17 Fix build v8 for xp 2023-10-25 22:25:59 +03:00
342556b763 Fix windows build 2023-10-24 22:49:15 +03:00
1dd67ac7a9 [desktop] "connection error" page moved to "start" page 2023-10-24 18:17:53 +03:00
2311c55319 [develop] Fix develop readme instruction 2023-10-19 23:32:25 +03:00
eb80d0d6c1 [build] Revert nodejs version to 16 due to endless error messages when building on node18-linux-arm64(pkg-fetch@3.5 node@v18.15.0) 2023-10-19 12:43:47 +03:00
c629596198 Merge branch release/v7.5.0 into master 2023-10-19 08:18:08 +00:00
3fe86f753f Merge pull request #747 from ONLYOFFICE/release/v7.5.0
Release/v7.5.0
2023-10-18 16:57:28 +03:00
031e5a74d7 Fix build with old python 2023-10-18 16:55:34 +03:00
b26baed61e Merge branch release/v7.5.0 into master 2023-10-17 12:04:35 +00:00
a311f41f0c Fix deploy plugin store in desktop 2023-10-17 00:03:32 +03:00
07c3fb05a8 Move speechrecognition to server only 2023-10-16 20:46:53 +03:00
0625ad2652 Fix del dir with long filenames 2023-10-15 06:35:01 +03:00
ed3e4082a1 Fix copy directory with long names 2023-10-15 00:17:29 +03:00
b187130c34 Fix typo 2023-10-14 21:54:09 +03:00
3c56477f3a Deploy marketplace plugin from base repo 2023-10-14 19:19:44 +03:00
8c15ed7887 Merge pull request #744 from ONLYOFFICE/feature/new-default-plugins
Add some plugins by default.
2023-10-13 11:25:43 +03:00
1f46c647f9 Add some plugins by default.
drawio, zotero, speech input.
2023-10-13 11:11:57 +03:00
8791ddf547 [desktop] fix build 2023-10-11 11:32:28 +03:00
04679efe76 Merge pull request #743 from ONLYOFFICE/feature/connection-error-path 2023-10-09 23:18:50 +03:00
9197d31552 [desktop] added connection error page 2023-10-09 23:12:48 +03:00
47977e3b37 Update windows desktop build (#742) 2023-10-09 19:01:02 +03:00
e358689181 [develop] Fix markdown errors 2023-10-05 13:30:18 +03:00
4cda4793bc [develop] Fix Dockerfile. Allow branding command line params
# Conflicts:
#	develop/run_build_js.py
2023-10-05 13:30:18 +03:00
7b470fa1f8 Fix build icu for android on mac 2023-10-05 12:46:35 +03:00
33b14d8848 Fix vlc-cache-gen paths (#740) 2023-09-27 15:09:50 +03:00
01f6464a71 Fix typo 2023-09-27 11:55:26 +03:00
c0c0755505 Fix check config option 2023-09-27 11:50:25 +03:00
5bccf567fd Fix update DesktopEditors vlc plugin cache (#739) 2023-09-27 10:32:12 +03:00
670235480b Merge pull request #738 from ONLYOFFICE/feature/win-vlc-cache
Update windows package DesktopEditors vlc plugin cache
2023-09-26 16:26:23 +03:00
bfab104961 Small fix 2023-09-26 16:23:47 +03:00
658ce63a04 Update windows package DesktopEditors vlc plugin cache 2023-09-26 16:15:16 +03:00
208b602c97 Add vlc-cache-gen to deploy folder 2023-09-26 15:09:00 +03:00
49c65d9f64 Merge pull request #737 from ONLYOFFICE/release/v7.5.0
Release/v7.5.0
2023-09-26 12:16:00 +03:00
afdd241116 [desktop] use fonts from core-fonts repo 2023-09-21 23:46:17 +03:00
241748308a [build] Bump nodejs version to 18; npm ci instead of npm i 2023-09-21 12:59:23 +03:00
fc0e0adbc7 Add catch error on file copying 2023-09-21 10:10:30 +03:00
ce648a2649 Add patch for xcode 15.0 2023-09-20 22:46:17 +03:00
92602510a5 Merge pull request #736 from ONLYOFFICE/feature/MetafileTester
Added console metafile tester
2023-09-20 12:45:51 +03:00
2429745cc0 Added console metafile tester 2023-09-20 12:19:09 +03:00
98f70179ef Fix dictionaries deploy 2023-09-17 15:01:38 +03:00
bd167f6258 Add dictionaries path to doctrenderer.config 2023-09-16 18:42:06 +03:00
88423908f2 Fix build 2023-09-16 17:24:43 +03:00
23a4c4b0b5 Add support config addon for each platform 2023-09-16 16:36:58 +03:00
879b6b2810 Merge pull request #732 from ONLYOFFICE/fix/videoplayer
Updated libvlc builds
2023-09-13 10:39:31 +03:00
3a3652e753 Fix closure maps deploy path (#733) 2023-09-12 18:28:50 +03:00
e79079f4d4 [libvlc] Fixes for linux and win builds 2023-09-11 18:06:31 +04:00
3660eb62ec [libvlc] Small fix for mac builds 2023-09-11 17:59:15 +04:00
c6d41ba35d [libvlc] Remove dependency on qtmultimedia. 2023-09-04 16:48:26 +03:00
c9fb306823 Merge pull request #730 from ONLYOFFICE/feature/libvlc
Feature/libvlc
2023-08-31 21:23:01 +03:00
6d9a9032b2 Merge pull request #729 from ONLYOFFICE/release/v7.5.0
Release/v7.5.0
2023-08-31 21:19:58 +03:00
fab8edef79 [test] Use os.walk 2023-08-31 15:22:36 +03:00
df60f1c273 [test] Add convert_directory_test.py to convert_directory with test exe(without doctrender) 2023-08-31 15:01:45 +03:00
fc0e3972a3 Merge branch 'hotfix/v7.4.2' into release/v7.5.0 2023-08-25 17:14:55 +03:00
3ff6c327f9 Add method for deploy dictionaries 2023-08-25 17:10:51 +03:00
c22b067e30 Fix remove repo method
(cherry picked from commit 4e760a2a38)
2023-08-25 15:20:50 +03:00
6d6eff662d Fix build
(cherry picked from commit 8a1a2b93c6)
2023-08-25 15:20:41 +03:00
48ecc3915e Increased minimum required version of Node.js 2023-08-22 19:35:32 +03:00
8c87ead486 Merge pull request #727 from ONLYOFFICE/fix/nodeV
Increased minimum required version of Node.js
2023-08-22 19:34:31 +03:00
66cffd6722 Increased minimum required version of Node.js 2023-08-22 15:38:48 +08:00
eadad135e2 Merge pull request #726 from ONLYOFFICE/feature/docker-instruction-2
Feature/docker instruction 2
2023-08-22 01:23:06 +03:00
9a44988707 [develop] Add note; remove branch from Dockerfile to prepare for master 2023-08-22 01:19:42 +03:00
d9b2f92e64 [develop] Fix markdown errors 2023-08-22 01:17:08 +03:00
a8cb907b71 [develop] Fix markdown errors 2023-08-22 01:17:07 +03:00
633c176e50 [develop] Add ALLOW_PRIVATE_IP_ADDRESS (to fix http://localhost/example/images/logo.png insertion) 2023-08-22 01:17:07 +03:00
91e8c60036 [develop] Fix typo 2023-08-22 01:17:06 +03:00
9c1398814d [develop] Add check__docker_dependencies call for external sdkjs and web-apps 2023-08-22 01:17:06 +03:00
672d1dc800 [develop] Fix missing platform 2023-08-22 01:17:06 +03:00
fca666825a [develop] Fix linux path 2023-08-22 01:17:05 +03:00
dac76abd74 [develop] Fix Dockerfile 2023-08-22 01:17:05 +03:00
e8ad53b990 [develop] Fix Dockerfile 2023-08-22 01:17:05 +03:00
002504fca3 [develop] Fix new supervisor config path 2023-08-22 01:17:04 +03:00
16c36d346e [develop] Edit readme for linux 2023-08-22 01:17:04 +03:00
18142fc257 [develop] Update Dockerfile and instruction 2023-08-22 01:17:04 +03:00
72d3244dcb [develop] Fix readme 2023-08-22 01:17:03 +03:00
34ee246673 [develop] Fix readme 2023-08-22 01:17:03 +03:00
3ea1cff8de [develop] Update readme 2023-08-22 01:17:03 +03:00
a9de3f6f0e [linter] Fix gitflow 2023-08-22 01:17:02 +03:00
a54bf745ae [linter] Fix gitflow 2023-08-22 01:16:03 +03:00
8ee547cad7 [linter] Set code_block_line_length linter rule to 300 2023-08-22 01:15:04 +03:00
325a68877b [develop] Fix readme(linter) 2023-08-22 01:14:03 +03:00
fd13759a79 [develop] Fix readme(linter) 2023-08-22 01:14:02 +03:00
283ac31f9b [develop] Fix readme(linter) 2023-08-22 01:14:02 +03:00
ea253634d2 [develop] Fix readme(linter) 2023-08-22 01:14:02 +03:00
426c24ac52 [develop] Fix readme(linter) 2023-08-22 01:14:01 +03:00
15f7a39997 [develop] Modify readme 2023-08-22 01:14:01 +03:00
69107bb48c [develop] Edit readme to mount server volume; Set up debug logging and start test example in docker by default; 2023-08-22 01:13:52 +03:00
681e9deafd Remove gsttools in vlc package 2023-08-21 15:46:08 +03:00
62911b8490 Refactoring 2023-08-20 18:53:39 +03:00
4e760a2a38 Fix remove repo method 2023-08-20 13:50:27 +03:00
8a1a2b93c6 Fix build 2023-08-19 10:20:56 +03:00
102458d9c8 Fix typo 2023-08-18 15:59:16 +03:00
2a75912ca4 Fix typo 2023-08-18 15:24:48 +03:00
81c6410394 Change deploy with libvlc option 2023-08-18 14:58:53 +03:00
d8b759841e Merge branch 'release/v7.5.0' into develop 2023-08-16 18:51:03 +03:00
d66d9a03ec Revert partially "[develop] Use npm ci instead of npm i when building web-apps"
This reverts commit 5012e4e9bd.
because error with npm ci (-v 9) on package-lock.json(-v 6) file with local dependencies
https://github.com/npm/cli/issues/5125
https://github.com/npm/cli/issues/529
2023-08-16 18:49:19 +03:00
1894c5c971 Merge release/v7.5.0 into develop
Release/v7.5.0
2023-08-10 12:07:40 +03:00
5012e4e9bd [develop] Use npm ci instead of npm i when building web-apps 2023-08-08 19:24:19 +03:00
b9ccd9849a Refactoring packages deploy (#720)
* Refactoring packages deploy

* Small fix
2023-08-04 17:59:21 +03:00
898f961e2a some restyling in libvlc build script 2023-08-04 13:00:33 +04:00
88843a1f2d Fix windows package build (#719) 2023-08-03 17:40:20 +03:00
8ea37f2b03 MacOS package build refactoring (#718) 2023-08-03 11:00:15 +03:00
162b5dcb00 automatic plugins.dat generation on mac 2023-08-02 18:50:23 +04:00
2889258304 ignoring timestamps on cache loading
+ forming linux_64 build directory reworked
+ patching on mac enhanced
2023-08-02 16:53:58 +04:00
7770a41f08 Update hard-coded version to v7.5.0 2023-08-02 12:14:58 +00:00
0ba4a6a968 Merge branch hotfix/v7.4.1 into develop 2023-08-01 08:09:30 +00:00
c2d39b1357 Merge branch hotfix/v7.4.1 into master 2023-07-31 07:20:29 +00:00
415d47658b libvlc: fixed build for mac_arm64 2023-07-28 12:49:33 +04:00
a3e58605a5 Remove open from io 2023-07-27 17:17:48 +03:00
7936c3d097 libvlc: added build for mac 2023-07-27 18:12:25 +04:00
d4da415e4d Add method for replace path of text file with utf8 content (python2&3 support) 2023-07-26 22:30:02 +03:00
3197700bc0 Fix bug #62960 / Fix zip sign (#715) 2023-07-21 19:44:06 +05:00
285b99a5ac libvlc: build libvlc for linux in docker 2023-07-19 17:15:32 +04:00
fac40064ce libvlc: added build script for linux 2023-07-17 19:26:53 +04:00
4c4ef3ad64 Fix msi build (#714) 2023-07-17 18:37:22 +05:00
73bfa8e069 Fix bug #63360 / Fix msi VisualElementsManifest files (#712)
* Fix bug #63360 / Fix msi VisualElementsManifest files

* Small fix
2023-07-14 17:33:22 +05:00
2b9b254aaf Fix bug #63350 / Fix msi package.config (#711) 2023-07-14 13:59:36 +05:00
69edb29412 [develop] npm ci in server dir 2023-07-13 10:34:41 +03:00
a1deadc40c Fix bug in deploy osign library 2023-07-13 02:55:31 +05:00
495aa71860 Add module osign 2023-07-13 00:46:40 +03:00
2cf672ed17 Refactoring build mobile version 2023-07-10 12:09:52 +05:00
95770429ef Merge pull request #710 from ONLYOFFICE/feature/embedJS
Feature/embed js
2023-07-09 17:21:45 +03:00
724b42f938 libvlc: added build in docker for win32 and win64 2023-07-07 22:32:40 +04:00
010f1f7a77 Fix build scripts 2023-07-05 21:53:51 +03:00
9e96f3e9bd Fix fonts deploy 2023-07-05 12:52:02 +05:00
c68437cce8 Fix msi build (#709) 2023-07-04 19:11:12 +05:00
3f0385d469 Change online-help url (#708)
* Change online-help uri

* Change online help url
2023-07-03 18:58:15 +05:00
1d37344d01 Merge pull request #707 from ONLYOFFICE/feature/vboxtester
build
2023-07-02 00:44:30 +03:00
a5e412ee85 Add vboxtexter to deploy 2023-07-02 00:43:46 +03:00
a4b920b1ce for bug 63038 2023-07-01 23:52:34 +03:00
bdd1d765bf Add macos builder archive deploy (#706) 2023-06-30 21:13:22 +05:00
0f66ce9343 Merge pull request #705 from ONLYOFFICE/feature/for-bug-63038
[desktop] for bug 63038
2023-06-30 11:03:02 -04:00
68de1c72e7 Fix check action (#704) 2023-06-29 15:48:56 +05:00
2062bd0b92 Fix package build (#703) 2023-06-29 15:48:44 +05:00
0ea1b6c527 Fix mac appcast parsing (#702) 2023-06-29 13:25:38 +05:00
0f1dcb88d4 Merge pull request #701 from ONLYOFFICE/fix/nodejsV
Updated max permitted node.js version
2023-06-29 08:23:16 +03:00
12500bbd70 Changed node.js minor version 2023-06-29 10:31:06 +08:00
a2a40d122e Update hard-coded version to v7.4.1 2023-06-28 16:06:57 +00:00
7bc15e05d6 Updated max permitted node.js version 2023-06-28 21:50:24 +08:00
84a8032233 Add files to gitignore 2023-06-28 11:47:25 +03:00
67a4ab0dfe Merge pull request #700 from ONLYOFFICE/feature/tests
Add tests for editors
2023-06-28 11:41:20 +03:00
eff25a9245 Merge branch 'release/v7.4.0' into develop 2023-06-28 11:07:28 +03:00
b0c09da0bc Merge branch release/v7.4.0 into master 2023-06-28 07:17:13 +00:00
5497cb527b Disable precompiled version on release build 2023-06-27 18:49:41 +03:00
9e6010f650 Fix win update deploy (#697) 2023-06-21 18:34:00 +05:00
8d4ff54463 Fix bug #63097
Fix bug 63097
2023-06-21 16:51:17 +05:00
a3f2ec8161 Fix bug #63077
Fix bug 63077
2023-06-20 15:09:31 +05:00
a02f6b0276 [desktop] for bug 63038 2023-06-17 15:54:31 +03:00
570a433826 Merge branch release/v7.4.0 into master 2023-06-13 11:14:28 +00:00
e811ce765c Add tests for editors 2023-06-12 22:31:18 +03:00
c17037ef65 Change link to qt 2023-06-12 12:10:09 +03:00
bd3682f4f1 Package build fix (#691)
* Fix upload errors

* Fix inno help build
2023-06-08 13:16:05 +05:00
28767c0f2d Fix desktop help signing (#690)
Fix bug 62938
2023-06-07 18:10:42 +05:00
f70431f7a5 build 2023-06-06 15:00:28 +03:00
26448858e8 Package build small fix (#689) 2023-06-06 16:07:49 +05:00
fc2d4a45ca Package scripts fix (#688) 2023-06-06 14:48:37 +05:00
478f4b86e7 Restore desktop help installer (#687) 2023-06-06 13:59:34 +05:00
5e8f7aa52d Package build fix (#686) 2023-06-05 20:50:08 +05:00
c9b9cb5846 Add core-fonts repo for all modules 2023-06-05 12:29:05 +03:00
06a1b12069 Revert "Fix support python 2.7"
This reverts commit 815adb0856.
2023-06-05 09:01:16 +03:00
815adb0856 Fix support python 2.7 2023-06-05 07:06:26 +03:00
ef22f84ab1 Merge remote-tracking branch 'remotes/origin/fix/bug-59392' into release/v7.4.0
# Conflicts:
#	scripts/base.py
#	scripts/deploy_desktop.py
#	scripts/develop/config_server.py
2023-06-05 01:15:48 +03:00
596e7bf617 Add default fonts to deploy 2023-06-03 00:13:45 +03:00
a19609f5b1 Add desktop standalone package build (#685)
* Fix desktop help deploy

* Fix zip

* Fix inno build

* Fix msi build
2023-06-02 18:54:12 +05:00
2ad21f9cd0 [desktop] fix build package 2023-06-01 15:12:40 +03:00
e91c9f06dd [desktop] fix option name 2023-05-31 19:07:49 +03:00
d8e1cfe702 [desktop] debug building update's package 2023-05-31 14:48:25 +03:00
4949a8d464 Fix appcast stuff (#684) 2023-05-30 20:35:28 +05:00
121c624026 Remove debug staff 2023-05-29 12:44:47 +03:00
0ba5118b5e Do not remove marketplace directory 2023-05-28 21:40:22 +03:00
80d3824150 Update v8 version for android 2023-05-28 16:55:51 +05:00
5bf74fda13 Fix packages md5 command (#682) 2023-05-26 14:02:24 +05:00
a9cbfe93d0 Push md5 sums to S3 (#681) 2023-05-25 21:55:22 +05:00
e4d30cb842 Fix for local fonts directory 2023-05-25 13:49:17 +03:00
9b81677fbf Update patchelf 2023-05-24 20:59:19 +03:00
ce9762da73 Update patchelf and add build script 2023-05-24 11:56:33 +03:00
cb96902786 Add flag for conversion to images 2023-05-20 22:40:27 +03:00
750d5efc0e Refactoring 2023-05-19 16:43:25 +03:00
929ebf6e0e Merge pull request #679 from ONLYOFFICE/fix/remove-deploy
Remove deploy folders
2023-05-18 18:03:02 +05:00
3d446a5d30 Merge branch 'release/v7.4.0' into fix/remove-deploy 2023-05-18 15:03:14 +03:00
978fe74291 Update base.py 2023-05-18 15:02:53 +03:00
04a5f4667a Fix plugin list format (#677) 2023-05-17 20:33:35 +05:00
095bbc9d19 Update vcredist checksums (#674) (#675) 2023-05-17 16:19:41 +05:00
2478eb6873 Update vcredist checksums (#674) 2023-05-17 13:15:33 +05:00
d1b490b59a Remove sdkjs-plugins from server deploy (#673)
* Remove sdkjs-plugins from server deploy

* Generation plugin list json

* Fix filename
2023-05-16 20:05:10 +05:00
01158bb16a Fix typo 2023-05-16 10:23:33 +03:00
dd583cb908 Enable bundle_xcframeworks by default for ios build 2023-05-16 10:18:34 +03:00
702952740f Use v8 9.9 version instead of 6.0 on macOS (use_v8 version) 2023-05-15 17:40:32 +05:00
99741d0805 Merge pull request #672 from ONLYOFFICE/merge-hotfix
Merge branch hotfix/v7.3.3 into develop
2023-05-15 12:38:52 +05:00
fcec89bf9c Merge branch 'hotfix/v7.3.3' into develop 2023-05-15 10:28:28 +03:00
d2aa0f521f Merge branch hotfix/v7.3.3 into master 2023-05-15 06:43:45 +00:00
dab33a829b Merge branch 'release/v7.4.0' of github.com:ONLYOFFICE/build_tools into release/v7.4.0 2023-05-02 17:22:34 +03:00
dc08890d4c Merge branch 'hotfix/v7.3.3' into release/v7.4.0 2023-05-02 17:22:16 +03:00
40c5192c1c Fix cef103 for new systems 2023-05-02 17:17:23 +03:00
06c0ef6dde Small fix 2023-05-02 17:13:22 +03:00
cb1ad31c86 Add cef103 for old mac systems (enable with use_v8 flag) 2023-05-02 17:04:19 +03:00
8fbf7485dc Merge pull request #666 from ONLYOFFICE/release/v7.4.0
Fix build arm64 version
2023-05-02 02:57:21 +05:00
c03aa9cbfd Fix build arm64 version 2023-05-02 00:55:35 +03:00
063ffc6ece Merge pull request #665 from ONLYOFFICE/release/v7.4.0
Release/v7.4.0
2023-04-28 21:37:19 +05:00
b5881ef5b4 Merge pull request #664 from ONLYOFFICE/feature/pmutility
pluginsmanager deploy
2023-04-28 20:44:01 +05:00
91582bc164 Package changelogs refactoring (#663) 2023-04-28 19:13:08 +05:00
c11b53bba9 pluginsmanager deploy 2023-04-28 11:59:57 +03:00
362d82e32c Disable warnings by default 2023-04-27 14:38:38 +03:00
a024ff4899 Enable clean openssl by version for ios & android 2023-04-27 10:05:30 +03:00
fb3dda807e Merge pull request #662 from ONLYOFFICE/release/v7.4.0
Release/v7.4.0
2023-04-27 01:55:46 +05:00
98a4dbddfc Fix build v8 not on windows 2023-04-22 16:14:57 +03:00
e2f0d4b643 Merge pull request #661 from ONLYOFFICE/feature/googletest
Feature/googletest
2023-04-22 00:09:39 +03:00
ad67f0de2c Fix build msi package for x86 platform (#660) 2023-04-21 18:14:43 +05:00
6df78fb8c0 Fix typo. Removed unnecessary imports from googletest.py 2023-04-21 15:31:07 +04:00
b75f9cc039 Remove build gtests (move sources to pri file) 2023-04-21 14:02:05 +03:00
776b50945f Set update flag for md2 alg 2023-04-21 13:53:09 +03:00
6bdfc8a141 Create use_system_qt.py 2023-04-21 01:51:35 +03:00
88fc4a26ed Add support build with system qt on linux 2023-04-21 01:51:11 +03:00
904fee3f53 Fix build with python >= 3.10 2023-04-21 01:50:11 +03:00
784a9928cd Add midl file to deploy 2023-04-21 01:21:18 +03:00
493b79a18c Added googletest module to 3dParty 2023-04-20 22:52:59 +04:00
503ae7679f Fix closure maps deploy (#659)
* Fix closure maps deploy

* Small fix
2023-04-20 20:15:45 +05:00
873b1f3774 Fix deploy desktop inno update (#658) 2023-04-20 19:42:49 +05:00
e9e7e7a4c7 Enabled MD2 in openssl builds 2023-04-20 18:24:09 +04:00
e508edb680 Refactoring desktop inno update (#657) 2023-04-19 19:35:21 +05:00
334ad55548 Fix powershell args (#656) 2023-04-19 14:51:56 +05:00
d14816d18e Fix desktop zip build (#655) 2023-04-19 13:28:18 +05:00
204f9fbe51 Merge pull request #653 from ONLYOFFICE/feature/storeK0R0L
testing from K0R0L fork in desktop
2023-04-18 18:09:08 +03:00
6d8b407872 testing from K0R0L fork in desktop 2023-04-18 17:50:28 +03:00
0f5dab8095 [desktop] fix copying package info to dest path 2023-04-18 09:56:58 +03:00
e1271a62cf [desktop] refactoring 2023-04-16 14:33:26 +03:00
549e182867 [desktop] added reserved url for updates 2023-04-16 14:21:10 +03:00
59f8e39092 Fix linux_arm64 build 2023-04-15 08:06:53 +03:00
5ff3c615e9 Merge pull request #652 from ONLYOFFICE/feature/newpm
deploy new plugin manager
2023-04-14 18:54:16 +03:00
00300d80c2 Fix indents 2023-04-14 18:53:48 +03:00
7dd97a8490 Refactoring deploy plugins in desktop 2023-04-14 18:50:01 +03:00
f39cfe8a22 Merge pull request #651 from ONLYOFFICE/feature/automate-update
Update linux automate script
2023-04-14 18:31:35 +03:00
3251c1125f deploy new plugin manager 2023-04-14 16:10:01 +03:00
a7f0f1611f Update linux automate script 2023-04-14 16:00:52 +03:00
cf1f0bdb63 Fix typo 2023-04-14 15:54:14 +03:00
b7808a8fa6 Refactoring & enable old v8 for old macOS 2023-04-14 14:22:26 +03:00
7184016b62 Deploy small fix (#650) 2023-04-14 13:52:04 +05:00
cb137bb28a Fix typo 2023-04-14 10:50:18 +03:00
5309911e2b Fix typo 2023-04-14 09:43:31 +03:00
101949ba1c Correct default v8 version 2023-04-14 00:07:14 +03:00
1431d3a541 Package deploy refactoring (#649)
* Deploy closure maps (#635)

* Package deploy refactoring
2023-04-13 19:06:56 +05:00
c04ef86daf [develop] Use npm ci instead of npm i when building sdk 2023-04-13 13:51:40 +03:00
f780bef0a9 Merge pull request #646 from ONLYOFFICE/fix/java_version
Added checking Java version to 11+ x64-bit
2023-04-13 14:41:49 +05:00
014b74bb1d Merge pull request #647 from ONLYOFFICE/release/v7.4.0
Release/v7.4.0
2023-04-13 10:20:53 +03:00
2578d22b93 Added checking Java version to 11+ x64-bit 2023-04-13 09:29:46 +03:00
b91cbf1233 enable vs 2019 by default 2023-04-12 15:49:40 +03:00
55955b7731 v8 patch for debug build (remove after update v8 version) 2023-04-12 11:26:39 +03:00
484c9dc910 Changes for use _ITERATOR_DEBUG_LEVEL in debug by default 2023-04-12 11:21:53 +03:00
c235a78634 Fix builder package name (#644) 2023-04-10 17:09:56 +05:00
09bf6684d3 Correct old v8 for build in debug mode 2023-04-09 13:34:41 +03:00
ea0a80e4d1 [windows] Use _ITERATOR_DEBUG_LEVEL=0 in debug build 2023-04-08 22:22:42 +03:00
52e706d212 Fix package names (#643) 2023-04-07 20:53:42 +05:00
55f8633cce Merge pull request #642 from ONLYOFFICE/feature/add-desktop-subproj
[desktop] skip build update service for win xp
2023-04-07 18:00:58 +03:00
11fa48d1b0 [desktop] skip build update service for win xp 2023-04-07 17:08:54 +03:00
a3d7c0bbcd Merge pull request #641 from ONLYOFFICE/feature/add-desktop-subproj
[desktop] build update service for win package
2023-04-07 15:49:29 +03:00
32af7d10c6 [desktop] build update service for win package 2023-04-07 15:45:44 +03:00
1241e7e868 Remove .system directory from deploy 2023-04-07 09:45:29 +03:00
0a8601ca79 Change icu build script 2023-04-07 09:40:09 +03:00
8fa8424f24 Remove cmap file 2023-04-02 16:42:50 +03:00
c7465ba9ee Add library for ubuntu14 2023-03-30 12:25:34 +03:00
604e627233 Add excludes for elf paths 2023-03-28 10:28:34 +03:00
7c1f957275 Add rpath to elf tools 2023-03-28 01:28:32 +03:00
ad762c667b Fix elf tools 2023-03-28 00:30:43 +03:00
25148a4ccc Fix rpath 2023-03-27 14:42:58 +03:00
803fa4781b Update patchelf 2023-03-27 13:21:52 +03:00
d1133a01a8 Add script for change origin in rpath/runpath 2023-03-27 00:18:48 +03:00
63cdb366ba Fix rpath for old systems 2023-03-26 21:08:38 +03:00
585d1bfba9 Add tools 2023-03-26 20:59:49 +03:00
fb9d1e69a4 Merge pull request #634 from ONLYOFFICE/feature/license-checker
Feature/license checker
2023-03-24 13:30:34 +03:00
98f84e8740 Update hard-coded version to v7.4.0 2023-03-23 08:38:14 +00:00
75d975f91d Autoselect best cert for inno installer signing (#640) 2023-03-21 20:07:54 +05:00
0cebd3646f Merge pull request #639 from ONLYOFFICE/fix/sio
Add PING_TIMEOUT_INTERVAL param
2023-03-16 20:54:42 +03:00
7633022d82 Add PING_TIMEOUT_INTERVAL param 2023-03-16 22:53:57 +05:00
045747f625 Merge branch hotfix/v7.3.3 into master 2023-03-15 10:46:15 +00:00
8f146582a4 Merge pull request #637 from ONLYOFFICE/feature/v7.4
Feature/v7.4
2023-03-15 12:42:21 +03:00
59bb27998f Fix build 2023-03-12 14:00:06 +05:00
d45cd9932b revert test 2023-03-10 21:23:37 +03:00
58b6a91f65 test action 2023-03-10 21:21:25 +03:00
942875d1a1 fix actions 2023-03-10 21:20:33 +03:00
bcb38f8731 Deploy closure maps (#635) 2023-03-06 18:10:58 +05:00
ad53559b4f Developing 2023-03-04 09:39:48 +03:00
6b740baf73 revert actions 2023-03-02 17:28:29 +03:00
1ada97c409 fix actions 2023-03-02 17:26:10 +03:00
89caa5f87c fix docs 2023-03-02 17:24:38 +03:00
1badc69477 fix doc 2023-03-02 17:10:01 +03:00
6769ade9a9 fix doc 2023-03-02 17:07:35 +03:00
0e783f0413 fix docs 2023-03-02 17:05:48 +03:00
577ab77f1d [build] fix docs 2023-03-02 17:01:43 +03:00
7ee44be072 fix docs 2023-03-02 16:57:36 +03:00
fa7bbaf98b fix doc 2023-03-02 16:53:21 +03:00
672fcfdb6d add test doc 2023-03-02 16:50:00 +03:00
fc01b4ad8a [build] changed check 2023-03-02 16:49:17 +03:00
ca7f0f5951 deleted test doc 2023-03-02 16:44:11 +03:00
f003ad3277 [build] changed paths 2023-03-02 16:43:28 +03:00
dc6f59943f test doc 2023-03-02 16:38:03 +03:00
ee51adb675 [build] revert base.py 2023-03-02 16:35:31 +03:00
5406c24771 [build] revert base.py 2023-03-02 16:34:33 +03:00
d9c768c2d0 [build] fix deleted semicolons 2023-03-02 16:19:50 +03:00
d876c4d100 [build] small fix 2023-03-02 15:32:37 +03:00
894aaa9fa9 Merge pull request #633 from ONLYOFFICE/develop
Develop
2023-03-02 15:30:24 +03:00
010f22ea3b Revert "[build] added checks for plugins"
This reverts commit 72cf0a5837.
2023-03-02 15:24:40 +03:00
0a560c9594 Revert "[develop] Add plugins deploy for developer version"
This reverts commit 03d371d9fc.
2023-03-02 15:23:57 +03:00
7e53c18f5b Fix removing locale help 2023-03-02 12:05:49 +05:00
f0a3325ab8 [license] Update address in Copyright 2023-03-01 23:38:07 +03:00
a18b226ea2 Add help from common directory 2023-03-01 19:27:08 +05:00
4112c88c1b Generate closure compiler maps (#632) 2023-03-01 13:15:12 +05:00
abda397c9f Remove external providers on winXP 2023-03-01 10:58:27 +03:00
ba0c7173c9 [license] Update config; Fix bug with bom, leading space; and minor changes 2023-02-28 15:20:59 +03:00
0c40287764 Merge pull request #629 from ONLYOFFICE/hotfix/v7.3.3
Hotfix/v7.3.3
2023-02-20 20:20:23 +05:00
05902d88a7 Update vcredist checksums (#628) 2023-02-17 20:07:25 +05:00
228b00d5c7 Setting up Info.plist version (#627)
* Setting up Info.plist version

* Small fix
2023-02-16 17:44:19 +05:00
6c2ce95b0e Fix test appcast links (#626) 2023-02-14 21:19:24 +05:00
fcb7ece378 Merge pull request #624 from ONLYOFFICE/hotfix/v7.3.2
Merge branch hotfix/v7.3.2 into hotfix/v7.3.3
2023-02-14 20:19:34 +05:00
65ef84179f Update hard-coded version to v7.3.3
(cherry picked from commit b7aa164ed8)
2023-02-14 18:18:21 +03:00
0811018560 Merge branch hotfix/v7.3.2 into develop 2023-02-14 07:52:35 +00:00
c37f8153c0 Merge branch hotfix/v7.3.2 into master 2023-02-14 07:52:31 +00:00
1a70ce90f9 Allow 32-bit msi only on 32-bit system (#621) 2023-02-10 17:34:31 +05:00
cd011035ff [develop] Add correct_plugins_branding to develop start; and minor changes 2023-02-10 00:17:25 +03:00
b7aa164ed8 Update hard-coded version to v7.3.3 2023-02-08 08:35:15 +00:00
cecf304ace Update hard-coded version to v7.3.2 2023-02-08 08:31:39 +00:00
ce60b83e65 Update win appcast generation (#619) 2023-02-08 13:27:22 +05:00
892ddc8a79 fix path 2023-02-07 18:02:54 +03:00
de237fb4af fix deploy 2023-02-07 17:35:12 +03:00
d60fc52e74 fix comments 2023-02-07 17:31:08 +03:00
1c8e702399 [fix] fix prev 2023-02-07 01:57:42 +03:00
c2dc35e857 fix prev commit 2023-02-07 01:56:10 +03:00
2067e12bdf [build] fix bug 59392 2023-02-07 01:54:21 +03:00
7764d4ba30 Merge remote-tracking branch 'origin/hotfix/v7.3.2' into develop 2023-02-06 23:28:38 +03:00
719a198e55 Merge pull request #617 from ONLYOFFICE/hotfix/v7.3.1
Hotfix/v7.3.1
2023-02-05 13:11:07 +05:00
8eac35df75 fix package building for desktop 2023-02-05 10:06:11 +03:00
975972885a Merge branch release/v7.3.0 into develop 2023-02-02 13:29:00 +00:00
056da4b782 Merge branch release/v7.3.0 into master 2023-02-02 13:28:57 +00:00
0de3c26200 Update hard-coded version to v7.3.1 2023-02-02 08:08:32 +00:00
f5539cf79f Fix build (ninja missing) 2023-02-02 09:56:09 +03:00
a0bdca62b5 Disable unused modules 2023-02-01 23:27:47 +03:00
ad996d39d2 Merge pull request #614 from ONLYOFFICE/feature/change-desktop-updates-build
[desktop] changed building updates for desktop
2023-02-02 00:15:44 +05:00
d393b9ea90 [desktop] changed building updates for desktop 2023-02-01 22:09:31 +03:00
3ae37d764b [build] fix 2023-02-01 12:40:24 +03:00
6b15d7fca2 [build] add marketplace plugin at ignore 2023-02-01 12:26:19 +03:00
d8167ea9dd [build] fix plugins 2023-02-01 12:22:12 +03:00
6efb0cfccf Merge pull request #613 from ONLYOFFICE/release/v7.3.0
Merge release/v7.3.0 into hotfix/v7.3.0
2023-02-01 12:35:24 +05:00
19ac16ff62 Fix DesktopEditorsHelp installer build [2] (#612)
* Fix DesktopEditorsHelp installer build

* Small fix
2023-02-01 12:29:54 +05:00
1710df79f2 [build] fix prev commit 2023-02-01 07:24:49 +03:00
72cf0a5837 [build] added checks for plugins 2023-02-01 06:48:36 +03:00
468f1788b8 Add support no tls version socket.io 2023-01-31 22:58:47 +05:00
36b5e1b5d7 Fix DesktopEditorsHelp installer build (#610) 2023-01-31 20:27:03 +05:00
03d371d9fc [develop] Add plugins deploy for developer version 2023-01-31 14:17:08 +03:00
4b50455a22 Merge branch release/v7.3.0 into master 2023-01-31 07:56:10 +00:00
5250de602c Enable DesktopEditorsHelp installer build (#608) 2023-01-30 17:39:24 +05:00
ffb88cdf57 Remove depends to python3 2023-01-28 23:45:57 +05:00
06773a22c9 [build] license_checker small fix 2023-01-26 17:57:31 +03:00
6ddcbc7c18 fix 2023-01-25 13:45:16 +03:00
1cdc9142df license_checker readme added 2023-01-25 13:30:15 +03:00
3bc88c4bf3 fix license checker 2023-01-24 22:14:10 +03:00
c4b21c554f [build] updated license checker 2023-01-24 22:06:28 +03:00
151c691af2 changed source for desktop updates info (#607) 2023-01-24 14:03:38 +05:00
9f00f08c30 Fix bug 60569 2023-01-23 10:52:50 +03:00
3e2c03d3a3 Fix ios build 2023-01-22 19:21:28 +05:00
cd1c420fae Fix typo 2023-01-22 13:38:37 +05:00
c4d592be20 Small refactoring 2023-01-22 13:29:19 +05:00
808e470b27 Add support bundle_xcframeworks flag 2023-01-22 13:26:54 +05:00
f7bbe2d9f7 [build] license checker fix docs 2023-01-20 16:43:30 +03:00
92760b2835 [build] license checker updated config options
Added ignoreListDirName
2023-01-20 16:41:28 +03:00
379718dbf9 [build] fix license_checker
deleted unused ignore
2023-01-20 15:54:44 +03:00
787d690c41 [build] license checker added config for repos 2023-01-20 14:43:29 +03:00
32f124517a [build] small fix license checker 2023-01-19 18:09:28 +03:00
f501a6ebac [build] license checker is ready
(tested only for sdkjs)
2023-01-19 17:53:11 +03:00
2f632a0f8d init commit 2023-01-17 12:35:47 +03:00
597b8a67e2 Small fix desktop packages (#606)
* Fix generate_appcast command

* Fix html changes generation
2023-01-16 19:09:44 +03:00
f21689f8dd Changed node.js version to download. 2023-01-13 12:57:17 +03:00
9bd3f170e5 Merge pull request #600 from ONLYOFFICE/fix/bug58644
Fix bug 58644
2023-01-11 16:12:34 +03:00
34e9c614b8 Fix desktop macos build (#601) 2023-01-11 15:50:02 +03:00
960db59935 Update markdownlint action (#602) 2023-01-11 15:47:13 +03:00
d57efcf0fe Fix bug 58644 2023-01-10 14:32:19 +03:00
306703e677 Merge branch 'release/v7.3.0' into develop 2023-01-02 14:54:40 +03:00
256edf489c Merge pull request #598 from ONLYOFFICE/feature/package-develop-fix
Merge release/v7.3.0 package fixes
2023-01-02 14:53:08 +03:00
655837f8cd Merge release/v7.3.0 package fixes 2023-01-01 23:17:14 +03:00
ef43e6a9a4 Remove not used websocket engine 2023-01-01 19:55:25 +03:00
d8ac434e7e Fix desktop macos build (#596)
* Fix curl follow redirects

* Build sparkle updates for success dmg build
2022-12-29 19:08:28 +03:00
6907fadce3 Merge branch hotfix/v7.2.2 into release/v7.3.0 2022-12-27 14:42:26 +00:00
8fa222a9b9 Merge branch hotfix/v7.2.2 into develop 2022-12-27 13:43:24 +00:00
fc05ba6f4d Optimize linux build targets (#593)
* Small fix

* Optimize linux build targets

* Fix linux deploy targets

* Small fix

* Small fix
2022-12-27 00:00:55 +03:00
3c6d7edea0 Fix desktop macos build (#592)
* Fix uploads

* Fix sh args

* Fix macos build

* Small fix

* Small fix
2022-12-26 17:13:11 +03:00
908f2efd43 Fix typo (#591) 2022-12-25 23:33:19 +03:00
0e90989998 Small fix (#590) 2022-12-23 22:51:34 +03:00
329ba4a62d Small fix (#589) 2022-12-23 22:48:44 +03:00
dd9a8b9df5 Fix develop (#588)
* Fix msi build (#583)

* Add packages upload s3 endpoint url option (#584)

* Fix package build (#585)

* Refactoring package scripts (#587)

* Refactoring logs

* Refactoring packages

* Small fix
2022-12-23 20:11:12 +03:00
feac842b8a Refactoring package scripts (#587)
* Refactoring logs

* Refactoring packages

* Small fix
2022-12-23 20:08:53 +03:00
2916e4e625 Update hard-coded version to v7.3.0 2022-12-22 08:44:30 +00:00
d758cd1e7d Merge branch hotfix/v7.2.2 into master 2022-12-20 07:54:28 +00:00
b8bee2a9fe Fix package build (#585) 2022-12-19 13:56:18 +03:00
65e9994963 [ios] Fix Info.plist for xcframework 2022-12-16 19:32:05 +03:00
cd8ced38f2 Build SocketRocket as xcframework 2022-12-16 19:31:35 +03:00
f6e35f7250 Add packages upload s3 endpoint url option (#584) 2022-12-15 15:04:55 +03:00
29299704aa Fix msi build (#583) 2022-12-12 18:22:48 +03:00
ad83a772a1 Merge commit 'ab838ae3ba50283cd683a1bbaa7ac256d28cc256' into develop 2022-12-12 12:30:30 +03:00
ba5a532da0 Temporary remove project from linux_arm64 build 2022-12-09 11:24:12 +03:00
ab838ae3ba Merge branch 'hotfix/v7.2.2' into release/v7.3.0 2022-12-08 20:41:56 +03:00
4dedb18137 Add new app for core 2022-12-07 14:48:26 +03:00
0c18cbc758 Fix android build 2022-12-06 22:27:48 +03:00
c012a8045f Disable precompiled headers for mobile arches 2022-12-06 19:48:51 +03:00
536b64a63d Fix package reports (#581) 2022-12-06 19:36:49 +03:00
6b6b91c083 Add socketio library checkout 2022-12-06 17:06:14 +03:00
d4cd2d83d4 Fix package build (#580) 2022-12-03 16:04:40 +03:00
606b73d92f Fixes from hotfix (#579) 2022-12-02 15:27:45 +03:00
75543fe126 Fix package build (#578) 2022-12-01 14:45:20 +03:00
41e5f53c45 Fix package upload (#577) 2022-12-01 01:18:26 +03:00
626efceaee Fix package upload (#576) 2022-11-30 21:00:15 +03:00
9d0596089d Update package upload (#575)
* Update package upload

* Small fix
2022-11-30 19:31:32 +03:00
9d17f14fbb Fix run developer docker 2022-11-29 11:30:05 +03:00
1ad42f671a Merge pull request #574 from ONLYOFFICE/release/v7.3.0
Fix previous commit
2022-11-23 14:02:48 +03:00
2c407117dd Fix previous commit 2022-11-23 14:02:27 +03:00
b3ab757416 Merge pull request #572 from ONLYOFFICE/release/v7.3.0
Release/v7.3.0
2022-11-23 10:52:33 +03:00
6667c03ff6 Merge pull request #571 from ONLYOFFICE/feature/pdffile
Feature/pdffile
2022-11-22 20:56:23 +03:00
91b75fcae5 Merge remote-tracking branch 'origin/release/v7.3.0' into feature/pdffile 2022-11-22 16:49:27 +03:00
048a54716f Merge pull request #570 from ONLYOFFICE/feature/refactoringX2T
Fix build
2022-11-22 14:43:04 +03:00
694d562a80 [ios] Add xcframeworks to mobile deploy 2022-11-20 21:43:50 +03:00
a12f5dba9f Fix build 2022-11-18 22:20:17 +03:00
65571cfa06 Release/v7.3.0 (#569)
* Delete quotes in AddUpgradeCode action (#555)

* Add feature switches (#553)

* Update vcredist checksums (#556)

* Fix macos package script (#558)

* Fix macos package script

* Small fix

* Small fix

* [desktop] skip WinSparkle from deploy

* Fix macos package build (#561)

* Add build mobile package (#562)

* Fix packages build (#563)

* Fix macOS ARM package build (#564)

* Updated script for generation plugin macros documentation.

* Fix core archive

* Small fix

* Add macOS ARM core archive build (#568)

Co-authored-by: Eugene Kozyrev <67453079+EugeneKozyrev@users.noreply.github.com>
Co-authored-by: Maxim Kadushkin <maxim.kadushkin@onlyoffice.com>
Co-authored-by: Nikita Khromov <nikita.khromov@onlyoffice.com>
Co-authored-by: Oleg Korshul <Oleg.Korshul@onlyoffice.com>
2022-11-18 18:20:00 +03:00
7841606a41 Add macOS ARM core archive build (#568) 2022-11-18 18:14:39 +03:00
cf67d1cb77 Merge pull request #566 from ONLYOFFICE/feature/core-archive-fix
Fix core archive
2022-11-17 14:28:57 +03:00
255ecd64b2 Small fix 2022-11-17 12:56:15 +03:00
96913b568f Fix core archive 2022-11-17 12:22:29 +03:00
9c046cf10f Merge pull request #565 from ONLYOFFICE/fix/scriptMacros
Updated script for generation plugin macros documentation.
2022-11-17 10:24:43 +03:00
68367474d0 Updated script for generation plugin macros documentation. 2022-11-17 12:19:48 +05:00
ab77f6d936 Fix macOS ARM package build (#564) 2022-11-15 15:24:54 +03:00
8dadf0dada Fix packages build (#563) 2022-11-14 16:11:24 +03:00
f074914f1b Add build mobile package (#562) 2022-11-11 17:46:41 +03:00
110981066e Fix macos package build (#561) 2022-11-11 16:17:18 +03:00
4fd5d6a814 Merge pull request #560 from ONLYOFFICE/feature/merge-hotfix
Merge branch hotfix/v7.2.1 into develop
2022-11-11 15:42:04 +03:00
c68c365261 Merge branch 'hotfix/v7.2.1' into develop 2022-11-11 15:39:31 +03:00
6195485cc7 [desktop] skip WinSparkle from deploy 2022-11-11 11:32:29 +03:00
f13471428c Fix macos package script (#558)
* Fix macos package script

* Small fix

* Small fix
2022-11-10 15:56:27 +03:00
d2d7dc0717 Update vcredist checksums (#557) 2022-11-09 18:33:47 +03:00
55f1a05d17 Update vcredist checksums (#556) 2022-11-09 18:28:26 +03:00
173b81c288 Add feature switches (#553) 2022-11-08 10:24:51 +02:00
62a8e2f72a PdfFile instead of PdfReader and PdfWriter 2022-11-08 09:41:57 +03:00
e50a0e84f2 Delete quotes in AddUpgradeCode action (#555) 2022-11-07 16:00:15 +03:00
0307890bf3 Delete quotes in AddUpgradeCode action (#555) 2022-11-07 15:33:56 +03:00
998daaa8d0 Merge pull request #552 from ONLYOFFICE/release/v7.3.0
Release/v7.3.0
2022-11-05 12:58:07 +03:00
0b4faf9c80 Merge pull request #551 from ONLYOFFICE/feature/docbuilder.com
docbuilder.com paths fix
2022-11-05 12:26:31 +03:00
bafeadd809 docbuilder.com paths fix 2022-11-05 02:48:34 +03:00
ecab59b715 Merge pull request #549 from ONLYOFFICE/develop
Develop
2022-11-04 22:21:07 +03:00
0edb21a44b Merge pull request #548 from ONLYOFFICE/feature/docbuilder.com
docbuilder.com build fix
2022-11-04 20:15:03 +03:00
652fa57245 docbuilder.com build fix 2022-11-04 19:34:43 +03:00
108f7bd8f7 Merge pull request #547 from ONLYOFFICE/develop
Develop
2022-11-04 15:00:06 +03:00
fce06d28a2 Fix build for new xcode 2022-11-04 14:56:34 +03:00
62169f91db Change minimum ios version 2022-11-03 21:56:32 +03:00
2d2f1ec7d1 fix build (#546) 2022-11-02 18:45:05 +03:00
3a60d08eb3 Disable x32 arches by default for ios 2022-11-02 17:01:39 +03:00
04f8f175b9 Change function name (#545) 2022-11-02 10:24:31 +03:00
c687a4ae5b Bugfix #59471/Fix a bug of package cannot be installed if installed a newer version (#544)
* Add AddUpgradeCode action

* Delete detect flag
2022-11-01 17:01:12 +03:00
c19c692ace fix build 2022-10-28 10:29:43 +03:00
8e71fa736b Disable 32bit arches for ios 2022-10-27 12:35:24 +03:00
e76fc53e85 Updated script for generating documentation 2022-10-26 12:04:37 +05:00
dc548da9eb Changed python to python3 in run_build.js 2022-10-24 19:27:31 +05:00
c618c0a6c3 Update run_build_js.py 2022-10-24 14:52:53 +03:00
6e4c75144a Update python version 2022-10-21 12:52:44 +03:00
abe9b200c9 Merge pull request #539 from ONLYOFFICE/feature/x2ttester
Add x2ttester
2022-10-16 20:01:18 +03:00
8b542376c5 Add x2ttester 2022-10-16 05:23:51 +03:00
b59df7faec Merge pull request #537 from ONLYOFFICE/release/v7.2.0
Merge branch release/v7.2.0 into develop
2022-10-12 18:46:01 +03:00
c9c516daf2 Merge branch 'develop' into release/v7.2.0 2022-10-12 18:25:02 +03:00
75109ea476 Merge pull request #531 from ONLYOFFICE/hotfix/v7.2.1
Hotfix/v7.2.1
2022-09-28 12:07:51 +03:00
6b62d86151 Merge pull request #530 from ONLYOFFICE/feature/hyphen
Add hyphen
2022-09-28 11:39:10 +03:00
30d331b16e Fix desktop package build (#528) 2022-09-27 14:23:52 +03:00
f8216e4f6a Merge pull request #523 from ONLYOFFICE/feature/merge-develop
Merge release/v7.2.0 into develop
2022-09-21 11:40:21 +03:00
2e9a66c70c Merge branch 'release/v7.2.0' into feature/merge-develop 2022-09-21 11:37:28 +03:00
7455472856 Add hyphen 2022-09-14 18:05:35 +03:00
6b46c5d2b2 Fix package build (#510)
* Fix json save

* Fix core deploy

* Fix linux deploy
2022-08-12 15:22:33 +03:00
370fa31c11 Fix package build (#509) 2022-08-12 11:09:57 +03:00
29f5c6e111 Packages deploy (#508)
* Improve logs

* Refactoring core

* Add deploy desktop

* Add deploy builder

* Add deploy server

* Other fixes

* Small fix
2022-08-11 18:00:13 +03:00
0e4134b5f8 Merge branch release/v7.2.0 into develop 2022-08-11 11:11:25 +00:00
48cc6e7f5a Merge branch hotfix/v7.1.1 into develop 2022-07-18 09:55:08 +00:00
7530a20cd8 Fix packages module path (#491) 2022-07-08 17:55:33 +03:00
f3145e0d06 Fix branding (#489) 2022-07-07 14:52:12 +03:00
72a9c18b94 Fix vcredist download (#488) 2022-07-07 14:51:35 +03:00
14522ee010 Remake packages (#485)
* Remove isxdl

* Fix targets bools

* Fix builder base dir

* Update builder innosetup build

* Fix builder build

* Run ps script function

* Fix ps script function

* Print build results

* Fix dict

* Function add_task

* Fix results log

* Add deploy

* Fix deploy

* Add core deploy

* Fix core deploy

* Debug scripts

* Fix workspace_dir

* Refactoring core

* Refactoring core

* Fix platforms

* Refactoring builder

* Small fix

* Fix core

* Fix cmd

* Refactoring builder

* Fix builder

* Fix

* Fix

* Fix server

* Fix builder

* Fix desktop linux

* Fix desktop windows

* Add appcast-prod creation

* Fix appcast

* Fix vcredist verbose

* Fix appcast

* Small fix builder

* Small fix desktop

* Small fix desktop linux

* Fix desktop macos

* Check vc redist md5 sums

* Fix kwargs

* Fix log_h1

* Fix macos fastlane params
2022-07-05 15:34:00 +03:00
d3d53b983a Fixed port of documentserver-example for linux 2022-06-30 12:13:15 +03:00
2a3b6d0ebb Merge pull request #483 from ONLYOFFICE/release/v7.2.0
Add multiprocess option
2022-06-22 21:53:45 +03:00
2bc9e29e4b Merge pull request #481 from ONLYOFFICE/release/v7.2.0
Release/v7.2.0
2022-06-18 18:54:38 +03:00
115 changed files with 7614 additions and 1902 deletions

View File

@ -3,7 +3,7 @@ name: Bug Report
about: Report an issue with build_tools you've discovered.
---
**Describe your problem**:
# Describe your problem:
*Be clear in your description of the problem.
Open an issue with a descriptive title and a summary in complete sentences.*

View File

@ -1,15 +1,24 @@
name: check
on: [push]
name: Markdown Lint
on:
workflow_dispatch:
push:
branches:
- '**'
paths:
- '*.md'
- 'develop/*.md'
- 'scripts/**.md'
- '.markdownlint.jsonc'
jobs:
markdownlint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12
uses: actions/setup-node@v1
- uses: actions/checkout@v4
- uses: DavidAnson/markdownlint-cli2-action@v16
with:
node-version: 12
- name: Check *.md files by `markdownlint`
run: |
npm install -g markdownlint-cli
markdownlint *.md develop/*.md
globs: |
*.md
develop/*.md
scripts/**.md

View File

@ -3,16 +3,13 @@ name: Update hard-coded version
on: workflow_dispatch
jobs:
update-version:
if: >-
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
contains(github.ref, 'refs/heads/release/v') }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
token: ${{ secrets.PUSH_TOKEN }}
@ -25,9 +22,9 @@ jobs:
run: echo "${{ env.version }}" > version
- name: Commit & push changes
uses: EndBug/add-and-commit@v8
uses: EndBug/add-and-commit@v9
with:
author_name: github-actions[bot]
author_email: github-actions[bot]@users.noreply.github.com
message: Update hard-coded version to v${{ env.version }}
message: Update hard-coded version to ${{ env.version }}
add: version

7
.gitignore vendored
View File

@ -7,3 +7,10 @@ config
*.*~
**~
*.DS_Store
scripts/license_checker/reports
tests/puppeteer/node_modules
tests/puppeteer/work_directory
tests/puppeteer/package.json
tests/puppeteer/package-lock.json
scripts/sdkjs_common/jsdoc/node_modules
scripts/sdkjs_common/jsdoc/package-lock.json

5
.markdownlint.jsonc Normal file
View File

@ -0,0 +1,5 @@
{
"line-length": {
"code_block_line_length": 300
}
}

View File

@ -11,5 +11,4 @@ RUN rm /usr/bin/python && ln -s /usr/bin/python2 /usr/bin/python
ADD . /build_tools
WORKDIR /build_tools
CMD cd tools/linux && \
python3 ./automate.py
CMD ["sh", "-c", "cd tools/linux && python3 ./automate.py"]

View File

@ -196,9 +196,8 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
**Note**: The created database must have **onlyoffice** both for user and password.
```bash
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
```
3. Configure the database:

119
build.pro
View File

@ -1,119 +0,0 @@
TEMPLATE = subdirs
ROOT_DIR=$$PWD/..
DEPLOY_DIR=$$PWD/deploy
CORE_ROOT_DIR=$$ROOT_DIR/core
include($$PWD/common.pri)
CONFIG += ordered
core_windows {
desktop:CONFIG += core_and_multimedia
}
core_linux {
desktop:CONFIG += core_and_multimedia
}
core_mac {
CONFIG += no_desktop_apps
}
core_ios {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
core_android {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
cryptopp)
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
unicodeconverter)
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
kernel unicodeconverter)
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
kernel unicodeconverter)
addSubProject(pdfwriter, $$CORE_ROOT_DIR/PdfWriter/PdfWriter.pro,\
kernel unicodeconverter graphics)
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
kernel unicodeconverter graphics pdfwriter)
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
kernel unicodeconverter graphics pdfwriter)
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
kernel unicodeconverter graphics pdfwriter)
addSubProject(pdfreader, $$CORE_ROOT_DIR/PdfReader/PdfReader.pro,\
kernel unicodeconverter graphics pdfwriter htmlrenderer)
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
kernel unicodeconverter graphics network)
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
kernel unicodeconverter graphics)
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
kernel unicodeconverter graphics htmlfile2)
!no_x2t {
addSubProject(docxformat, $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/DocxFormatLib.pro)
addSubProject(pptxformat, $$CORE_ROOT_DIR/ASCOfficePPTXFile/PPTXLib/Linux/PPTXFormatLib/PPTXFormatLib.pro)
addSubProject(docxfile, $$CORE_ROOT_DIR/ASCOfficeDocxFile2/Linux/ASCOfficeDocxFile2Lib.pro)
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/ASCOfficeTxtFile/TxtXmlFormatLib/Linux/TxtXmlFormatLib.pro)
addSubProject(rtfformat, $$CORE_ROOT_DIR/ASCOfficeRtfFile/RtfFormatLib/Linux/RtfFormatLib.pro)
addSubProject(pptformat, $$CORE_ROOT_DIR/ASCOfficePPTFile/PPTFormatLib/Linux/PPTFormatLib.pro)
addSubProject(docformat, $$CORE_ROOT_DIR/ASCOfficeDocFile/DocFormatLib/Linux/DocFormatLib.pro)
addSubProject(odffilereader,$$CORE_ROOT_DIR/ASCOfficeOdfFile/linux/OdfFileReaderLib.pro)
addSubProject(odffilewriter,$$CORE_ROOT_DIR/ASCOfficeOdfFileW/linux/OdfFileWriterLib.pro)
addSubProject(xlsformat, $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/XlsFormatLib.pro)
addSubProject(xlsbformat, $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/XlsbFormatLib.pro)
addSubProject(vbaformat, $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/VbaFormatLib.pro)
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
docxformat pptxformat docxfile txtxmlformat rtfformat pptformat docformat odffilereader odffilewriter xlsformat xlsbformat fb2file epubfile docxrenderer)
}
!no_use_common_binary {
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
kernel unicodeconverter graphics)
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
kernel unicodeconverter graphics)
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
kernel unicodeconverter graphics doctrenderer)
}
!no_tests {
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
}
core_and_multimedia {
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
kernel unicodeconverter graphics)
}
desktop {
message(desktop)
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
kernel unicodeconverter graphics)
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdfwriter pdfreader djvufile xpsfile)
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
documentscore)
!core_mac {
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
documentscore)
}
!no_desktop_apps {
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
documentscore videoplayer)
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
documentscore videoplayer)
}
}
mobile {
message(mobile)
!desktop {
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
}
}

View File

@ -21,12 +21,13 @@ parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp"
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="https", help="can be used only if update is set to true - 'https', 'ssh'")
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="auto", help="can be used only if update is set to true - 'https', 'ssh'")
parser.add_option("--branding", action="store", type="string", dest="branding", default="", help="provides branding path")
parser.add_option("--branding-name", action="store", type="string", dest="branding-name", default="", help="provides branding name")
parser.add_option("--branding-url", action="store", type="string", dest="branding-url", default="", help="provides branding url")

View File

@ -1,3 +1,3 @@
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode"
sdkjs-plugin-server="speech, zotero, mendeley"
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode, zotero"
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition, drawio"
sdkjs-addons="sdkjs-forms"

View File

@ -1,11 +1,19 @@
FROM onlyoffice/documentserver:latest
RUN apt-get update -y && \
apt-get install git -y \
python -y \
python3 -y \
openjdk-11-jdk -y \
bzip2 -y \
npm -y && \
npm install -g grunt-cli -y && \
npm install -g grunt grunt-cli -y && \
ln -s /usr/bin/python3 /usr/bin/python && \
ln -s /usr/bin/pip3 /usr/bin/pip && \
git clone --depth 1 https://github.com/ONLYOFFICE/build_tools.git var/www/onlyoffice/documentserver/build_tools && \
sed -i '/documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}/d' /app/ds/run-document-server.sh && \
#Set Up Debug Logging
sed -i 's/WARN/ALL/g' /etc/onlyoffice/documentserver/log4js/production.json && \
#Start test example
if [ -s /etc/supervisor/conf.d/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /etc/supervisor/conf.d/ds-example.conf; fi && \
if [ -s /app/ds/setup/config/supervisor/ds/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /app/ds/setup/config/supervisor/ds/ds-example.conf; fi && \
rm -rf /var/lib/apt/lists/*
ENTRYPOINT python /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver && /bin/sh -c /app/ds/run-document-server.sh
ENTRYPOINT python3 /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver $@ && /bin/sh -c /app/ds/run-document-server.sh

View File

@ -1,50 +1,219 @@
# Docker
This directory containing instruction for developers,
who want to change something in sdkjs or web-apps module,
who want to change something in sdkjs or web-apps or server module,
but don't want to compile pretty compilcated core product to make those changes.
## Installing ONLYOFFICE Docs
## System requirements
## How to use - Linux or macOS
### Windows
**Note**: You need the latest Docker version installed.
You need the latest
[Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/)
installed.
You might need to pull **onlyoffice/documentserver** image:
**Note**: Docker Desktop does not start automatically after installation.
You should manually start the **Docker Desktop** application.
**Note**: If you have problems running Docker Desktop with the
"Use WSL 2 instead of Hyper-V" installation option,
try reinstalling it without this option.
### Linux or macOS
You need the latest
[Docker](https://docs.docker.com/engine/install/)
version installed.
## Create develop Docker Images
To create a image with the ability to include external non-minified sdkjs code,
use the following commands:
### Clone development environment to work dir
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
```
### Modify Docker Images
**Note**: Do not prefix docker command with sudo.
[This](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user)
instruction show how to use docker without sudo.
```bash
docker pull onlyoffice/documentserver
```
### Create develop image
To create a image with the ability to include external non-minified sdkjs code,
use the following command:
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
cd build_tools/develop
docker build -t documentserver-develop .
docker pull onlyoffice/documentserver
docker build --no-cache -t documentserver-develop .
```
**Note**: The dot at the end is required.
### Connecting external folders
**Note**: Sometimes script may fail due to network errors. Just restart it.
To connect external folders to the container,
you need to pass the "-v" parameter
along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow
## Clone development modules
Clone development modules to the work dir
* `sdkjs` repo is located [here](https://github.com/ONLYOFFICE/sdkjs/)
* `web-apps` repo is located [here](https://github.com/ONLYOFFICE/web-apps/)
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
```bash
docker run -i -t -d -p 80:80 --restart=always \
-v /host-dir/sdkjs:/var/www/onlyoffice/documentserver/sdkjs \
-v /host-dir/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
git clone https://github.com/ONLYOFFICE/sdkjs.git
git clone https://github.com/ONLYOFFICE/web-apps.git
git clone https://github.com/ONLYOFFICE/server.git
```
## Start server with external folders
To mount external folders to the container,
you need to pass the "-v" parameter
along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow.
The folders `server` is optional
**Note**: Run command with the current working directory
containing `sdkjs`, `web-apps`...
**Note**: ONLYOFFICE server uses port 80.
Look for another application using port 80 and stop it
**Note**: Server start with `sdkjs` and `web-apps` takes 15 minutes
and takes 20 minutes with `server`
### docker run on Windows (PowerShell)
**Note**: Run PowerShell as administrator to fix EACCES error when installing
node_modules
run with `sdkjs` and `web-apps`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```
or run with `sdkjs`, `web-apps` and `server`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $pwd/server:/var/www/onlyoffice/documentserver/server documentserver-develop
```
### docker run on Linux or macOS
run with `sdkjs` and `web-apps`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```
or run with `sdkjs`, `web-apps` and `server`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $(pwd)/server:/var/www/onlyoffice/documentserver/server documentserver-develop
```
## Open editor
After the server starts successfully, you will see Docker log messages like this
```bash
[Date] [WARN] [localhost] [docId] [userId] nodeJS
```
To try the document editor, open a browser tab and type
[http://localhost/example](http://localhost/example) into the URL bar.
**Note**: Disable **ad blockers** for localhost page.
It may block some scripts (like Analytics.js)
## Modify sources
### To change something in `sdkjs` do the following steps
1)Edit source file. Let's insert an image url into each open document.
Following command inserts (in case of problems, you can replace URL)
`this.AddImageUrl(['http://localhost/example/images/logo.png']);`
after event
`this.sendEvent('asc_onDocumentContentReady');`
in file
`sdkjs/common/apiBase.js`
### change sdkjs on Windows (PowerShell)
```bash
(Get-Content sdkjs/common/apiBase.js) -replace "this\.sendEvent\('asc_onDocumentContentReady'\);", "this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);" | Set-Content sdkjs/common/apiBase.js
```
### change sdkjs on Linux or macOS
```bash
sed -i "s,this.sendEvent('asc_onDocumentContentReady');,this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);," sdkjs/common/apiBase.js
```
2)Delete browser cache or hard reload the page `Ctrl + Shift + R`
3)Open new file in browser
### To change something in `server` do the following steps
1)Edit source file. Let's send `"Hello World!"`
chart message every time a document is opened.
Following command inserts
`yield* onMessage(ctx, conn, {"message": "Hello World!"});`
in function
`sendAuthInfo`
in file
`server/DocService/sources/DocsCoServer.js`
### change server on Windows (PowerShell)
```bash
(Get-Content server/DocService/sources/DocsCoServer.js) -replace 'opt_hasForgotten, opt_openedAt\) \{', 'opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});' | Set-Content server/DocService/sources/DocsCoServer.js
```
### change server on Linux or macOS
```bash
sed -i 's#opt_hasForgotten, opt_openedAt) {#opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});#' server/DocService/sources/DocsCoServer.js
```
2)Restart document server process
**Note**: Look for ``CONTAINER_ID`` in the result of ``docker ps``.
```bash
docker exec -it CONTAINER_ID supervisorctl restart all
```
3)Open new file in browser
## Start server with additional functionality(addons)
To get additional functionality and branding you need to connect a branding folder,
additional addon folders and pass command line arguments
For example run with `onlyoffice` branding and
addons:`sdkjs-forms`, `sdkjs-ooxml`, `web-apps-mobile`
### docker run on Windows (PowerShell) with branding
**Note**: Run PowerShell as administrator to fix EACCES error when installing
node_modules
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true `
-v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps `
-v $pwd/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $pwd/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $pwd/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $pwd/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile `
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
```
### docker run on Linux or macOS with branding
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true \
-v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps \
-v $(pwd)/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $(pwd)/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $(pwd)/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $(pwd)/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile \
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
```

View File

@ -1,12 +1,57 @@
#!/usr/bin/env python
#!/usr/bin/env python3
import sys
sys.path.append(sys.argv[1] + '/build_tools/scripts')
sys.path.append(sys.argv[1] + '/build_tools/scripts/develop')
import build_js
import run_server
import config
import base
base.cmd_in_dir(sys.argv[1] + '/build_tools/', 'python', ['configure.py'])
config.parse()
git_dir = sys.argv[1];
build_js.build_js_develop(sys.argv[1])
base.print_info('argv :'+' '.join(sys.argv))
base.cmd_in_dir(git_dir + '/build_tools/', 'python3', ['configure.py', '--develop', '1'] + sys.argv[2:])
config.parse()
config.parse_defaults()
if base.is_exist(git_dir + "/server/FileConverter/bin/fonts.log"):
base.print_info('remove font cache to regenerate fonts in external sdkjs volume')
base.delete_file(git_dir + "/server/FileConverter/bin/fonts.log");
# external server volume
if base.is_exist(sys.argv[1] + '/server/DocService/package.json'):
base.print_info('replace supervisor cfg to run docservice and converter from source')
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
base.print_info('run_server.run_docker_server')
run_server.run_docker_server();
else:
#Fix theme generation for external sdkjs volume
if base.is_exist(git_dir + "/server/FileConverter/bin/DoctRenderer.config"):
base.print_info('replace DoctRenderer.config for external sdkjs volume')
base.generate_doctrenderer_config(git_dir + "/server/FileConverter/bin/DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
addons = {}
addons.update(base.get_sdkjs_addons())
addons.update(base.get_web_apps_addons())
staticContent = ""
for addon in addons:
if (addon):
staticContent += '"/' + addon + '": {"path": "/var/www/onlyoffice/documentserver/' + addon + '","options": {"maxAge": "7d"}},'
if staticContent:
base.print_info('replace production-linux.json for addons'+staticContent)
base.replaceInFileRE("/etc/onlyoffice/documentserver/production-linux.json", '"static_content": {.*', '"static_content": {' + staticContent)
base.print_info('replace supervisor cfg to run docservice and converter from pkg')
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
base.print_info('run_server.run_docker_sdk_web_apps: ' + git_dir)
run_server.run_docker_sdk_web_apps(git_dir)

54
make.py
View File

@ -1,19 +1,32 @@
#!/usr/bin/env python
import os
import sys
sys.path.append('scripts')
sys.path.append('scripts/develop')
sys.path.append('scripts/develop/vendor')
sys.path.append('scripts/core_common')
sys.path.append('scripts/core_common/modules')
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__name__ + '/scripts')
sys.path.append(__dir__name__ + '/scripts/develop')
sys.path.append(__dir__name__ + '/scripts/develop/vendor')
sys.path.append(__dir__name__ + '/scripts/core_common')
sys.path.append(__dir__name__ + '/scripts/core_common/modules')
sys.path.append(__dir__name__ + '/scripts/core_common/modules/android')
import config
import base
import build
import build_sln
import build_js
import build_server
import deploy
import make_common
import develop
import argparse
base.check_python()
parser = argparse.ArgumentParser(description="options")
parser.add_argument("--build-only-branding", action="store_true")
args = parser.parse_args()
if (args.build_only_branding):
base.set_env("OO_BUILD_ONLY_BRANDING", "1")
# parse configuration
config.parse()
@ -33,7 +46,7 @@ if ("1" != base.get_env("OO_RUNNING_BRANDING")) and ("" != config.option("brandi
base.cmd("git", ["clone", config.option("branding-url"), branding_dir])
base.cmd_in_dir(branding_dir, "git", ["fetch"], True)
if not is_exist or ("1" != config.option("update-light")):
base.cmd_in_dir(branding_dir, "git", ["checkout", "-f", config.option("branch")], True)
@ -59,40 +72,31 @@ if ("1" == config.option("update")):
base.configure_common_apps()
# developing...
develop.make();
develop.make()
# check only js builds
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
build_js.make()
exit(0)
#base.check_tools()
# core 3rdParty
make_common.make()
# build updmodule for desktop (only for windows version)
if config.check_option("module", "desktop"):
config.extend_option("qmake_addon", "URL_WEBAPPS_HELP=https://download.onlyoffice.com/install/desktop/editors/help/v" + base.get_env('PRODUCT_VERSION') + "-1/apps")
config.extend_option("qmake_addon", "URL_WEBAPPS_HELP=https://download.onlyoffice.com/install/desktop/editors/help/v" + base.get_env('PRODUCT_VERSION') + "/apps")
if "windows" == base.host_platform():
config.extend_option("config", "updmodule")
config.extend_option("qmake_addon", "LINK=https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.xml")
if not base.is_file(base_dir + "/tools/WinSparkle-0.7.0.zip"):
base.cmd("curl.exe", ["https://d2ettrnqo7v976.cloudfront.net/winsparkle/WinSparkle-0.7.0.zip", "--output", base_dir + "/tools/WinSparkle-0.7.0.zip"])
if not base.is_dir(base_dir + "/tools/WinSparkle-0.7.0"):
base.cmd("7z.exe", ["x", base_dir + "/tools/WinSparkle-0.7.0.zip", "-otools"])
base.create_dir(base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle")
#base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/include", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/include")
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_32")
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/x64/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_64")
if ("windows" == base.host_platform()):
base.set_env("VIDEO_PLAYER_VLC_DIR", base_dir + "/../desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc")
base.set_env("DESKTOP_URL_UPDATES_MAIN_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.json")
base.set_env("DESKTOP_URL_UPDATES_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcastdev.json")
base.set_env("DESKTOP_URL_INSTALL_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/distrib/onlyoffice/<file>")
base.set_env("DESKTOP_URL_INSTALL_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/onlineinstallerdev/<file>")
# build
build.make()
build_sln.make()
# js
build_js.make()

View File

@ -2,17 +2,108 @@
# -*- coding: utf-8 -*-
import sys
sys.path.append('scripts')
sys.path.append("scripts")
import argparse
import package_common as common
import package_utils as utils
# config
utils.parse()
# parse
parser = argparse.ArgumentParser(description="Build packages.")
parser.add_argument("-P", "--platform", dest="platform", type=str,
action="store", help="Defines platform", required=True)
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
action="store", help="Defines targets", required=True)
parser.add_argument("-V", "--version", dest="version", type=str,
action="store", help="Defines version")
parser.add_argument("-B", "--build", dest="build", type=str,
action="store", help="Defines build")
parser.add_argument("-H", "--branch", dest="branch", type=str,
action="store", help="Defines branch")
parser.add_argument("-R", "--branding", dest="branding", type=str,
action="store", help="Provides branding path")
args = parser.parse_args()
# vars
common.os_family = utils.host_platform()
common.platform = args.platform
# Resolve the common build parameters from the parsed CLI arguments,
# falling back to CI environment variables where an option was omitted,
# and mirror them back into the environment for child processes.
common.prefix = common.platformPrefixes[common.platform] if common.platform in common.platformPrefixes else ""
common.targets = args.targets
common.clean = "clean" in args.targets
common.sign = "sign" in args.targets
common.deploy = "deploy" in args.targets
common.version = args.version if args.version else utils.get_env("PRODUCT_VERSION", "0.0.0")
utils.set_env("PRODUCT_VERSION", common.version)
utils.set_env("BUILD_VERSION", common.version)
common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0")
utils.set_env("BUILD_NUMBER", common.build)
common.branch = args.branch if args.branch else utils.get_env("BRANCH_NAME", "null")
utils.set_env("BRANCH_NAME", common.branch)
common.branding = args.branding
common.timestamp = utils.get_timestamp()
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
common.branding_dir = utils.get_abspath(common.workspace_dir + "/" + args.branding) if args.branding else common.workspace_dir
common.summary = []

# Echo the effective configuration into the build log.
for label, value in (
    ("os_family", common.os_family),
    ("platform", common.platform),
    ("prefix", common.prefix),
    ("targets", common.targets),
    ("clean", common.clean),
    ("sign", common.sign),
    ("deploy", common.deploy),
    ("version", common.version),
    ("build", common.build),
    ("branding", common.branding),
    ("timestamp", common.timestamp),
    ("workspace_dir", common.workspace_dir),
    ("branding_dir", common.branding_dir),
):
    utils.log(label + ": " + str(value))

# branding: make branded build scripts importable before the package
# modules below are imported, so a branding repo can override them.
if utils.branding is not None:
    branding_path = utils.get_path('..', utils.branding)
    sys.path.insert(-1, utils.get_path(branding_path, 'build_tools/scripts'))
if common.branding is not None:
    sys.path.insert(-1, utils.get_path("../" + common.branding + "/build_tools/scripts"))

import package_core
import package_desktop
import package_server
import package_builder
import package_mobile

# build
import package
package.make(utils.product)
utils.set_cwd(common.workspace_dir, verbose=True)

# Dispatch each requested packaging target.
if "core" in common.targets:
    package_core.make()
if "closuremaps_sdkjs_opensource" in common.targets:
    package_core.deploy_closuremaps_sdkjs("opensource")
if "closuremaps_sdkjs_commercial" in common.targets:
    package_core.deploy_closuremaps_sdkjs("commercial")
if "closuremaps_webapps" in common.targets:
    package_core.deploy_closuremaps_webapps("opensource")
if "desktop" in common.targets:
    package_desktop.make()
if "builder" in common.targets:
    package_builder.make()
for edition in ("community", "enterprise", "developer", "prerequisites"):
    if ("server_" + edition) in common.targets:
        package_server.make(edition)
if "mobile" in common.targets:
    package_mobile.make()

# summary: each entry is a one-item dict {step_name: succeeded}.
utils.log_h1("Build summary")
exitcode = 0
for entry in common.summary:
    step, ok = next(iter(entry.items()))
    if ok:
        utils.log("[ OK ] " + step)
    else:
        utils.log("[FAILED] " + step)
        exitcode = 1
exit(exitcode)

File diff suppressed because it is too large Load Diff

View File

@ -1,113 +0,0 @@
#!/usr/bin/env python
import config
import base
import os
import multiprocessing
def make_pro_file(makefiles_dir, pro_file):
    """Run qmake and make for *pro_file* on every configured platform.

    Generated makefiles land in *makefiles_dir* with a per-platform (and
    debug/branding) suffix. The process environment is snapshotted before
    each platform and restored afterwards so the Android/iOS toolchain
    tweaks do not leak between iterations.
    """
    platforms = config.option("platform").split()
    for platform in platforms:
        if not platform in config.platforms:
            continue
        print("------------------------------------------")
        print("BUILD_PLATFORM: " + platform)
        print("------------------------------------------")
        old_env = dict(os.environ)
        # if you need change output libraries path - set the env variable
        # base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
        isAndroid = False if (-1 == platform.find("android")) else True
        if isAndroid:
            # Android cross-compiles through the NDK llvm toolchain; put the
            # Qt and NDK bin directories in front of PATH.
            toolchain_platform = "linux-x86_64"
            if ("mac" == base.host_platform()):
                toolchain_platform = "darwin-x86_64"
            base.set_env("ANDROID_NDK_HOST", toolchain_platform)
            old_path = base.get_env("PATH")
            new_path = base.qt_setup(platform) + "/bin:"
            new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
            new_path += old_path
            base.set_env("PATH", new_path)
            base.set_env("ANDROID_NDK_PLATFORM", "android-21")
        if (-1 != platform.find("ios")):
            base.hack_xcode_ios()
        # makefile suffix
        file_suff = platform
        if (config.check_option("config", "debug")):
            file_suff += "_debug_"
        file_suff += config.option("branding")
        # setup qt
        qt_dir = base.qt_setup(platform)
        base.set_env("OS_DEPLOY", platform)
        # qmake CONFIG+=...
        config_param = base.qt_config(platform)
        # qmake ADDON
        qmake_addon = []
        if ("" != config.option("qmake_addon")):
            qmake_addon = config.option("qmake_addon").split()
        if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
            print("THIS PLATFORM IS NOT SUPPORTED")
            # NOTE(review): this skips the env restore below, leaking the
            # platform env tweaks — preserved from the original behavior.
            continue
        makefile = makefiles_dir + "/build.makefile_" + file_suff
        if not base.is_windows():
            if base.is_file(makefile):
                base.delete_file(makefile)
            print("make file: " + makefile)
            base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
            if ("1" == config.option("clean")):
                base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefile], True)
                base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefile], True)
                base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
            if not base.is_file(pro_file):
                base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
            if ("0" != config.option("multiprocess")):
                base.cmd_and_return_cwd(base.app_make(), ["-f", makefile, "-j" + str(multiprocessing.cpu_count())])
            else:
                base.cmd_and_return_cwd(base.app_make(), ["-f", makefile])
        else:
            # On Windows qmake/nmake must run inside a vcvars shell, so the
            # whole sequence is written to a .bat file and executed at once.
            qmake_bat = []
            qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
            qmake_bat.append("if exist ./" + makefile + " del /F ./" + makefile)
            qmake_addon_string = ""
            if ("" != config.option("qmake_addon")):
                qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
            qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
            if ("1" == config.option("clean")):
                qmake_bat.append("call nmake clean -f " + makefile)
                qmake_bat.append("call nmake distclean -f " + makefile)
                qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
            if ("0" != config.option("multiprocess")):
                qmake_bat.append("set CL=/MP")
            qmake_bat.append("call nmake -f " + makefile)
            base.run_as_bat(qmake_bat)
        # Restore the environment snapshot taken for this platform.
        os.environ.clear()
        os.environ.update(old_env)
    base.delete_file(".qmake.stash")
# make build.pro
def make():
    """Build build.pro via qmake on all platforms; for Windows ONLYOFFICE
    builder builds, also regenerate the docbuilder COM/.NET solutions."""
    make_pro_file("makefiles", "build.pro")
    if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
        # check replace
        new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/docbuilder.h")
        if ("2019" == config.option("vs-version")):
            base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com", "docbuilder.com_2019.sln")
            new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
            base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
            base.restorePathForBuilder(new_path_net)
        else:
            base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com", "docbuilder.com.sln")
        base.restorePathForBuilder(new_replace_path)
    return

View File

@ -27,6 +27,8 @@ def correct_sdkjs_licence(directory):
def make():
if ("1" == base.get_env("OO_NO_BUILD_JS")):
return
if not base.is_need_build_js():
return
base.set_env('NODE_ENV', 'production')
@ -39,6 +41,7 @@ def make():
base.create_dir(out_dir)
# builder
base.cmd_in_dir(base_dir + "/../web-apps/translation", "python", ["merge_and_check.py"])
build_interface(base_dir + "/../web-apps/build")
build_sdk_builder(base_dir + "/../sdkjs/build")
base.create_dir(out_dir + "/builder")
@ -53,19 +56,20 @@ def make():
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/desktop/sdkjs")
correct_sdkjs_licence(out_dir + "/desktop/sdkjs")
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/desktop/web-apps")
if not base.is_file(out_dir + "/desktop/sdkjs/common/AllFonts.js"):
base.copy_file(base_dir + "/../sdkjs/common/HtmlFileInternal/AllFonts.js", out_dir + "/desktop/sdkjs/common/AllFonts.js")
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/mobile")
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/mobile")
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/mobile")
deldirs = ['ie', 'mobile', 'embed']
[base.delete_dir(root + "/" + d) for root, dirs, f in os.walk(out_dir + "/desktop/web-apps/apps") for d in dirs if d in deldirs]
# for bug 62528. remove empty folders
walklist = list(os.walk(out_dir + "/desktop/sdkjs"))
[os.remove(p) for p, _, _ in walklist[::-1] if len(os.listdir(p)) == 0]
base.copy_file(base_dir + "/../web-apps/apps/api/documents/index.html.desktop", out_dir + "/desktop/web-apps/apps/api/documents/index.html")
build_interface(base_dir + "/../desktop-apps/common/loginpage/build")
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/index.html", out_dir + "/desktop/index.html")
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/noconnect.html", out_dir + "/desktop/noconnect.html")
# mobile
if config.check_option("module", "mobile"):
build_sdk_native(base_dir + "/../sdkjs/build", False)
@ -73,44 +77,38 @@ def make():
base.create_dir(out_dir + "/mobile/sdkjs")
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_word.js")
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/cell/native/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_cell.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_slide.js")
base.join_scripts(prefix_js, out_dir + "/mobile/sdkjs/banners.js")
base.create_dir(out_dir + "/mobile/sdkjs/word")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/word/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/cell")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/cell/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/slide")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/slide/script.bin")
base.delete_file(out_dir + "/mobile/sdkjs/banners_word.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_cell.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_slide.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
return
# JS build
def _run_npm(directory):
return base.cmd_in_dir(directory, "npm", ["install"])
def _run_npm_ci(directory):
return base.cmd_in_dir(directory, "npm", ["ci"])
def _run_npm_cli(directory):
return base.cmd_in_dir(directory, "npm", ["install", "-g", "grunt-cli"])
@ -139,33 +137,47 @@ def build_sdk_desktop(directory):
def build_sdk_builder(directory):
#_run_npm_cli(directory)
_run_npm(directory)
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param())
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param() + ["--map"])
return
def build_sdk_native(directory, minimize=True):
#_run_npm_cli(directory)
_run_npm(directory)
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param())
addons = base.sdkjs_addons_param()
if not config.check_option("sdkjs-addons", "sdkjs-native"):
addons.append("--addon=sdkjs-native")
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
return
def build_sdkjs_develop(root_dir):
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
def build_js_develop(root_dir):
#_run_npm_cli(root_dir + "/sdkjs/build")
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
build_sdkjs_develop(root_dir)
_run_npm(root_dir + external_folder + "/web-apps/build")
_run_npm(root_dir + external_folder + "/web-apps/build/sprites")
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
base.cmd_in_dir(root_dir + external_folder + "/web-apps/translation", "python", ["merge_and_check.py"])
old_cur = os.getcwd()
old_product_version = base.get_env("PRODUCT_VERSION")
base.set_env("PRODUCT_VERSION", old_product_version + "d")
os.chdir(root_dir + external_folder + "/web-apps/vendor/framework7-react")
base.cmd("npm", ["install"])
base.cmd("npm", ["ci"])
base.cmd("npm", ["run", "deploy-word"])
base.cmd("npm", ["run", "deploy-cell"])
base.cmd("npm", ["run", "deploy-slide"])

View File

@ -14,6 +14,9 @@ parser.add_option("--output",
parser.add_option("--write-version",
action="store_true", dest="write_version", default=False,
help="Create version file of build")
parser.add_option("--minimize",
action="store", type="string", dest="minimize", default="0",
help="Is minimized version")
(options, args) = parser.parse_args(arguments)
def write_version_files(output_dir):
@ -31,7 +34,12 @@ def write_version_files(output_dir):
# parse configuration
config.parse()
config.extend_option("jsminimize", "0")
config.parse_defaults()
isMinimize = False
if ("1" == options.minimize or "true" == options.minimize):
isMinimize = True
config.set_option("jsminimize", "disable")
branding = config.option("branding-name")
if ("" == branding):
@ -45,41 +53,32 @@ if (options.output):
base.create_dir(out_dir)
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
build_js.build_sdk_native(base_dir + "/../sdkjs/build", isMinimize)
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_word.js")
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/cell/native/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_cell.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_slide.js")
base.join_scripts(prefix_js, out_dir + "/banners.js")
base.create_dir(out_dir + "/word")
base.join_scripts([out_dir + "/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/word/script.bin")
base.create_dir(out_dir + "/cell")
base.join_scripts([out_dir + "/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/cell/script.bin")
base.create_dir(out_dir + "/slide")
base.join_scripts([out_dir + "/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/slide/script.bin")
base.delete_file(out_dir + "/banners_word.js")
base.delete_file(out_dir + "/banners_cell.js")
base.delete_file(out_dir + "/banners_slide.js")
base.delete_file(out_dir + "/banners.js")
# Write sdk version mark file if needed
if (options.write_version):

View File

@ -16,8 +16,7 @@ def make():
if("" != config.option("branding")):
branding_dir = git_dir + '/' + config.option("branding") + '/server'
base.cmd_in_dir(server_dir, "npm", ["install"])
base.cmd_in_dir(server_dir, "grunt", ["--no-color", "-v"] + base.server_addons_param())
build_server_with_addons()
#env variables
product_version = base.get_env('PRODUCT_VERSION')
@ -30,18 +29,16 @@ def make():
cur_date = datetime.date.today().strftime("%m/%d/%Y")
server_build_dir = server_dir + "/build/server"
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
base.replaceInFileRE(server_build_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
base.replaceInFileRE(server_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
custom_public_key = branding_dir + '/debug.js'
if(base.is_exist(custom_public_key)):
base.copy_file(custom_public_key, server_build_dir + '/Common/sources')
base.copy_file(custom_public_key, server_dir + '/Common/sources')
pkg_target = "node14"
pkg_target = "node16"
if ("linux" == base.host_platform()):
pkg_target += "-linux"
@ -51,16 +48,26 @@ def make():
if ("windows" == base.host_platform()):
pkg_target += "-win"
base.cmd_in_dir(server_build_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
base.cmd_in_dir(server_build_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
base.cmd_in_dir(server_build_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
base.cmd_in_dir(server_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
base.cmd_in_dir(server_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
base.cmd_in_dir(server_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
example_dir = base.get_script_dir() + "/../../document-server-integration/web/documentserver-example/nodejs"
base.delete_dir(example_dir + "/node_modules")
base.cmd_in_dir(example_dir, "npm", ["install"])
base.cmd_in_dir(example_dir, "npm", ["ci"])
base.cmd_in_dir(example_dir, "pkg", [".", "-t", pkg_target, "-o", "example"])
def build_server_with_addons():
addons = {}
addons["server"] = [True, False]
addons.update(base.get_server_addons())
for addon in addons:
if (addon):
addon_dir = base.get_script_dir() + "/../../" + addon
base.cmd_in_dir(addon_dir, "npm", ["ci"])
base.cmd_in_dir(addon_dir, "npm", ["run", "build"])
def build_server_develop():
server_dir = base.get_script_dir() + "/../../server"
base.cmd_in_dir(server_dir, "npm", ["install"])
base.cmd_in_dir(server_dir, "npm", ["ci"])
base.cmd_in_dir(server_dir, "grunt", ["develop", "-v"] + base.server_addons_param())

63
scripts/build_sln.py Normal file
View File

@ -0,0 +1,63 @@
#!/usr/bin/env python
import config
import base
import os
import sys
sys.path.append(os.path.dirname(__file__) + "/..")
import sln
import qmake
# make solution
def make(solution=""):
    """Build every project of a sln.json solution for each configured
    platform, then (builder module, ONLYOFFICE branding) regenerate the
    COM/.NET and Java docbuilder wrappers.

    :param solution: path to the solution json; defaults to "./sln.json".
    """
    platforms = config.option("platform").split()
    for platform in platforms:
        if not platform in config.platforms:
            continue
        print("------------------------------------------")
        print("BUILD_PLATFORM: " + platform)
        print("------------------------------------------")
        if ("" == solution):
            solution = "./sln.json"
        projects = sln.get_projects(solution, platform)
        for pro in projects:
            qmake_main_addon = ""
            if (0 == platform.find("android")) and (-1 != pro.find("X2tConverter.pro")):
                # x2t debug builds are stripped unless explicitly disabled.
                if config.check_option("config", "debug") and not config.check_option("config", "disable_x2t_debug_strip"):
                    print("[WARNING:] temporary enable strip for x2t library in debug")
                    qmake_main_addon += "build_strip_debug"
            qmake.make(platform, pro, qmake_main_addon)
            if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
                qmake.make(platform, pro, "xcframework_platform_ios_simulator")
    if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
        # check replace
        directory_builder_branding = os.getcwd() + "/../core/DesktopEditor/doctrenderer"
        if base.is_dir(directory_builder_branding):
            new_replace_path = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.com/src/docbuilder.h")
            if ("2019" == config.option("vs-version")):
                base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
                new_path_net = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.net/src/docbuilder.net.cpp")
                base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
                base.restorePathForBuilder(new_path_net)
            else:
                base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
            base.restorePathForBuilder(new_replace_path)
    # build Java docbuilder wrapper
    if config.check_option("module", "builder") and "onlyoffice" == config.branding():
        for platform in platforms:
            if not platform in config.platforms:
                continue
            # build JNI library
            qmake.make(platform, base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java/src/jni/docbuilder_jni.pro", "", True)
            # build Java code to JAR
            base.cmd_in_dir(base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java", "python", ["make.py"])
    return

View File

@ -70,19 +70,18 @@ def parse():
options["platform"] += " android_arm64_v8a android_armv7 android_x86 android_x86_64"
# check vs-version
if ("" == option("vs-version")):
options["vs-version"] = "2015"
# enable v8 8.9 version, if compiler support sources
if ("linux" == host_platform) and (5004 <= base.get_gcc_version()) and not check_option("platform", "android"):
extend_option("config", "v8_version_89")
if ("windows" == host_platform) and ("" == option("vs-version")):
options["vs-version"] = "2019"
if check_option("platform", "win_64_xp") or check_option("platform", "win_32_xp"):
options["vs-version"] = "2015"
if ("windows" == host_platform) and ("2019" == option("vs-version")):
extend_option("config", "v8_version_89")
extend_option("config", "vs2019")
extend_option("config", "vs2019")
if check_option("platform", "linux_arm64"):
extend_option("config", "v8_version_89")
if is_cef_107():
extend_option("config", "cef_version_107")
if is_v8_60():
extend_option("config", "v8_version_60")
# check vs-path
if ("windows" == host_platform) and ("" == option("vs-path")):
@ -108,6 +107,23 @@ def parse():
if not "arm64-toolchain-bin" in options:
options["arm64-toolchain-bin"] = "/usr/bin"
if check_option("platform", "ios"):
if not check_option("config", "no_bundle_xcframeworks"):
if not check_option("config", "bundle_xcframeworks"):
extend_option("config", "bundle_xcframeworks")
if check_option("config", "bundle_xcframeworks"):
if not check_option("config", "bundle_dylibs"):
extend_option("config", "bundle_dylibs")
if check_option("use-system-qt", "1"):
base.cmd_in_dir(base.get_script_dir() + "/../tools/linux", "python", ["use_system_qt.py"])
options["qt-dir"] = base.get_script_dir() + "/../tools/linux/system_qt"
# disable all warnings (enable if needed with core_enable_all_warnings options)
if not check_option("config", "core_enable_all_warnings"):
extend_option("config", "core_disable_all_warnings")
return
def check_compiler(platform):
@ -166,6 +182,9 @@ def extend_option(name, value):
else:
options[name] = value
def set_option(name, value):
options[name] = value
def branding():
branding = option("branding-name")
if ("" == branding):
@ -203,4 +222,28 @@ def parse_defaults():
options[name] = options[name].replace("default", defaults_options[name])
else:
options[name] = defaults_options[name]
if ("config_addon" in defaults_options):
extend_option("config", defaults_options["config_addon"])
return
def is_cef_107():
if ("linux" == base.host_platform()) and (5004 > base.get_gcc_version()) and not check_option("platform", "android"):
return True
return False
def is_v8_60():
if check_option("platform", "linux_arm64"):
return False
if ("linux" == base.host_platform()) and (5004 > base.get_gcc_version()) and not check_option("platform", "android"):
return True
if ("windows" == base.host_platform()) and ("2015" == option("vs-version")):
return True
#if check_option("config", "use_v8"):
# return True
return False

View File

@ -13,12 +13,16 @@ import cef
import icu
import openssl
import curl
import websocket
import websocket_all
import v8
import html2
import iwork
import hunspell
import glew
import harfbuzz
import hyphen
import googletest
import libvlc
def check_android_ndk_macos_arm(dir):
if base.is_dir(dir + "/darwin-x86_64") and not base.is_dir(dir + "/darwin-arm64"):
@ -39,10 +43,18 @@ def make():
openssl.make()
v8.make()
html2.make()
iwork.make(False)
hunspell.make(False)
harfbuzz.make()
glew.make()
hyphen.make()
googletest.make()
if config.check_option("build-libvlc", "1"):
libvlc.make()
if config.check_option("module", "mobile"):
curl.make()
websocket.make()
if (config.check_option("platform", "android")):
curl.make()
websocket_all.make()
return

View File

@ -0,0 +1,170 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import re
def get_android_ndk_version():
    """Return the NDK version string.

    Derived from the last path component of $ANDROID_NDK_ROOT (standard SDK
    layouts end in <sdk>/ndk/<version>); defaults to "21.1.6352462" when the
    variable is unset.
    NOTE(review): re-defined with a hard-coded value later in this module,
    which shadows this implementation — confirm which one is intended.
    """
    root = base.get_env("ANDROID_NDK_ROOT")
    if "" == root:
        root = "21.1.6352462"
    return root.strip("/").split("/")[-1]
def get_android_ndk_version_major():
    """Major NDK version as an int; non-digit characters are stripped."""
    major = get_android_ndk_version().split(".")[0]
    return int(re.sub("[^0-9]", "", major))
def get_sdk_api():
    """Android API level to target: "21" for NDKs before r23, else "23"."""
    if get_android_ndk_version_major() < 23:
        return "21"
    return "23"
# Architectures to build, in build order.
archs = ["arm64", "arm", "x86_64", "x86"]

# Per-architecture toolchain parameters.
# NOTE(review): the arm entries carry an "abi" key while the x86 entries
# carry "arch"; only "target"/"dst"/"api"/"old" are read by
# prepare_platform in this module — confirm the "abi"/"arch" split is
# intentional.
platforms = {
    "arm64" : {
        "abi" : "arm64-v8a",
        "target" : "aarch64-linux-android",
        "dst" : "arm64_v8a",
        "api" : get_sdk_api(),
        "old" : "aarch64-linux-android"
    },
    "arm" : {
        "abi" : "armeabi-v7a",
        "target" : "armv7a-linux-androideabi",
        "dst" : "armv7",
        "api" : get_sdk_api(),
        "old" : "arm-linux-android"
    },
    "x86_64" : {
        "arch" : "x86_64",
        "target" : "x86_64-linux-android",
        "dst" : "x86_64",
        "api" : get_sdk_api(),
        "old" : "x86_64-linux-android"
    },
    "x86" : {
        "arch" : "x86",
        "target" : "i686-linux-android",
        "dst" : "x86",
        "api" : get_sdk_api(),
        "old" : "i686-linux-android"
    }
}

# todo: check arm host!
# Host toolchain descriptor; only linux and darwin x86_64 hosts handled.
if "linux" == base.host_platform():
    host = {"name": "linux", "arch": "linux-x86_64"}
else:
    host = {"name": "darwin", "arch": "darwin-x86_64"}
def get_android_ndk_version():
    """Pinned NDK version string.

    NOTE(review): this re-definition shadows the env-based implementation
    earlier in this module, pinning the NDK version — confirm the override
    is intentional.
    """
    #return "26.2.11394342"
    return "21.1.6352462"

def get_android_ndk_version_major():
    """Major component of the pinned NDK version, as an int."""
    return int(get_android_ndk_version().split(".")[0])
def get_options_dict_as_array(opts):
    """Flatten {key: value} into ["key=value", ...], preserving order."""
    return [key + "=" + opts[key] for key in opts]
def get_options_array_as_string(opts):
    """Render a list of option strings as one space-separated string."""
    separator = " "
    return separator.join(opts)
def ndk_dir():
    """Path to the Android NDK root, as exported in ANDROID_NDK_ROOT."""
    root = base.get_env("ANDROID_NDK_ROOT")
    return root
def sdk_dir():
    """Android SDK root inferred from the NDK path.

    A side-by-side install ("<sdk>/ndk/<version>") goes up two levels;
    any other layout goes up one.
    """
    ndk_path = ndk_dir()
    levels_up = "/../.." if "/ndk/" in ndk_path else "/.."
    return ndk_path + levels_up
def toolchain_dir():
    """Prebuilt llvm toolchain directory for the current host."""
    root = ndk_dir()
    return root + "/toolchains/llvm/prebuilt/" + host["arch"]
def prepare_platform(arch, cpp_standard=11):
    """Export the cross-compilation environment for Android *arch*.

    Sets the TARGET/TOOLCHAIN/CC/CXX/binutils variables plus CFLAGS,
    CXXFLAGS, CPPFLAGS and LDFLAGS expected by autoconf-style builds, and
    prepends the NDK toolchain bin directory to PATH.

    :param arch: key into the module-level ``platforms`` table.
    :param cpp_standard: C++ standard to request; values >= 11 add the
        matching ``-std=c++NN`` flag to CXXFLAGS (previously hard-coded
        to ``-std=c++11`` regardless of the argument).
    """
    target = platforms[arch]["target"]
    api = platforms[arch]["api"]
    toolchain = toolchain_dir()
    base.set_env("TARGET", target)
    base.set_env("TOOLCHAIN", toolchain)
    base.set_env("NDK_STANDARD_ROOT", toolchain)
    base.set_env("ANDROIDVER", api)
    base.set_env("ANDROID_API", api)
    base.set_env("AR", toolchain + "/bin/llvm-ar")
    base.set_env("AS", toolchain + "/bin/llvm-as")
    base.set_env("LD", toolchain + "/bin/ld")
    base.set_env("RANLIB", toolchain + "/bin/llvm-ranlib")
    base.set_env("STRIP", toolchain + "/bin/llvm-strip")
    base.set_env("CC", target + api + "-clang")
    base.set_env("CXX", target + api + "-clang++")
    ld_flags = "-Wl,--gc-sections,-rpath-link=" + toolchain + "/sysroot/usr/lib/"
    if (23 > get_android_ndk_version_major()):
        # Pre-r23 NDKs keep per-target libraries in separate directories.
        ld_flags += (" -L" + toolchain + "/" + platforms[arch]["old"] + "/lib")
        ld_flags += (" -L" + toolchain + "/sysroot/usr/lib/" + platforms[arch]["old"] + "/" + api)
    base.set_env("LDFLAGS", ld_flags)
    base.set_env("PATH", toolchain + "/bin" + os.pathsep + base.get_env("PATH"))
    cflags = [
        "-Os",
        "-ffunction-sections",
        "-fdata-sections",
        "-fvisibility=hidden",
        "-Wno-unused-function",
        "-fPIC",
        "-I" + toolchain + "/sysroot/usr/include",
        "-D__ANDROID_API__=" + api,
        "-DANDROID"
    ]
    cflags_string = " ".join(cflags)
    cppflags_string = cflags_string
    if (cpp_standard >= 11):
        # Generalized: honor the requested standard instead of pinning c++11.
        cppflags_string += " -std=c++" + str(cpp_standard)
    base.set_env("CFLAGS", cflags_string)
    base.set_env("CXXFLAGS", cppflags_string)
    # Bug fix: was set_env("CPPPLAGS", ...) — a typo that left CPPFLAGS
    # unset here (extend_cflags in this module correctly uses "CPPFLAGS").
    base.set_env("CPPFLAGS", cflags_string)
    return
def extend_cflags(params):
    """Append *params* to CFLAGS and mirror the result into CPPFLAGS."""
    updated = base.get_env("CFLAGS") + " " + params
    base.set_env("CFLAGS", updated)
    base.set_env("CPPFLAGS", updated)
    return
def extend_cxxflags(params):
    """Append *params* to CXXFLAGS."""
    updated = base.get_env("CXXFLAGS") + " " + params
    base.set_env("CXXFLAGS", updated)
    return
def extend_ldflags(params):
    """Append *params* to LDFLAGS."""
    updated = base.get_env("LDFLAGS") + " " + params
    base.set_env("LDFLAGS", updated)
    return

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Location of the curl sources inside core's third-party tree, normalized
# to an absolute path with a trailing slash.
current_dir = os.path.abspath(base.get_script_dir() + "/../../core/Common/3dParty/curl")
if not current_dir.endswith("/"):
    current_dir += "/"

# Upstream release tag and unpacked source directory name.
lib_version = "curl-7_68_0"
lib_name = "curl-7.68.0"
def fetch():
    """Download and unpack the curl release tarball (no-op when present)."""
    if base.is_dir(current_dir + lib_name):
        return
    archive = current_dir + lib_name + ".tar.gz"
    base.cmd("curl", ["-L", "-s", "-o", archive,
        "https://github.com/curl/curl/releases/download/" + lib_version + "/" + lib_name + ".tar.gz"])
    base.cmd("tar", ["xfz", archive, "-C", current_dir])
    return
def build_host():
    """No host-side build is needed for curl; kept for interface symmetry."""
    return
def build_arch(arch):
    """Cross-compile static libcurl for a single Android ABI.

    Skips the build entirely when the destination directory already exists.
    Links against the matching prebuilt OpenSSL from ../openssl.
    """
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    old_cur = os.getcwd()
    os.chdir(current_dir + lib_name)
    # Map build_tools arch names onto autoconf host triples.
    host_triples = {
        "arm64": "aarch64-linux-android",
        "arm": "arm-linux-androideabi",
        "x86_64": "x86_64-linux-android",
        "x86": "i686-linux-android",
    }
    params = []
    if arch in host_triples:
        params.append("--host=" + host_triples[arch])
    openssl_dir = os.path.abspath(current_dir + "../openssl/build/android/" + android_ndk.platforms[arch]["dst"])
    params += [
        "--enable-ipv6",
        "--enable-static",
        "--disable-shared",
        "--prefix=" + arch_build_dir,
        "--with-ssl=" + openssl_dir,
    ]
    base.cmd("./configure", params)
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(old_cur)
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libcurl.a", dst_dir)
    # Headers are shared across ABIs; export them once per run.
    base.copy_dir(arch_build_dir + "/include", current_dir + "build/android/include")
    base.delete_dir(arch_build_dir)
    return
def make():
    """Fetch curl and build it for every configured Android ABI.

    The per-arch builds mutate the process environment (compiler/linker
    variables), so the original environment is snapshotted first and restored
    afterwards.  Fix: restoration now happens in a ``finally`` block, so a
    failing build step no longer leaves the caller with a poisoned environment
    (previously the restore was skipped on exception).
    """
    old_env = dict(os.environ)
    try:
        fetch()
        build_host()
        for arch in android_ndk.archs:
            build_arch(arch)
    finally:
        # Restore even on failure so callers that continue do not inherit
        # the Android cross-compile flags.
        os.environ.clear()
        os.environ.update(old_env)
    return

if __name__ == "__main__":
    make()

View File

@ -0,0 +1,147 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Root of the ICU-for-Android third-party directory; normalized and kept with
# a trailing slash so plain string concatenation below forms valid paths.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# ICU release line; sources are fetched from the maint/maint-<icu_major> branch.
icu_major = "58"
icu_minor = "3"
# configure switches shared by the host cross-build and every Android ABI:
# static library only, renamed symbols, ICU data packaged as a .dat archive.
options = {
    "--enable-strict" : "no",
    "--enable-extras" : "no",
    "--enable-draft" : "yes",
    "--enable-samples" : "no",
    "--enable-tests" : "no",
    "--enable-renaming" : "yes",
    "--enable-icuio" : "no",
    "--enable-layoutex" : "no",
    "--with-library-bits" : "nochange",
    "--with-library-suffix" : "",
    "--enable-static" : "yes",
    "--enable-shared" : "no",
    "--with-data-packaging" : "archive"
}
# Size-oriented compile flags applied to every build (host and Android).
cpp_flags_base = [
    "-Os",
    "-ffunction-sections",
    "-fdata-sections",
    "-fvisibility=hidden",
    "-fPIC"
]
# ICU-specific flags added on top: keep full-size wchar_t/enums and configure
# the ICU feature macros for a static, non-renamed-off build.
cpp_flags = [
    "-fno-short-wchar",
    "-fno-short-enums",
    "-DU_USING_ICU_NAMESPACE=0",
    "-DU_HAVE_NL_LANGINFO_CODESET=0",
    "-DU_TIMEZONE=0",
    "-DU_DISABLE_RENAMING=0",
    "-DUCONFIG_NO_COLLATION=0",
    "-DUCONFIG_NO_FORMATTING=0",
    "-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
    "-DUCONFIG_NO_TRANSLITERATION=0",
    "-DU_STATIC_IMPLEMENTATION"
]
def fetch_icu():
    """Fetch the ICU sources (once).

    Shallow-clones the upstream maint/maint-<icu_major> branch into a
    temporary "icu2" checkout, keeps only the icu4c tree as "icu", and removes
    the checkout.  No-op when "icu" already exists.
    """
    if not base.is_dir(current_dir + "icu"):
        base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + icu_major, "https://github.com/unicode-org/icu.git", current_dir + "icu2"])
        base.copy_dir(current_dir + "icu2/icu4c", current_dir + "icu")
        base.delete_dir_with_access_error(current_dir + "icu2")
    if ("linux" == base.host_platform()):
        # Swap the "xlocale" include for "locale" — presumably because the
        # Linux toolchain no longer ships <xlocale.h>; idempotent on re-runs.
        base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
    # Deliberately disabled with "if False" — a pkgdata format-string patch
    # kept for reference only; it never executes.
    if False and ("mac" == base.host_platform()):
        base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
    return
def build_host():
    """Configure and build ICU for the host machine.

    The host build serves as the --with-cross-build base for the Android
    builds in build_arch(); its headers are also exported to build/include.
    Skipped entirely when icu/cross_build already exists.
    """
    cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
    if not base.is_dir(cross_build_dir):
        base.create_dir(cross_build_dir)
        os.chdir(cross_build_dir)
        ld_flags = "-pthread"
        if ("linux" == base.host_platform()):
            # GNU ld: drop unreferenced sections.
            ld_flags += " -Wl,--gc-sections"
        else:
            # gcc on OSX does not support --gc-sections
            ld_flags += " -Wl,-dead_strip"
        base.set_env("LDFLAGS", ld_flags)
        base.set_env("CPPFLAGS", android_ndk.get_options_array_as_string(cpp_flags_base + cpp_flags))
        host_type = "Linux"
        if ("mac" == base.host_platform()):
            host_type = "MacOSX/GCC"
        base.cmd("../source/runConfigureICU", [host_type, "--prefix=" + cross_build_dir] + android_ndk.get_options_dict_as_array(options))
        base.cmd("make", ["-j4"])
        # Third argument True — presumably tolerates a non-zero exit from
        # "make install"; TODO confirm against base.cmd's signature.
        base.cmd("make", ["install"], True)
        base.create_dir(current_dir + "build")
        base.copy_dir(cross_build_dir + "/include", current_dir + "build/include")
        os.chdir(current_dir)
    return
def build_arch(arch):
    """Cross-compile static ICU for one Android ABI.

    Produces libicuuc.a, libicudata.a and the icudt<major>l.dat data archive
    in build/<abi>.  No-op when that directory already exists.  Requires
    build_host() to have populated icu/cross_build first.
    """
    dst_dir = current_dir + "build/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    # Point CC/CXX and the base flags at the NDK toolchain, then add the
    # ICU-specific flags on top.
    android_ndk.prepare_platform(arch)
    android_ndk.extend_cflags(" ".join(cpp_flags))
    # NOTE(review): ndk_dir/toolchain are computed but not used below.
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()
    cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
    arch_build_dir = os.path.abspath(current_dir + "build/tmp")
    base.create_dir(arch_build_dir)
    os.chdir(arch_build_dir)
    # Cross-compile against the host build for this ABI's autoconf triple.
    base.cmd("./../../icu/source/configure", ["--with-cross-build=" + cross_build_dir] +
        android_ndk.get_options_dict_as_array(options) + ["--host=" + android_ndk.platforms[arch]["target"], "--prefix=" + arch_build_dir])
    base.cmd("make", ["-j4"])
    os.chdir(current_dir)
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libicuuc.a", dst_dir)
    base.copy_file(arch_build_dir + "/stubdata/libicudata.a", dst_dir)
    base.copy_file(arch_build_dir + "/data/out/icudt" + icu_major + "l.dat", dst_dir)
    base.delete_dir(arch_build_dir)
    return
def make():
    """Fetch ICU and build it for the host plus every configured Android ABI.

    The builds mutate the process environment, so the original environment is
    snapshotted and restored.  Fix: restoration now happens in a ``finally``
    block, so a failing build step no longer leaves the caller with modified
    compiler/linker variables (previously the restore was skipped on
    exception).
    """
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)
    old_env = dict(os.environ)
    try:
        fetch_icu()
        build_host()
        for arch in android_ndk.archs:
            build_arch(arch)
    finally:
        os.environ.clear()
        os.environ.update(old_env)
    return

if __name__ == "__main__":
    make()

View File

@ -0,0 +1,94 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Absolute path of the OpenSSL third-party directory; normalized and kept with
# a trailing slash so plain string concatenation below forms valid paths.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
    current_dir += "/"
# Tarball name; also the name of the unpacked source directory.
lib_name="openssl-1.1.1t"
# Configure options: static-only, no test suite, no assembler sources.
# NOTE(review): enable-ssl3/enable-ssl3-method/enable-md2 turn on legacy,
# cryptographically weak algorithms — presumably needed for compatibility;
# confirm they are still required before keeping them enabled.
options = [
    "no-shared",
    "no-tests",
    "enable-ssl3",
    "enable-ssl3-method",
    "enable-md2",
    "no-asm"
]
def fetch():
    """Download and unpack the OpenSSL source tarball; no-op if already unpacked."""
    src_dir = current_dir + lib_name
    if base.is_dir(src_dir):
        return
    archive = src_dir + ".tar.gz"
    base.cmd("curl", ["-L", "-s", "-o", archive,
                      "https://www.openssl.org/source/" + lib_name + ".tar.gz"])
    base.cmd("tar", ["xfz", archive, "-C", current_dir])
    return
def build_host():
    """Nothing to compile on the host; just ensure the output directories exist."""
    for sub in ("/build", "/build/android"):
        path = current_dir + sub
        if not base.is_dir(path):
            base.create_dir(path)
    return
def build_arch(arch):
    """Cross-compile static OpenSSL for one Android ABI.

    Uses OpenSSL's built-in ``android-<arch>`` Configure targets; results are
    copied into build/android/<abi>/{lib,include}.  No-op when that directory
    already exists.
    """
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    # NOTE(review): toolchain is computed but not used below.
    toolchain = android_ndk.toolchain_dir()
    # OpenSSL's android Configure targets locate the NDK through these.
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    old_cur = os.getcwd()
    os.chdir(current_dir + lib_name)
    base.cmd("./Configure", ["android-" + arch, "--prefix=" + arch_build_dir, "-D__ANDROID_API__=" + android_ndk.platforms[arch]["api"]] + options)
    # Inject -fvisibility=hidden so non-exported symbols stay hidden in the
    # produced libraries.
    base.replaceInFile("./Makefile", "LIB_CFLAGS=", "LIB_CFLAGS=-fvisibility=hidden ")
    base.replaceInFile("./Makefile", "LIB_CXXFLAGS=", "LIB_CXXFLAGS=-fvisibility=hidden ")
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(old_cur)
    base.create_dir(dst_dir)
    base.create_dir(dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libcrypto.a", dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libssl.a", dst_dir + "/lib")
    base.copy_dir(arch_build_dir + "/include", dst_dir + "/include")
    base.delete_dir(arch_build_dir)
    return
def make():
    """Fetch OpenSSL and build it for every configured Android ABI.

    The per-arch builds mutate the process environment, so the original
    environment is snapshotted and restored.  Fix: restoration now happens in
    a ``finally`` block, so a failing build step no longer leaves the caller
    with modified variables (previously the restore was skipped on exception).
    """
    old_env = dict(os.environ)
    try:
        fetch()
        build_host()
        for arch in android_ndk.archs:
            build_arch(arch)
    finally:
        os.environ.clear()
        os.environ.update(old_env)
    return

if __name__ == "__main__":
    make()

View File

@ -73,6 +73,8 @@ def make():
win_toolset = "msvc-14.2"
win_boot_arg = "vc142"
win_vs_version = "vc142"
# add "define=_ITERATOR_DEBUG_LEVEL=0" to b2 args before install for disable _ITERATOR_DEBUG_LEVEL
if (-1 != config.option("platform").find("win_64")) and not base.is_file("../build/win_64/lib/libboost_system-" + win_vs_version + "-mt-x64-1_72.lib"):
base.cmd("bootstrap.bat", [win_boot_arg])
base.cmd("b2.exe", ["headers"])
@ -101,9 +103,15 @@ def make():
base.copy_files(directory_build + "/linux_arm64/*.a", directory_build)
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios"):
old_cur2 = os.getcwd()
clang_correct()
os.chdir("../")
base.bash("./boost_ios")
os.chdir(old_cur2)
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios_xcframework"):
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios_simulator", "xcframework_platform_ios_simulator")
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios")
if (-1 != config.option("platform").find("android")) and not base.is_dir("../build/android"):
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"])

View File

@ -1,108 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
# Per-ABI parameters for the boost/b2 Android build.  toolset/abi/arch/
# address_model feed the b2 command line in make(); clang_triple, tool_triple,
# compiler_flags and linker_flags are exported as BFA_* environment variables
# (read by the wrapper scripts placed on PATH in make()).
platforms = {
    "arm64_v8a" : {
        "name" : "arm64-v8a",
        "toolset" : "arm64v8a",
        "clang_triple" : "aarch64-linux-android21",
        "tool_triple" : "aarch64-linux-android",
        "abi" : "aapcs",
        "arch" : "arm",
        "address_model" : "64",
        "compiler_flags" : "",
        "linker_flags" : ""
    },
    "armv7" : {
        "name" : "armeabi-v7a",
        "toolset" : "armeabiv7a",
        "clang_triple" : "armv7a-linux-androideabi16",
        "tool_triple" : "arm-linux-androideabi",
        "abi" : "aapcs",
        "arch" : "arm",
        "address_model" : "32",
        "compiler_flags" : "-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp",
        "linker_flags" : "-Wl,--fix-cortex-a8"
    },
    "x86" : {
        "name" : "x86",
        "toolset" : "x86",
        "clang_triple" : "i686-linux-android16",
        "tool_triple" : "i686-linux-android",
        "abi" : "sysv",
        "arch" : "x86",
        "address_model" : "32",
        "compiler_flags" : "",
        "linker_flags" : ""
    },
    "x86_64" : {
        "name" : "x86_64",
        "toolset" : "x8664",
        "clang_triple" : "x86_64-linux-android21",
        "tool_triple" : "x86_64-linux-android",
        "abi" : "sysv",
        "arch" : "x86",
        "address_model" : "64",
        "compiler_flags" : "",
        "linker_flags" : ""
    }
}
# Directory containing this script; all relative paths in make() hang off it.
base_dir = base.get_script_dir()
def make(platform):
    """Build static boost (filesystem/system/date_time/regex) for one Android ABI.

    Copies the bundled boost helper scripts into a temporary directory,
    patches them for a mac host, drives b2 through BFA_* environment
    variables, installs into ../build/android_<platform>, then removes the
    temporary directory.
    """
    tmp_build_dir = base_dir + "/core_common/modules/boost"
    # Always start from a clean copy of the helper scripts.
    if (base.is_dir(tmp_build_dir)):
        base.delete_dir(tmp_build_dir)
    base.copy_dir(base_dir + "/../tools/android/boost", tmp_build_dir)
    current_platform = platforms[platform]
    if (base.host_platform() == "mac"):
        # The scripts reference the linux NDK prebuilt directory; rewrite the
        # path for the darwin host toolchain.
        source = "prebuilt/linux-x86_64"
        dest = "prebuilt/darwin-x86_64"
        base.replaceInFile(tmp_build_dir + "/user-config.jam", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/hide/as", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/hide/strip", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/ar", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/clang++", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/ranlib", source, dest)
    # NOTE(review): build_dir_tmp is assigned but never used.
    build_dir_tmp = tmp_build_dir + "/tmp"
    base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex", "--prefix=../build/android_" + platform])
    base.cmd("./b2", ["headers"])
    base.cmd("./b2", ["--clean"])
    # Expose the wrapper toolchain scripts and the ABI parameters to b2.
    old_path = base.get_env("PATH")
    base.set_env("PATH", tmp_build_dir + "/bin:" + old_path)
    base.set_env("NDK_DIR", base.get_env("ANDROID_NDK_ROOT"))
    base.set_env("BFA_CLANG_TRIPLE_FOR_ABI", current_platform["clang_triple"])
    base.set_env("BFA_TOOL_TRIPLE_FOR_ABI", current_platform["tool_triple"])
    base.set_env("BFA_COMPILER_FLAGS_FOR_ABI", current_platform["compiler_flags"])
    base.set_env("BFA_LINKER_FLAGS_FOR_ABI", current_platform["linker_flags"])
    print(current_platform)
    base.cmd("./b2", ["-q", "-j4",
        "toolset=clang-" + current_platform["toolset"],
        "binary-format=elf",
        "address-model=" + current_platform["address_model"],
        "architecture=" + current_platform["arch"],
        "abi=" + current_platform["abi"],
        "link=static",
        "threading=multi",
        "target-os=android",
        "--user-config=" + tmp_build_dir + "/user-config.jam",
        "--ignore-site-config",
        "--layout=system",
        "install"], True)
    # Undo the PATH change and remove the temporary script copy.
    base.set_env("PATH", old_path)
    base.delete_dir(tmp_build_dir)
    return

View File

@ -5,9 +5,9 @@ sys.path.append('../..')
import config
import base
import os
import build
import qmake
def make(src_dir, modules, build_platform="android"):
def make(src_dir, modules, build_platform="android", qmake_addon=""):
old_cur = os.getcwd()
print("boost-headers...")
@ -23,17 +23,13 @@ def make(src_dir, modules, build_platform="android"):
pro_file_content.append("TARGET = boost_" + module)
pro_file_content.append("TEMPLATE = lib")
pro_file_content.append("CONFIG += staticlib")
if (build_platform == "android"):
pro_file_content.append("DEFINES += \"_HAS_AUTO_PTR_ETC=0\"")
pro_file_content.append("")
pro_file_content.append("CORE_ROOT_DIR = $$PWD/../../../../../..")
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
pro_file_content.append("include($$PWD/../../../../../base.pri)")
pro_file_content.append("")
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
pro_file_content.append("!isEmpty(OO_BRANDING_SUFFIX):MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"$$OO_BRANDING_SUFFIX\")")
pro_file_content.append("")
pro_file_content.append("BOOST_SOURCES=$$PWD/../..")
pro_file_content.append("INCLUDEPATH += $$BOOST_SOURCES")
pro_file_content.append("INCLUDEPATH += $$PWD/include")
@ -43,7 +39,7 @@ def make(src_dir, modules, build_platform="android"):
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
os.chdir(module_dir)
build.make_pro_file("./", module + ".pro")
qmake.make_all_platforms(module_dir + "/" + module + ".pro", qmake_addon)
os.chdir(old_cur)
return

View File

@ -15,54 +15,74 @@ def make():
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64"]
url = "http://d2ettrnqo7v976.cloudfront.net/cef/4280/"
for platform in platforms:
if not config.check_option("platform", platform):
continue
url = "http://d2ettrnqo7v976.cloudfront.net/cef/"
archive_name = "./cef_binary.7z"
if (-1 != platform.find("_xp")):
url += "4280/"
archive_name = "./cef_binary_xp.7z"
elif (config.check_option("config", "cef_version_107")):
url += "5304/"
archive_name = "./cef_binary_107.7z"
elif ("mac_64" == platform) and (config.check_option("config", "use_v8")):
url += "5060/"
archive_name = "./cef_binary_103.7z"
else:
url += "5414/"
url_platform = (url + platform + "/cef_binary.7z")
archive_name_data = archive_name + ".data"
if not base.is_dir(platform):
base.create_dir(platform)
os.chdir(platform)
data_url = base.get_file_last_modified_url(url_platform)
old_data_url = base.readFile("./cef_binary.7z.data")
old_data_url = base.readFile(archive_name_data)
build_dir_name = "build"
if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")):
build_dir_name = "build_107"
if ("mac_64" == platform) and (config.check_option("config", "use_v8")):
build_dir_name = "build_103"
if (data_url != old_data_url):
if base.is_file("./cef_binary.7z"):
base.delete_file("./cef_binary.7z")
if base.is_dir("build"):
base.delete_dir("build")
if base.is_file(archive_name):
base.delete_file(archive_name)
if base.is_dir(build_dir_name):
base.delete_dir(build_dir_name)
if base.is_dir("build"):
if base.is_dir(build_dir_name):
os.chdir(base_dir)
continue
# download
if not base.is_file("./cef_binary.7z"):
base.download(url_platform, "./cef_binary.7z")
if not base.is_file(archive_name):
base.download(url_platform, archive_name)
# extract
base.extract("./cef_binary.7z", "./")
base.extract(archive_name, "./")
base.delete_file("./cef_binary.7z.data")
base.writeFile("./cef_binary.7z.data", data_url)
base.delete_file(archive_name_data)
base.writeFile(archive_name_data, data_url)
base.create_dir("./build")
base.create_dir("./" + build_dir_name)
# deploy
if (0 != platform.find("mac")):
base.copy_files("cef_binary/Release/*", "build/")
base.copy_files("cef_binary/Resources/*", "build/")
if (0 == platform.find("linux")):
base.cmd("chmod", ["a+xr", "build/locales"])
if (0 == platform.find("mac")):
base.cmd("mv", ["Chromium Embedded Framework.framework", "build/Chromium Embedded Framework.framework"])
base.cmd("mv", ["Chromium Embedded Framework.framework", build_dir_name + "/Chromium Embedded Framework.framework"])
base.delete_dir("./Chromium Embedded Framework.framework")
else:
base.copy_files("cef_binary/Release/*", build_dir_name + "/")
base.copy_files("cef_binary/Resources/*", build_dir_name + "/")
if (0 == platform.find("linux")):
base.cmd("chmod", ["a+xr", build_dir_name + "/locales"])
base.delete_dir("./cef_binary")
os.chdir(base_dir)

View File

@ -2,21 +2,19 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import subprocess
import os
import base
import curl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
old_cur = os.getcwd()
os.chdir(path)
if (-1 != config.option("platform").find("android")):
if base.is_dir(path + "/build/android"):
os.chdir(old_cur)
return
subprocess.call(["./build-android-curl.sh"])
curl_android.make()
elif (-1 != config.option("platform").find("ios")):
if base.is_dir(path + "/build/ios"):
os.chdir(old_cur)

View File

@ -0,0 +1,19 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
def make():
    """Clone googletest (tag v1.13.0) into core/Common/3dParty/googletest, once."""
    print("[fetch]: googletest")
    target_dir = base.get_script_dir() + "/../../core/Common/3dParty/googletest"
    saved_cwd = os.getcwd()
    os.chdir(target_dir)
    if not base.is_dir("googletest"):
        base.cmd("git", ["clone", "https://github.com/google/googletest.git", "-b", "v1.13.0"])
    os.chdir(saved_cwd)
    return

View File

@ -3,6 +3,11 @@ sys.path.append('../../../scripts')
import base
import os
def clean():
    """Remove a previously fetched hunspell checkout, tolerating locked files."""
    if not base.is_dir("hunspell"):
        return
    base.delete_dir_with_access_error("hunspell")
    return
def make(build_js = True):
old_cur_dir = os.getcwd()
@ -11,6 +16,8 @@ def make(build_js = True):
core_common_dir = base.get_script_dir() + "/../../core/Common"
os.chdir(core_common_dir + "/3dParty/hunspell")
base.common_check_version("hunspell", "1", clean)
base.cmd("python", ["./before.py"])
if (build_js):

View File

@ -0,0 +1,21 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
def make():
    """Clone the hyphen library sources into core/Common/3dParty/hyphen, once."""
    print("[fetch]: hyphen")
    target_dir = base.get_script_dir() + "/../../core/Common/3dParty/hyphen"
    saved_cwd = os.getcwd()
    os.chdir(target_dir)
    if not base.is_dir("hyphen"):
        base.cmd("git", ["clone", "https://github.com/hunspell/hyphen"])
    os.chdir(saved_cwd)
    return

View File

@ -2,26 +2,50 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import base
import os
import glob
import icu_android
def fetch_icu(major, minor):
    """Shallow-clone ICU's maint/maint-<major> branch and keep only icu4c as ./icu.

    *minor* is accepted for call-site symmetry; the git fetch tracks the whole
    maintenance branch and does not use it.
    """
    repo_url = "https://github.com/unicode-org/icu.git"
    base.cmd("git", ["clone", "--depth", "1", "--branch", "maint/maint-" + major, repo_url, "./icu2"])
    base.copy_dir("./icu2/icu4c", "./icu")
    base.delete_dir_with_access_error("icu2")
    # Previous svn-export based fetch, kept for reference:
    #base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
    return
def clear_module():
    """Delete the icu checkout and every remaining directory in the cwd."""
    if base.is_dir("icu"):
        base.delete_dir_with_access_error("icu")
    # Sweep the per-platform build output directories.
    leftovers = [entry for entry in glob.glob("./*") if base.is_dir(entry)]
    for entry in leftovers:
        base.delete_dir(entry)
    return
def make():
print("[fetch & build]: icu")
if (-1 != config.option("platform").find("android")):
icu_android.make()
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
old_cur = os.getcwd()
os.chdir(base_dir)
icu_major = "58"
icu_minor = "2"
base.check_module_version("3", clear_module)
if (-1 != config.option("platform").find("android")):
icu_android.make()
os.chdir(base_dir)
icu_major = "58"
icu_minor = "3"
if not base.is_dir("icu"):
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
fetch_icu(icu_major, icu_minor)
if ("windows" == base.host_platform()):
platformToolset = "v140"
@ -66,6 +90,7 @@ def make():
base.create_dir(base_dir + "/icu/cross_build")
os.chdir("icu/cross_build")
base.cmd("./../source/runConfigureICU", ["Linux", "--prefix=" + base_dir + "/icu/cross_build_install"])
base.replaceInFile("./../source/icudefs.mk.in", "LDFLAGS = @LDFLAGS@ $(RPATHLDFLAGS)", "LDFLAGS = @LDFLAGS@ $(RPATHLDFLAGS) -static-libstdc++ -static-libgcc")
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
base.create_dir(base_dir + "/linux_64")

View File

@ -1,172 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
# Working directory for the Android ICU build.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
# NOTE(review): "toolshains" looks like a typo for "toolchains"; the variable
# is not referenced anywhere in this file.
toolshains_dir = current_dir + "/toolchains"
# ICU release exported from the upstream tags.
icu_major = "58"
icu_minor = "2"
# False => build static .a libraries plus the icudt data archive;
# True  => build shared .so libraries (then patched with patchelf in make()).
icu_is_shared = False
# Saved PATH; build_arch() temporarily prepends the per-arch toolchain to it.
current_path = base.get_env("PATH")
# Per-ABI toolchain names: "arch" selects the NDK standalone toolchain,
# "bin" is the filename prefix of its compiler/binutils executables.
platforms = {
    "arm64" : {
        "arch" : "aarch64-linux-android",
        "bin" : "aarch64-linux-android"
    },
    "arm" : {
        "arch" : "arm-linux-androideabi",
        "bin" : "arm-linux-androideabi"
    },
    "x86_64" : {
        "arch" : "x86_64-linux-android",
        "bin" : "x86_64-linux-android"
    },
    "x86" : {
        "arch" : "x86-linux-android",
        "bin" : "i686-linux-android"
    }
}
def build_arch(arch, api_version):
    """Build ICU for one Android ABI using an NDK standalone toolchain.

    Creates a fresh per-arch build dir, generates a standalone toolchain for
    the given API level, prepends it to PATH, configures ICU against the host
    cross_build and installs into build_tmp/<arch>.  PATH is restored at the
    end.
    """
    print("icu build: " + arch + " ----------------------------------------")
    # Always rebuild from a clean per-arch directory.
    if base.is_dir(current_dir + "/icu/" + arch):
        base.delete_dir(current_dir + "/icu/" + arch)
    base.create_dir(current_dir + "/icu/" + arch)
    os.chdir(current_dir + "/icu/" + arch)
    # Generate a standalone toolchain for this ABI/API level.
    base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
        "--platform=android-" + api_version,
        "--install-dir=" + current_dir + "/toolchain/" + arch,
        "--toolchain=" + platforms[arch]["arch"],
        "--force"
    ])
    base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)
    # !!!MASK!!! is substituted with the ABI's tool prefix just below.
    command_args = "--prefix=" + current_dir + "/build_tmp/" + arch + " --host=!!!MASK!!! --with-cross-build=" + current_dir + "/icu/cross_build CFLAGS=-Os CXXFLAGS=--std=c++11 CC=!!!MASK!!!-clang CXX=!!!MASK!!!-clang++ AR=!!!MASK!!!-ar RANLIB=!!!MASK!!!-ranlib"
    if not icu_is_shared:
        command_args += " --enable-static --enable-shared=no --with-data-packaging=archive CFLAGS=-fPIC CXXFLAGS=-fPIC"
    command_args = command_args.replace("!!!MASK!!!", platforms[arch]["bin"])
    base.cmd("../source/configure", command_args.split())
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    # Restore PATH and return to the module directory.
    base.set_env("PATH", current_path)
    os.chdir(current_dir)
    return
def make():
    """Fetch ICU 58 and build it for all four Android ABIs.

    Results land in <current_dir>/build: exported headers plus per-ABI
    libraries — shared .so files rewritten with patchelf, or static .a files
    with the icudt data archive, depending on icu_is_shared.
    """
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)
    # "build" doubles as the done-marker: skip everything when it exists.
    if base.is_dir(current_dir + "/build"):
        return
    current_dir_old = os.getcwd()
    print("[fetch & build]: icu_android")
    os.chdir(current_dir)
    # ---- fetch sources ----
    if not base.is_dir("icu"):
        base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
    if ("linux" == base.host_platform()):
        # Swap the "xlocale" include for "locale"; idempotent on re-runs.
        base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
    #if ("mac" == base.host_platform()):
    #  base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
    # ---- host cross build (base for the Android cross-compiles) ----
    if not base.is_dir(current_dir + "/icu/cross_build"):
        base.create_dir(current_dir + "/icu/cross_build")
    os.chdir(current_dir + "/icu/cross_build")
    base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
        "--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"], True)
    os.chdir(current_dir)
    # ---- per-ABI builds (API level chosen per ABI) ----
    build_arch("arm64", "21")
    build_arch("arm", "16")
    build_arch("x86_64","21")
    build_arch("x86", "16")
    os.chdir(current_dir)
    base.create_dir(current_dir + "/build")
    # Headers are identical across ABIs; export the arm64 copy.
    base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")
    if icu_is_shared:
        # Copy the versioned .so files under their unversioned names.
        base.create_dir(current_dir + "/build/arm64_v8a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")
        base.create_dir(current_dir + "/build/armv7")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")
        base.create_dir(current_dir + "/build/x86_64")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")
        base.create_dir(current_dir + "/build/x86")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")
        # patch elf information
        # Build patchelf from source and rewrite SONAME/NEEDED entries so the
        # renamed (unversioned) libraries resolve against each other.
        os.chdir(current_dir + "/build")
        base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
        os.chdir("./patchelf")
        base.cmd("./bootstrap.sh")
        base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
        base.cmd("make")
        base.cmd("make", ["install"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../arm64_v8a/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../arm64_v8a/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../arm64_v8a/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../armv7/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../armv7/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../armv7/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86_64/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86_64/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86_64/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86/libicuuc.so"])
        base.delete_dir(current_dir + "/build/patchelf")
    if not icu_is_shared:
        # Static layout: per-ABI libicudata.a / libicuuc.a plus the data archive.
        base.create_dir(current_dir + "/build/arm64_v8a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.a", current_dir + "/build/arm64_v8a/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.a", current_dir + "/build/arm64_v8a/libicuuc.a")
        base.copy_file(current_dir + "/icu/arm64/data/out/icudt58l.dat", current_dir + "/build/arm64_v8a/icudt58l.dat")
        base.create_dir(current_dir + "/build/armv7")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.a", current_dir + "/build/armv7/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.a", current_dir + "/build/armv7/libicuuc.a")
        base.copy_file(current_dir + "/icu/arm/data/out/icudt58l.dat", current_dir + "/build/armv7/icudt58l.dat")
        base.create_dir(current_dir + "/build/x86_64")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.a", current_dir + "/build/x86_64/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.a", current_dir + "/build/x86_64/libicuuc.a")
        base.copy_file(current_dir + "/icu/x86_64/data/out/icudt58l.dat", current_dir + "/build/x86_64/icudt58l.dat")
        base.create_dir(current_dir + "/build/x86")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.a", current_dir + "/build/x86/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.a", current_dir + "/build/x86/libicuuc.a")
        base.copy_file(current_dir + "/icu/x86/data/out/icudt58l.dat", current_dir + "/build/x86/icudt58l.dat")
    os.chdir(current_dir_old)
    return

View File

@ -35,7 +35,7 @@ def restore_icu_defs(current_dir):
return
icu_major = "58"
icu_minor = "2"
icu_minor = "3"
current_dir_old = os.getcwd()
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"

View File

@ -0,0 +1,38 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
import subprocess
def clear_module():
    """Delete previously fetched third-party checkouts used by the iWork build.

    Fix: the loop variable was named ``dir``, shadowing the builtin; renamed.
    Also adds the explicit ``return`` used by the sibling functions.
    """
    directories = ["glm", "libetonyek", "libodfgen", "librevenge", "mdds"]
    for directory in directories:
        if base.is_dir(directory):
            base.delete_dir_with_access_error(directory)
    return
def make(use_gperf = True):
    """Version-check the apple third-party module and run its fetch.py script.

    use_gperf: pass the --gperf switch through to fetch.py (default True).
    """
    print("[fetch & build]: iwork")
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/apple"
    saved_cwd = os.getcwd()
    os.chdir(base_dir)
    base.check_module_version("3", clear_module)
    os.chdir(saved_cwd)
    cmd_args = ["fetch.py"] + (["--gperf"] if use_gperf else [])
    base.cmd_in_dir(base_dir, "python", cmd_args)
    return

if __name__ == '__main__':
    # manual compile
    make(False)

View File

@ -126,11 +126,14 @@ def make():
def param_apple(platform, arch):
    """CMake arguments for an Xcode build (deployment target 10) of the given platform/arch."""
    return [
        "-G", "Xcode",
        "-DDEPLOYMENT_TARGET=10",
        "-DENABLE_BITCODE=1",
        "-DPLATFORM=" + platform,
        "-DARCHS=" + arch,
        "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE,
    ]
def param_apple_ios(platform, arch, params=None):
    """CMake arguments for an Xcode iOS build (deployment target 11).

    params: optional extra cmake arguments, prepended to the common set.
    Fix: the default was a mutable list (``params=[]``); although it was never
    mutated here, the shared-default anti-pattern is replaced with the None
    sentinel.  Behavior for all existing call sites is unchanged.
    """
    extra = params if params is not None else []
    return extra + ["-G", "Xcode", "-DDEPLOYMENT_TARGET=11", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]
if(platform == "ios"):
build_arch("ios", "armv7", param_apple("OS", "armv7"))
build_arch("ios", "arm64", param_apple("OS64", "arm64"))
build_arch("ios", "i386", param_apple("SIMULATOR", "i386"))
build_arch("ios", "x86_64", param_apple("SIMULATOR64", "x86_64"))
#build_arch("ios", "armv7", param_apple("OS", "armv7"))
build_arch("ios", "arm64", param_apple_ios("OS64", "arm64"))
#build_arch("ios", "i386", param_apple_ios("SIMULATOR", "i386"))
build_arch("ios", "x86_64", param_apple_ios("SIMULATOR64", "x86_64", ["-DCMAKE_CXX_FLAGS=-std=c++11"]))
else:
build_arch("mac", "mac_arm64", param_apple("MAC_ARM64", "arm64"))
build_arch("mac", "mac_64", param_apple("MAC", "x86_64"))
@ -144,7 +147,7 @@ def make():
#copy include
prefix_dir = current_dir + "/IXWebSocket/build/ios/"
postfix_dir = ""
if base.is_dir(prefix_dir + "armv7/usr"):
if base.is_dir(prefix_dir + "arm64/usr"):
postfix_dir = "/usr"
if base.is_dir(prefix_dir + "armv7" + postfix_dir + "/include"):
@ -157,10 +160,16 @@ def make():
base.cmd("cp", [ "-r", prefix_dir + "x86_64" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])
# Create fat lib
base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
"IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
"-create", "-output",
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
if (True):
base.cmd("lipo", ["IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
"IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
"-create", "-output",
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
else:
base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
"IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
"-create", "-output",
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
elif (-1 != config.option("platform").find("linux")):

View File

@ -0,0 +1,124 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
def docker_build(image_name, dockerfile_dir, base_dir):
    """Build a throwaway docker image, run it against the vlc sources, then delete it.

    The container gets <base_dir>/vlc bind-mounted at /vlc and is expected to
    perform the actual VLC build in place; the image itself is removed after
    the run so nothing but the build output is kept.
    """
    base.cmd("docker", ["build", "-t", image_name, dockerfile_dir])
    mount_spec = base_dir + "/vlc" + ":/vlc"
    base.cmd("docker", ["run", "--rm", "-v", mount_spec, image_name])
    base.cmd("docker", ["image", "rm", image_name])
    return
def form_build_win(src_dir, dest_dir):
    """Assemble the windows libvlc SDK layout (include/ + lib/) from a VLC build tree.

    Import libraries are renamed from libvlc*/libvlccore* to the names our
    projects link against, and vlc-cache-gen is run so the plugins cache
    ships pre-generated with the build.
    """
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # headers
    base.copy_dir(src_dir + "/sdk/include", dest_dir + "/include")
    # import libraries
    base.create_dir(dest_dir + "/lib")
    base.copy_file(src_dir + "/sdk/lib/libvlc.lib", dest_dir + "/lib/vlc.lib")
    base.copy_file(src_dir + "/sdk/lib/libvlccore.lib", dest_dir + "/lib/vlccore.lib")
    # runtime pieces: plugin tree, dlls and the cache generator
    base.copy_dir(src_dir + "/plugins", dest_dir + "/lib/plugins")
    for runtime_item in ("libvlc.dll", "libvlccore.dll", "vlc-cache-gen.exe"):
        base.copy_file(src_dir + "/" + runtime_item, dest_dir + "/lib")
    # generate cache file 'plugins.dat' for plugins loading
    base.cmd_exe(dest_dir + "/lib/vlc-cache-gen", [dest_dir + "/lib/plugins"])
    return
def form_build_linux(src_dir, dest_dir):
    """Copy a linux libvlc build into dest_dir, dropping pieces we do not ship."""
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # headers
    base.copy_dir(src_dir + "/include", dest_dir + "/include")
    # libraries, minus pkg-config metadata and the static compat archive
    lib_dir = dest_dir + "/lib"
    base.copy_dir(src_dir + "/lib", lib_dir)
    base.delete_dir(lib_dir + "/pkgconfig")
    base.delete_file(lib_dir + "/vlc/libcompat.a")
def form_build_mac(src_dir, dest_dir):
    """Copy a mac libvlc build into dest_dir, strip libtool/pkg-config leftovers,
    and pre-generate the plugins cache.
    """
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/include", dest_dir + "/include")
    # copy and form lib dir
    base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
    # NOTE(review): the pattern carries embedded quotes; if base.cmd execs an
    # argv list without a shell, find receives the literal string "*.la"
    # (quotes included) and deletes nothing — confirm base.cmd uses a shell.
    base.cmd("find", [dest_dir + "/lib", "-name", "\"*.la\"", "-type", "f", "-delete"])
    base.delete_dir(dest_dir + "/lib/pkgconfig")
    base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
    # generate cache file 'plugins.dat' for plugins loading; DYLD_LIBRARY_PATH
    # lets vlc-cache-gen resolve libvlc from the freshly copied lib dir
    base.run_command("DYLD_LIBRARY_PATH=" + dest_dir + "/lib " + dest_dir + "/lib/vlc/vlc-cache-gen " + dest_dir + "/lib/vlc/plugins")
    return
def make():
    """Fetch VLC 3.0.18 sources and build/package libvlc for each requested platform.

    Windows and linux builds run inside per-platform docker images (see
    docker_build); mac builds run the upstream macosx build script directly.
    Results are collected under <base_dir>/build/<platform> by the
    form_build_* helpers.
    """
    print("[fetch & build]: libvlc")
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/libvlc"
    vlc_dir = base_dir + "/vlc"
    vlc_version = "3.0.18"
    tools_dir = base.get_script_dir() + "/../tools"
    old_cur = os.getcwd()
    os.chdir(base_dir)
    if not base.is_dir(vlc_dir):
        # temporary disable auto CRLF for Windows so the cloned tree keeps
        # LF endings; the previous global setting is restored after the clone
        if "windows" == base.host_platform():
            autocrlf_old = base.run_command("git config --global core.autocrlf")['stdout']
            base.cmd("git", ["config", "--global", "core.autocrlf", "false"])
        base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
        if "windows" == base.host_platform():
            base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
    base.create_dir("build")
    base.copy_file("tools/ignore-cache-time.patch", "vlc")
    # windows: each arch gets its own build.patch and docker image
    if "windows" == base.host_platform():
        if config.check_option("platform", "win_64"):
            base.copy_file("tools/win_64/build.patch", "vlc")
            docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
            form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
        if config.check_option("platform", "win_32"):
            base.copy_file("tools/win_32/build.patch", "vlc")
            docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
            form_build_win(vlc_dir + "/build/win32/vlc-" + vlc_version, base_dir + "/build/win_32")
    # linux: patchelf + rpath script are used inside the container
    if config.check_option("platform", "linux_64"):
        base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
        base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
        docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
        form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
    # mac: patch the source in place and run the upstream build script
    if "mac" == base.host_platform():
        os.chdir(vlc_dir)
        base.cmd("git", ["restore", "src/modules/bank.c"])
        base.cmd("patch", ["-p1", "src/modules/bank.c", "../tools/ignore-cache-time.patch"])
        if config.check_option("platform", "mac_64"):
            base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
            base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_64/build.patch"])
            base.create_dir("build/mac_64")
            os.chdir("build/mac_64")
            base.cmd("../../extras/package/macosx/build.sh", ["-c"])
            form_build_mac(vlc_dir + "/build/mac_64/vlc_install_dir", base_dir + "/build/mac_64")
        # NOTE(review): if mac_64 ran first, the cwd at this point is
        # vlc/build/mac_64, so the relative git/patch/create_dir paths below
        # resolve from there — confirm both mac platforms are never requested
        # in the same invocation, or that the helpers tolerate this.
        if config.check_option("platform", "mac_arm64"):
            base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
            base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_arm64/build.patch"])
            base.create_dir("build/mac_arm64")
            os.chdir("build/mac_arm64")
            base.cmd("../../extras/package/macosx/build.sh", ["-c"])
            form_build_mac(vlc_dir + "/build/mac_arm64/vlc_install_dir", base_dir + "/build/mac_arm64")
    os.chdir(old_cur)
    return

View File

@ -19,15 +19,16 @@ def make():
print("[fetch & build]: openssl")
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
openssl_mobile.make()
return
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
old_cur = os.getcwd()
os.chdir(base_dir)
base.common_check_version("openssl", "3", clean)
base.common_check_version("openssl", "4", clean)
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
os.chdir(old_cur)
openssl_mobile.make()
return
if not base.is_dir("openssl"):
base.cmd("git", ["clone", "--depth=1", "--branch", "OpenSSL_1_1_1f", "https://github.com/openssl/openssl.git"])
@ -41,7 +42,7 @@ def make():
base.create_dir("./../build/win_64")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64 --openssldir=" + old_cur_dir + "\\build\\win_64 no-shared no-asm")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64 --openssldir=" + old_cur_dir + "\\build\\win_64 no-shared no-asm enable-md2")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -49,7 +50,7 @@ def make():
base.create_dir("./../build/win_32")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x86")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32 --openssldir=" + old_cur_dir + "\\build\\win_32 no-shared no-asm")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32 --openssldir=" + old_cur_dir + "\\build\\win_32 no-shared no-asm enable-md2")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -62,7 +63,7 @@ def make():
base.create_dir("./../build/win_64_xp")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64_xp --openssldir=" + old_cur_dir + "\\build\\win_64_xp no-shared no-asm no-async")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64_xp --openssldir=" + old_cur_dir + "\\build\\win_64_xp no-shared no-asm no-async enable-md2")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -70,7 +71,7 @@ def make():
base.create_dir("./../build/win_32_xp")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x86")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32_xp --openssldir=" + old_cur_dir + "\\build\\win_32_xp no-shared no-asm no-async")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32_xp --openssldir=" + old_cur_dir + "\\build\\win_32_xp no-shared no-asm no-async enable-md2")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -79,11 +80,12 @@ def make():
return
if (-1 != config.option("platform").find("linux")) and not base.is_dir("../build/linux_64"):
base.cmd("./config", ["no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/linux_64", "--openssldir=" + old_cur_dir + "/build/linux_64"])
base.cmd("./config", ["enable-md2", "no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/linux_64", "--openssldir=" + old_cur_dir + "/build/linux_64"])
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
base.cmd("make")
base.cmd("make", ["install"])
base.cmd("make", ["clean"], True)
# TODO: support x86
if (-1 != config.option("platform").find("linux_arm64")) and not base.is_dir("../build/linux_arm64"):
@ -94,14 +96,14 @@ def make():
if ("" == cross_compiler_arm64):
cross_compiler_arm64 = "/usr/bin"
cross_compiler_arm64_prefix = cross_compiler_arm64 + "/" + base.get_prefix_cross_compiler_arm64()
base.cmd("./Configure", ["linux-aarch64", "--cross-compile-prefix=" + cross_compiler_arm64_prefix, "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
base.cmd("./Configure", ["linux-aarch64", "--cross-compile-prefix=" + cross_compiler_arm64_prefix, "enable-md2", "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
base.cmd("make", [], True)
base.cmd("make", ["install"], True)
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
base.cmd("./Configure", ["enable-md2", "no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
base.cmd("make", ["build_libs", "install"])
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_arm64"):
@ -119,7 +121,7 @@ def make():
},\n\
\"darwin64-x86_64-cc\" => {"
base.replaceInFile(base_dir + "/openssl2/Configurations/10-main.conf", replace1, replace2)
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
base.cmd("./Configure", ["enable-md2", "no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
base.cmd("make", ["build_libs", "install"])
os.chdir(old_cur)

View File

@ -1,18 +1,19 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
sys.path.append('android')
import base
import config
import os
import subprocess
import openssl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
old_cur = os.getcwd()
os.chdir(path)
base.set_env("ANDROID_HOME", base.get_android_sdk_home())
if (-1 != config.option("platform").find("android") and not base.is_dir("./build/android")):
subprocess.call(["./build-android-openssl.sh"])
if (-1 != config.option("platform").find("android")):
openssl_android.make()
if (-1 != config.option("platform").find("ios") and not base.is_dir("./build/ios")):
subprocess.call(["./build-ios-openssl.sh"])

View File

@ -0,0 +1,62 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
import subprocess
import glob
def correct_namespace(dir):
    """Recursively rewrite the sio namespace to sio_no_tls in every file under *dir*.

    Used to produce a second, TLS-free copy of the socket.io client sources
    that can be linked alongside the TLS build without symbol clashes.
    """
    folder = dir
    # normalize to a trailing-slash glob pattern covering dir's entries
    if ("/" != folder[-1:]):
        folder += "/"
    folder += "*"
    for file in glob.glob(folder):
        if base.is_file(file):
            base.replaceInFile(file, "namespace sio", "namespace sio_no_tls")
            # "asio::" contains the substring "sio::", so it would be mangled
            # by the rename below; temporarily move it out of the way ...
            base.replaceInFile(file, "asio::", "asio_no_tls::")
            base.replaceInFile(file, "sio::", "sio_no_tls::")
            # ... and restore it afterwards (only genuine sio:: uses end up renamed)
            base.replaceInFile(file, "asio_no_tls::", "asio::")
        elif base.is_dir(file):
            # recurse into subdirectories
            correct_namespace(file)
    return
def make():
    """Fetch socket.io-client-cpp at pinned revisions, patch it, and create a
    parallel TLS-free source tree (src_no_tls) with renamed namespaces/guards.

    NOTE(review): the setup below is formatted as one-time work guarded by the
    clone check (re-applying patches to an existing checkout would fail) —
    confirm the guard scope against the original file's indentation.
    """
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
    if not base.is_dir(base_dir + "/socket.io-client-cpp"):
        # pinned revisions of the client and its asio/websocketpp submodules
        base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
        base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
        base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
        base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
        base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
        base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
        # patches
        base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
        base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
        base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
        base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
        # no tls realization (remove if socket.io fix this): duplicate the
        # sources under src_no_tls and rename namespaces + include guards so
        # both variants can coexist in one binary
        dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"
        base.copy_dir(base_dir + "/socket.io-client-cpp/src", dst_dir)
        correct_namespace(dst_dir)
        base.replaceInFile(dst_dir + "/internal/sio_client_impl.h", "SIO_TLS", "SIO_TLS_NO")
        base.replaceInFile(dst_dir + "/internal/sio_client_impl.cpp", "SIO_TLS", "SIO_TLS_NO")
        base.replaceInFile(dst_dir + "/sio_socket.h", "SIO_SOCKET_H", "SIO_SOCKET_NO_TLS_H")
        base.replaceInFile(dst_dir + "/sio_client.h", "SIO_CLIENT_H", "SIO_CLIENT_NO_TLS_H")
        base.replaceInFile(dst_dir + "/sio_message.h", "__SIO_MESSAGE_H__", "__SIO_MESSAGE_NO_TLS_H__")
        base.replaceInFile(dst_dir + "/internal/sio_packet.h", "SIO_PACKET_H", "SIO_PACKET_NO_TLS_H")
        # allow overriding the ping timeout interval at compile time via
        # PING_TIMEOUT_INTERVAL, in both the TLS and no-TLS copies
        old_ping = " m_ping_timeout_timer->expires_from_now(milliseconds(m_ping_interval + m_ping_timeout), ec);"
        new_ping = "#if defined(PING_TIMEOUT_INTERVAL)\n"
        new_ping += " m_ping_timeout_timer->expires_from_now(milliseconds(PING_TIMEOUT_INTERVAL), ec);\n"
        new_ping += "#else\n"
        new_ping += old_ping
        new_ping += "\n#endif"
        base.replaceInFile(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", old_ping, new_ping)
        base.replaceInFile(base_dir + "/socket.io-client-cpp/src_no_tls/internal/sio_client_impl.cpp", old_ping, new_ping)
    return

View File

@ -10,13 +10,17 @@ import config
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketrocket"
def buildIOS():
# Build for iphone
base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-devices.xcarchive", "-sdk", "iphoneos", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
base.cmd("xcodebuild", ["-sdk", "iphoneos", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])
# Build for simulator
base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-simulators.xcarchive", "-sdk", "iphonesimulator", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
base.cmd("xcodebuild", ["-sdk", "iphonesimulator", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])
# Package xcframework
base.cmd("xcodebuild", ["-create-xcframework", "-library", current_dir + "/build/SocketRocket-devices.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-library", current_dir + "/build/SocketRocket-simulators.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-output", current_dir + "/build/SocketRocket.xcframework"])
# Remove arm64 for simulator for SDK 14
base.cmd("lipo", ["-remove", "arm64", "-output", "build/Release-iphonesimulator/libSocketRocket.a", "build/Release-iphonesimulator/libSocketRocket.a"])

View File

@ -68,9 +68,9 @@ def make():
if ("mac" == base.host_platform()) and (-1 == config.option("config").find("use_v8")):
return
use_v8_89 = False
if (-1 != config.option("config").lower().find("v8_version_89")):
use_v8_89 = True
use_v8_89 = True
if config.check_option("config", "v8_version_60"):
use_v8_89 = False
if (use_v8_89):
v8_89.make()
@ -93,6 +93,7 @@ def make():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
@ -118,7 +119,7 @@ def make():
# windows hack (delete later) ----------------------
if ("windows" == base.host_platform()):
base.delete_dir_with_access_error("v8/buildtools/win")
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
base.cmd("gclient", ["sync", "--force"], True)
else:
base.cmd("gclient", ["sync"], True)
@ -179,6 +180,7 @@ def make():
base.cmd2("gn", ["gen", "out.gn/mac_64", "--args=\"is_debug=false " + base_args64 + "\""])
base.cmd("ninja", ["-C", "out.gn/mac_64"])
# add enable_iterator_debugging=false for disable _ITERATOR_DEBUG_LEVEL
if config.check_option("platform", "win_64"):
if (-1 != config.option("config").lower().find("debug")):
base.cmd2("gn", ["gen", "out.gn/win_64/debug", "--args=\"is_debug=true " + base_args64 + " is_clang=false\""])
@ -224,13 +226,17 @@ def make_xp():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
base.replaceInFile("depot_tools/cipd.ps1", "windows-386", "windows-amd64")
# old variant
#path_to_python2 = "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin"
path_to_python2 = "/depot_tools/bootstrap-2@3_8_10_chromium_23_bin/python/bin"
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
base_dir + "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin",
base_dir + path_to_python2,
config.option("vs-path") + "/../Common7/IDE",
os.environ["PATH"]])
@ -240,7 +246,7 @@ def make_xp():
base.cmd("./depot_tools/fetch", ["v8"], True)
base.cmd("./depot_tools/gclient", ["sync", "-r", "4.10.253"], True)
base.delete_dir_with_access_error("v8/buildtools/win")
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
base.cmd("gclient", ["sync", "--force"], True)
# save common py script
@ -265,6 +271,14 @@ def make_xp():
" replaceInFile(file, '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>')",
]);
programFilesDir = base.get_env("ProgramFiles")
if ("" != base.get_env("ProgramFiles(x86)")):
programFilesDir = base.get_env("ProgramFiles(x86)")
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
if (base.is_dir(dev_path)):
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
# add "SET CL=\"/D_ITERATOR_DEBUG_LEVEL=0\"" before devenv for disable _ITERATOR_DEBUG_LEVEL in debug
if config.check_option("platform", "win_64_xp"):
if not base.is_dir("win_64/release"):
base.run_as_bat(["call python v8/build/gyp_v8 -Dtarget_arch=x64", "call python v8/build/common_xp.py", "call devenv v8/tools/gyp/v8.sln /Rebuild Release"])

View File

@ -0,0 +1,37 @@
// Replacement StrongRootBlockAllocator spliced into v8's src/heap/heap.h by
// patch_windows_debug() (see v8_89.py).  Presumably the upstream inline
// allocate/deallocate bodies fail to compile with enable_iterator_debugging
// on Windows, so they are declared here without bodies — TODO confirm where
// the out-of-line definitions come from.
class StrongRootBlockAllocator {
 public:
  // Standard allocator member types (pre-C++17 allocator interface).
  using pointer = Address*;
  using const_pointer = const Address*;
  using reference = Address&;
  using const_reference = const Address&;
  using value_type = Address;
  using size_type = size_t;
  using difference_type = ptrdiff_t;
  template <class U>
  struct rebind;
  explicit StrongRootBlockAllocator(Heap* heap) : heap_(heap) {}
  Address* allocate(size_t n);
  void deallocate(Address* p, size_t n) noexcept;
 private:
  Heap* heap_;
};
// Rebinding to Address gives another StrongRootBlockAllocator.
template <>
struct StrongRootBlockAllocator::rebind<Address> {
  using other = StrongRootBlockAllocator;
};
// Rebinding to something other than Address gives a std::allocator that
// is copy-constructable from StrongRootBlockAllocator.
template <class U>
struct StrongRootBlockAllocator::rebind {
  class other : public std::allocator<U> {
   public:
    // NOLINTNEXTLINE
    other(const StrongRootBlockAllocator&) {}
  };
};

View File

@ -7,6 +7,26 @@ import base
import os
import subprocess
def change_bootstrap():
    """Pin depot_tools' bootstrap manifest to fixed cpython/python3/git CIPD packages.

    The stock manifest is saved as manifest.txt.bak and replaced with a fixed
    package list (presumably because the default resolution breaks for the
    pinned v8 branch — TODO confirm).  bootstrap.py is also patched so a
    failing bootstrap step returns instead of raising.
    """
    base.move_file("./depot_tools/bootstrap/manifest.txt", "./depot_tools/bootstrap/manifest.txt.bak")
    # manifest format: $VerifiedPlatform line, then @Subdir + CIPD package/version pairs
    content = "# changed by build_tools\n\n"
    content += "$VerifiedPlatform windows-amd64 windows-arm64 linux-amd64 mac-amd64 mac-arm64\n\n"
    content += "@Subdir python\n"
    content += "infra/3pp/tools/cpython/${platform} version:2@2.7.18.chromium.39\n\n"
    content += "@Subdir python3\n"
    content += "infra/3pp/tools/cpython3/${platform} version:2@3.8.10.chromium.23\n\n"
    content += "@Subdir git\n"
    content += "infra/3pp/tools/git/${platform} version:2@2.41.0.chromium.11\n"
    # make bootstrap failures non-fatal
    base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
        "raise subprocess.CalledProcessError(proc.returncode, argv, None)", "return")
    base.writeFile("./depot_tools/bootstrap/manifest.txt", content)
    return
def make_args(args, platform, is_64=True, is_debug=False):
args_copy = args[:]
if is_64:
@ -24,6 +44,8 @@ def make_args(args, platform, is_64=True, is_debug=False):
if is_debug:
args_copy.append("is_debug=true")
if (platform == "windows"):
args_copy.append("enable_iterator_debugging=true")
else:
args_copy.append("is_debug=false")
@ -31,7 +53,7 @@ def make_args(args, platform, is_64=True, is_debug=False):
args_copy.append("is_clang=true")
args_copy.append("use_sysroot=false")
if (platform == "windows"):
args_copy.append("is_clang=false")
args_copy.append("is_clang=false")
return "--args=\"" + " ".join(args_copy) + "\""
@ -44,12 +66,40 @@ def ninja_windows_make(args, is_64=True, is_debug=False):
base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")
win_toolset_wrapper_file = "build/toolchain/win/tool_wrapper.py"
win_toolset_wrapper_file_content = base.readFile("build/toolchain/win/tool_wrapper.py")
if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")):
base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n")
base.cmd("ninja", ["-C", directory_out, "v8_wrappers"])
base.cmd("ninja", ["-C", directory_out])
base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja")
base.move_file("./" + directory_out + "/obj/v8_wrappers.ninja.bak", "./" + directory_out + "/obj/v8_wrappers.ninja")
return
# patch v8 for build ---------------------------------------------------
def patch_windows_debug():
    """Replace StrongRootBlockAllocator in v8's src/heap/heap.h with our patched copy.

    v8 8.9 does not build with the enable_iterator_debugging flag; the text in
    v8_89.patch is substituted for the original class.  heap.h is saved as
    heap.h.bak first so unpatch_windows_debug() can restore it.
    Must be called with cwd inside the v8 checkout (paths are relative).
    """
    file_patch = "./src/heap/heap.h"
    base.copy_file(file_patch, file_patch + ".bak")
    content_old = base.readFile(file_patch)
    # span to replace: from the class declaration through the second "};"
    # after it.  NOTE(review): this assumes exactly two "};"-terminated
    # chunks (class + trailing specialization) make up the original text —
    # verify against the heap.h revision being patched.
    posStart = content_old.find("class StrongRootBlockAllocator {")
    posEnd = content_old.find("};", posStart + 1)
    posEnd = content_old.find("};", posEnd + 1)
    content = content_old[0:posStart]
    content += base.readFile("./../../../../../build_tools/scripts/core_common/modules/v8_89.patch")
    content += content_old[posEnd + 2:]
    base.writeFile(file_patch, content)
    return
def unpatch_windows_debug():
    """Undo patch_windows_debug(): restore heap.h from the .bak copy."""
    patched_header = "./src/heap/heap.h"
    base.move_file(patched_header + ".bak", patched_header)
    return
# ----------------------------------------------------------------------
def make():
old_env = dict(os.environ)
old_cur = os.getcwd()
@ -58,9 +108,13 @@ def make():
if not base.is_dir(base_dir):
base.create_dir(base_dir)
if ("mac" == base.host_platform()):
base.cmd("git", ["config", "--global", "http.postBuffer", "157286400"], True)
os.chdir(base_dir)
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
change_bootstrap()
os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]
@ -70,18 +124,33 @@ def make():
if not base.is_dir("v8"):
base.cmd("./depot_tools/fetch", ["v8"], True)
base.copy_dir("./v8/third_party", "./v8/third_party_new")
if ("windows" == base.host_platform()):
os.chdir("v8")
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
os.chdir("../")
base.cmd("./depot_tools/gclient", ["sync", "-r", "remotes/branch-heads/8.9"], True)
v8_branch_version = "remotes/branch-heads/8.9"
if ("mac" == base.host_platform()):
v8_branch_version = "remotes/branch-heads/9.9"
base.cmd("./depot_tools/gclient", ["sync", "-r", v8_branch_version], True)
base.cmd("gclient", ["sync", "--force"], True)
base.copy_dir("./v8/third_party_new/ninja", "./v8/third_party/ninja")
if ("windows" == base.host_platform()):
base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt")
if not base.is_file("v8/src/base/platform/wrappers.cc"):
base.writeFile("v8/src/base/platform/wrappers.cc", "#include \"src/base/platform/wrappers.h\"\n")
else:
base.replaceInFile("depot_tools/gclient_paths.py", "@functools.lru_cache", "")
if ("mac" == base.host_platform()):
if not base.is_file("v8/build/config/compiler/BUILD.gn.bak"):
base.copy_file("v8/build/config/compiler/BUILD.gn", "v8/build/config/compiler/BUILD.gn.bak")
base.replaceInFile("v8/build/config/compiler/BUILD.gn", "\"-Wloop-analysis\",", "\"-Wloop-analysis\", \"-D_Float16=short\",")
if not base.is_file("v8/third_party/jinja2/tests.py.bak"):
base.copy_file("v8/third_party/jinja2/tests.py", "v8/third_party/jinja2/tests.py.bak")
base.replaceInFile("v8/third_party/jinja2/tests.py", "from collections import Mapping", "try:\n from collections.abc import Mapping\nexcept ImportError:\n from collections import Mapping")
os.chdir("v8")
@ -112,7 +181,9 @@ def make():
if config.check_option("platform", "win_64"):
if (-1 != config.option("config").lower().find("debug")):
if not base.is_file("out.gn/win_64/debug/obj/v8_monolith.lib"):
patch_windows_debug()
ninja_windows_make(gn_args, True, True)
unpatch_windows_debug()
if not base.is_file("out.gn/win_64/release/obj/v8_monolith.lib"):
ninja_windows_make(gn_args)
@ -120,7 +191,9 @@ def make():
if config.check_option("platform", "win_32"):
if (-1 != config.option("config").lower().find("debug")):
if not base.is_file("out.gn/win_32/debug/obj/v8_monolith.lib"):
patch_windows_debug()
ninja_windows_make(gn_args, False, True)
unpatch_windows_debug()
if not base.is_file("out.gn/win_32/release/obj/v8_monolith.lib"):
ninja_windows_make(gn_args, False)

View File

@ -1,16 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import ixwebsocket
import socketrocket
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
def make():
ixwebsocket.make()
socketrocket.make()
return

View File

@ -0,0 +1,16 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
#import ixwebsocket
#import socketrocket
import socket_io
def make():
    """Build the websocket layer.

    The ixwebsocket/socketrocket backends are currently disabled; socket.io
    is the only websocket implementation being built.
    """
    socket_io.make()
    return

View File

@ -7,6 +7,7 @@ import deploy_builder
import deploy_server
import deploy_core
import deploy_mobile
import deploy_osign
def make():
if config.check_option("module", "desktop"):
@ -19,4 +20,6 @@ def make():
deploy_core.make()
if config.check_option("module", "mobile"):
deploy_mobile.make()
if config.check_option("module", "osign"):
deploy_osign.make()
return

View File

@ -36,23 +36,24 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
if ("ios" == platform):
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
else:
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
if (native_platform == "linux_64"):
base.generate_check_linux_system(git_dir + "/build_tools", root_dir)
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", root_dir)
# icu
if (0 == platform.find("win")):
@ -76,12 +77,21 @@ def make():
if (0 == platform.find("win")):
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
# python wrapper
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
# java wrapper
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.jni")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.java/build/libs/docbuilder.jar", root_dir + "/docbuilder.jar")
# app
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.copy_dir(git_dir + "/document-templates/new/en-US", root_dir + "/empty")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
# js
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", root_dir + "/sdkjs")
base.create_dir(root_dir + "/sdkjs/vendor")
@ -92,22 +102,42 @@ def make():
base.create_dir(root_dir + "/include")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/common_deploy.h", root_dir + "/include/common.h")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.h", root_dir + "/include/docbuilder.h")
if (0 == platform.find("win")):
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder_midl.h", root_dir + "/include/docbuilder_midl.h")
base.replaceInFile(root_dir + "/include/docbuilder.h", "Q_DECL_EXPORT", "BUILDING_DOCBUILDER")
if ("win_64" == platform):
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/x64/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_64/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_64/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
elif ("win_32" == platform):
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/Win32/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_32/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_32/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
# correct ios frameworks
if ("ios" == platform):
base.generate_plist(root_dir)
if (0 == platform.find("linux")):
base.linux_correct_rpath_docbuilder(root_dir)
if (0 == platform.find("mac")):
base.mac_correct_rpath_x2t(root_dir)
base.mac_correct_rpath_docbuilder(root_dir)
base.create_x2t_js_cache(root_dir, "builder")
# delete unnecessary builder files
def delete_files(files):
    # Remove every path in the given iterable via the shared base helper.
    for path in files:
        base.delete_file(path)
delete_files(base.find_files(root_dir, "*.wasm"))
delete_files(base.find_files(root_dir, "*_ie.js"))
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/cmap.bin")
if 0 != platform.find("mac"):
delete_files(base.find_files(root_dir, "sdk-all.js"))
delete_files(base.find_files(root_dir, "sdk-all-min.js"))
return

View File

@ -30,16 +30,18 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", archive_dir + "/sdkjs")
@ -55,11 +57,20 @@ def make():
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allfontsgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allthemesgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "pluginsmanager")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "standardtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2ttester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester")
if base.is_file(archive_dir + "/core.7z"):
base.delete_file(archive_dir + "/core.7z")
base.archive_folder(archive_dir, archive_dir + "/core.7z")
# js cache
base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.create_x2t_js_cache(archive_dir, "core")
base.delete_file(archive_dir + "/DoctRenderer.config")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)
return

View File

@ -4,6 +4,19 @@ import config
import base
import os
import platform
import glob
def copy_lib_with_links(src_dir, dst_dir, lib, version):
    """Copy a fully-versioned shared library and recreate its symlink chain.

    Copies ``<lib>.<version>`` from *src_dir* to *dst_dir*, then creates the
    conventional Linux soname links inside *dst_dir*::

        <lib>.<major> -> <lib>.<version>
        <lib>         -> <lib>.<major>

    :param src_dir: directory holding the fully-versioned library file
    :param dst_dir: destination directory (links are created here)
    :param lib: library base name, e.g. ``"libvlc.so"``
    :param version: full version string, e.g. ``"5.6.1"``
    """
    lib_full_name = lib + "." + version
    # partition() is safe when version has no dot: the whole string is used.
    # (version[:version.find(".")] would slice with -1 and silently chop the
    # last character off a dot-less version.)
    major_version = version.partition(".")[0]
    lib_major_name = lib + "." + major_version
    base.copy_file(src_dir + "/" + lib_full_name, dst_dir + "/" + lib_full_name)
    base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + lib_full_name, "./" + lib_major_name])
    base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + lib_major_name, "./" + lib])
    return
def make():
base_dir = base.get_script_dir() + "/../out"
@ -41,35 +54,37 @@ def make():
platform_postfix = platform + base.qt_dst_postfix()
build_libraries_path = core_build_dir + "/lib/" + platform_postfix
# x2t
base.create_dir(root_dir + "/converter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DocxRenderer")
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel")
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel_network")
base.copy_lib(build_libraries_path, root_dir + "/converter", "UnicodeConverter")
base.copy_lib(build_libraries_path, root_dir + "/converter", "graphics")
base.copy_lib(build_libraries_path, root_dir + "/converter", "PdfFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "DjVuFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "XpsFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlFile2")
base.copy_lib(build_libraries_path, root_dir + "/converter", "Fb2File")
base.copy_lib(build_libraries_path, root_dir + "/converter", "EpubFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "IWorkFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "HWPFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "DocxRenderer")
if ("ios" == platform):
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "x2t")
base.copy_lib(build_libraries_path, root_dir + "/converter", "x2t")
else:
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "x2t")
if (native_platform == "linux_64"):
base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter")
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter")
# icu
if (0 == platform.find("win")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/converter/icudt58.dll")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/converter/icuuc58.dll")
base.copy_file(git_dir + "/desktop-apps/common/converter/package.config", root_dir + "/converter/package.config")
#base.copy_file(git_dir + "/desktop-apps/common/converter/package.config", root_dir + "/converter/package.config")
if (0 == platform.find("linux")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/converter/libicudata.so.58")
@ -81,37 +96,49 @@ def make():
# doctrenderer
if isWindowsXP:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir + "/converter", "doctrenderer")
base.copy_lib(build_libraries_path + "/xp", root_dir + "/converter", "doctrenderer")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "doctrenderer")
base.copy_lib(build_libraries_path, root_dir + "/converter", "doctrenderer")
base.copy_v8_files(core_dir, root_dir + "/converter", platform, isWindowsXP)
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop")
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop", "", "../dictionaries")
base.copy_dir(git_dir + "/document-templates/new", root_dir + "/converter/empty")
base.copy_dir(git_dir + "/desktop-apps/common/templates", root_dir + "/converter/templates")
# dictionaries
base.create_dir(root_dir + "/dictionaries")
base.copy_dir_content(git_dir + "/dictionaries", root_dir + "/dictionaries", "", ".git")
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries")
base.copy_dir(git_dir + "/core-fonts/opensans", root_dir + "/fonts")
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
base.copy_dir(git_dir + "/desktop-apps/common/package/fonts", root_dir + "/fonts")
base.copy_file(git_dir + "/desktop-apps/common/package/license/3dparty/3DPARTYLICENSE", root_dir + "/3DPARTYLICENSE")
# cef
build_dir_name = "build"
if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")):
build_dir_name = "build_107"
elif (0 == platform.find("mac")) and (config.check_option("config", "use_v8")):
build_dir_name = "build_103"
if not isWindowsXP:
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/build/*", root_dir)
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/" + build_dir_name + "/*", root_dir)
else:
base.copy_files(core_dir + "/Common/3dParty/cef/" + native_platform + "/build/*", root_dir)
base.copy_files(core_dir + "/Common/3dParty/cef/" + native_platform + "/" + build_dir_name + "/*", root_dir)
isUseQt = True
if (0 == platform.find("mac")) or (0 == platform.find("ios")):
isUseQt = False
# libraries
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "hunspell")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
base.copy_lib(build_libraries_path, root_dir, "hunspell")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
if (0 != platform.find("mac")):
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
if (0 == platform.find("mac")):
base.copy_dir(core_build_dir + "/bin/" + platform_postfix + "/editors_helper.app", root_dir + "/editors_helper.app")
@ -123,9 +150,7 @@ def make():
base.qt_copy_lib("Qt5Gui", root_dir)
base.qt_copy_lib("Qt5PrintSupport", root_dir)
base.qt_copy_lib("Qt5Svg", root_dir)
base.qt_copy_lib("Qt5Widgets", root_dir)
base.qt_copy_lib("Qt5Multimedia", root_dir)
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
base.qt_copy_lib("Qt5Widgets", root_dir)
base.qt_copy_lib("Qt5Network", root_dir)
base.qt_copy_lib("Qt5OpenGL", root_dir)
@ -134,13 +159,17 @@ def make():
base.qt_copy_plugin("imageformats", root_dir)
base.qt_copy_plugin("platforms", root_dir)
base.qt_copy_plugin("platforminputcontexts", root_dir)
base.qt_copy_plugin("printsupport", root_dir)
base.qt_copy_plugin("mediaservice", root_dir)
base.qt_copy_plugin("playlistformats", root_dir)
base.qt_copy_plugin("printsupport", root_dir)
base.qt_copy_plugin("platformthemes", root_dir)
base.qt_copy_plugin("xcbglintegrations", root_dir)
if not base.check_congig_option_with_platfom(platform, "libvlc"):
base.qt_copy_lib("Qt5Multimedia", root_dir)
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
base.qt_copy_plugin("mediaservice", root_dir)
base.qt_copy_plugin("playlistformats", root_dir)
base.qt_copy_plugin("styles", root_dir)
if (0 == platform.find("linux")):
@ -148,61 +177,54 @@ def make():
base.qt_copy_lib("Qt5X11Extras", root_dir)
base.qt_copy_lib("Qt5XcbQpa", root_dir)
base.qt_copy_icu(root_dir)
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
if not base.check_congig_option_with_platfom(platform, "libvlc"):
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
if (0 == platform.find("win")):
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/projicons/" + apps_postfix + "/projicons.exe", root_dir + "/DesktopEditors.exe")
if not isWindowsXP:
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/update-daemon/" + apps_postfix + "/updatesvc.exe", root_dir + "/updatesvc.exe")
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/online-installer/" + apps_postfix + "/online-installer.exe", root_dir + "/online-installer.exe")
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors.exe", root_dir + "/editors.exe")
base.copy_file(git_dir + "/desktop-apps/win-linux/res/icons/desktopeditors.ico", root_dir + "/app.ico")
elif (0 == platform.find("linux")):
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors", root_dir + "/DesktopEditors")
if ("" != base.get_env("VIDEO_PLAYER_VLC_DIR")):
vlc_dir = git_dir + "/desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc/"
if base.check_congig_option_with_platfom(platform, "libvlc"):
vlc_dir = git_dir + "/core/Common/3dParty/libvlc/build/" + platform + "/lib"
if (0 == platform.find("win")):
base.copy_file(vlc_dir + platform + "/bin/libvlc.dll", root_dir + "/libvlc.dll")
base.copy_file(vlc_dir + platform + "/bin/libvlccore.dll", root_dir + "/libvlccore.dll")
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.dll", root_dir + "/VLCQtCore.dll")
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.dll", root_dir + "/VLCQtWidgets.dll")
else:
base.copy_file(vlc_dir + platform + "/bin/libvlc.so", root_dir + "/libvlc.so")
base.copy_file(vlc_dir + platform + "/bin/libvlc.so.5", root_dir + "/libvlc.so.5")
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so", root_dir + "/libvlccore.so")
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so.8", root_dir + "/libvlccore.so.8")
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.so", root_dir + "/VLCQtCore.so")
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.so", root_dir + "/VLCQtWidgets.so")
base.copy_dir(vlc_dir + "/plugins", root_dir + "/plugins")
base.copy_files(vlc_dir + "/*.dll", root_dir)
base.copy_file(vlc_dir + "/vlc-cache-gen.exe", root_dir + "/vlc-cache-gen.exe")
elif (0 == platform.find("linux")):
base.copy_dir(vlc_dir + "/vlc/plugins", root_dir + "/plugins")
base.copy_file(vlc_dir + "/vlc/libcompat.a", root_dir + "/libcompat.a")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_pulse.so", "0.0.0")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_vdpau.so", "0.0.0")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_xcb_events.so", "0.0.0")
copy_lib_with_links(vlc_dir, root_dir, "libvlc.so", "5.6.1")
copy_lib_with_links(vlc_dir, root_dir, "libvlccore.so", "9.0.1")
base.copy_file(vlc_dir + "/vlc/vlc-cache-gen", root_dir + "/vlc-cache-gen")
if isWindowsXP:
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer/xp", root_dir, "videoplayer")
base.copy_lib(build_libraries_path + "/mediaplayer/xp", root_dir, "videoplayer")
else:
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer", root_dir, "videoplayer")
base.copy_dir(vlc_dir + platform + "/bin/plugins", root_dir + "/plugins")
base.copy_lib(build_libraries_path + "/mediaplayer", root_dir, "videoplayer")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
base.create_dir(root_dir + "/editors")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
base.delete_file(file)
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
# desktopeditors-help
root_help_dir = root_dir + "-help"
if (base.is_dir(root_help_dir)):
base.delete_dir(root_help_dir)
for i in ["documenteditor", "presentationeditor", "spreadsheeteditor"]:
base.copy_dir(
base_dir + "/js/" + branding + "/desktop/web-apps/apps/%s/main/resources/help" % i,
root_help_dir + "/editors/web-apps/apps/%s/main/resources/help" % i)
if ("1" != config.option("preinstalled-help") and not isWindowsXP):
# remove help from install until web-apps contains help
base.delete_dir(root_dir + "/editors/web-apps/apps/documenteditor/main/resources/help")
base.delete_dir(root_dir + "/editors/web-apps/apps/presentationeditor/main/resources/help")
base.delete_dir(root_dir + "/editors/web-apps/apps/spreadsheeteditor/main/resources/help")
base.create_dir(root_dir + "/editors/sdkjs-plugins")
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
if not isWindowsXP:
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True, isWindowsXP)
# remove some default plugins
if base.is_dir(root_dir + "/editors/sdkjs-plugins/speech"):
base.delete_dir(root_dir + "/editors/sdkjs-plugins/speech")
@ -214,14 +236,20 @@ def make():
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", root_dir + "/editors/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(root_dir + "/editors/sdkjs-plugins")
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "manager", True)
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt", root_dir + "/editors/sdkjs-plugins", "advanced2", True)
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/common/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}", root_dir + "/editors/sdkjs-plugins/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}")
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/engine/database/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}", root_dir + "/editors/sdkjs-plugins/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}")
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
base.copy_file(base_dir + "/js/" + branding + "/desktop/index.html", root_dir + "/index.html")
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
base.create_dir(root_dir + "/editors/webext")
base.copy_file(base_dir + "/js/" + branding + "/desktop/noconnect.html", root_dir + "/editors/webext/noconnect.html")
if isWindowsXP:
base.create_dir(root_dir + "/providers")
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers/onlyoffice", root_dir + "/providers/onlyoffice")
else:
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
isUseJSC = False
if (0 == platform.find("mac")):
@ -233,8 +261,9 @@ def make():
if isUseJSC:
base.delete_file(root_dir + "/converter/icudtl.dat")
base.create_x2t_js_cache(root_dir + "/converter", "desktop")
if (0 == platform.find("win")):
base.copy_lib(git_dir + "/desktop-apps/win-linux/3dparty/WinSparkle/" + platform, root_dir, "WinSparkle")
base.delete_file(root_dir + "/cef_sandbox.lib")
base.delete_file(root_dir + "/libcef.lib")

View File

@ -6,6 +6,18 @@ import base
def exclude_arch(directory, frameworks):
    """Strip the arm64 slice from each framework binary in-place using lipo."""
    for name in frameworks:
        binary = directory + "/" + name + ".framework/" + name
        base.cmd("lipo", ["-remove", "arm64", binary, "-o", binary])
    return
def deploy_fonts(git_dir, root_dir, platform=""):
    """Copy the bundled core fonts into ``<root_dir>/fonts``.

    Android builds additionally receive the dejavu and liberation families.

    :param git_dir: checkout root containing the ``core-fonts`` repository
    :param root_dir: deploy destination; fonts go to ``<root_dir>/fonts``
    :param platform: set to ``"android"`` to include the extra font families
    """
    fonts_dir = root_dir + "/fonts"
    base.create_dir(fonts_dir)
    base.copy_file(git_dir + "/core-fonts/ASC.ttf", fonts_dir + "/ASC.ttf")
    for family in ["asana", "caladea", "crosextra", "openoffice"]:
        base.copy_dir(git_dir + "/core-fonts/" + family, fonts_dir + "/" + family)
    if (platform == "android"):
        base.copy_dir(git_dir + "/core-fonts/dejavu", fonts_dir + "/dejavu")
        base.copy_dir(git_dir + "/core-fonts/liberation", fonts_dir + "/liberation")
    return
def make():
@ -42,28 +54,23 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
if (0 == platform.find("win") or 0 == platform.find("linux") or 0 == platform.find("mac")):
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
if ("ios" == platform) and config.check_option("config", "bundle_dylibs") and config.check_option("config", "simulator"):
exclude_arch(root_dir, ["kernel", "kernel_network", "UnicodeConverter", "graphics", "PdfWriter",
"PdfReader", "DjVuFile", "XpsFile", "HtmlFile2", "HtmlRenderer", "doctrenderer",
"Fb2File", "EpubFile", "x2t"])
# icu
if (0 == platform.find("win")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll")
@ -88,6 +95,8 @@ def make():
# correct ios frameworks
if ("ios" == platform):
base.generate_plist(root_dir)
deploy_fonts(git_dir, root_dir)
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
if (0 == platform.find("mac")):
base.mac_correct_rpath_x2t(root_dir)
@ -101,8 +110,11 @@ def make():
base.create_dir(root_dir)
# js
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
# fonts
deploy_fonts(git_dir, root_dir, "android")
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
# app
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
libs_dir = root_dir + "/lib"
base.create_dir(libs_dir + "/arm64-v8a")
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.so", libs_dir + "/arm64-v8a")

60
scripts/deploy_osign.py Normal file
View File

@ -0,0 +1,60 @@
#!/usr/bin/env python
import config
import base
def make():
    """Deploy the osign library for every configured platform.

    For each requested platform the per-platform "osign" library is copied
    into ``out/<platform>/<branding>/osign``; iOS builds also get a generated
    Info.plist.  When "android" is among the platforms, a combined multi-ABI
    layout is additionally assembled under ``out/android/<branding>/osign``
    from the per-ABI build outputs.
    """
    base_dir = base.get_script_dir() + "/../out"
    git_dir = base.get_script_dir() + "/../.."
    core_dir = git_dir + "/core"
    branding = config.branding()

    platforms = config.option("platform").split()
    for native_platform in platforms:
        if native_platform not in config.platforms:
            continue

        root_dir = base_dir + "/" + native_platform + "/" + branding + "/osign"
        # an external destination override means nothing should be deployed here
        if base.get_env("DESTDIR_BUILD_OVERRIDE") != "":
            return
        if base.is_dir(root_dir):
            base.delete_dir(root_dir)
        base.create_dir(root_dir)

        qt_dir = base.qt_setup(native_platform)  # kept for its environment side effects
        platform = native_platform

        core_build_dir = core_dir + "/build"
        if "" != config.option("branding"):
            core_build_dir += ("/" + config.option("branding"))
        platform_postfix = platform + base.qt_dst_postfix()

        # x2t
        base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "osign")

        # correct ios frameworks
        if ("ios" == platform):
            base.generate_plist(root_dir)

    if "android" in platforms:
        # make full version: merge the per-ABI .so files into one layout
        root_dir = base_dir + "/android/" + branding + "/osign"
        if base.is_dir(root_dir):
            base.delete_dir(root_dir)
        base.create_dir(root_dir)
        libs_dir = root_dir + "/lib"
        abi_sources = [("arm64-v8a", "android_arm64_v8a"),
                       ("armeabi-v7a", "android_armv7"),
                       ("x86", "android_x86"),
                       ("x86_64", "android_x86_64")]
        for abi, src_platform in abi_sources:
            base.create_dir(libs_dir + "/" + abi)
            base.copy_files(base_dir + "/" + src_platform + "/" + branding + "/osign/*.so",
                            libs_dir + "/" + abi)
    return

View File

@ -5,6 +5,7 @@ import base
import re
import shutil
import glob
from tempfile import mkstemp
def make():
@ -40,24 +41,22 @@ def make():
build_server_dir = root_dir + '/server'
server_dir = base.get_script_dir() + "/../../server"
bin_server_dir = server_dir + "/build/server"
base.create_dir(build_server_dir + '/DocService')
base.copy_dir(bin_server_dir + '/Common/config', build_server_dir + '/Common/config')
base.copy_dir(server_dir + '/Common/config', build_server_dir + '/Common/config')
base.create_dir(build_server_dir + '/DocService')
base.copy_exe(bin_server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
base.copy_exe(server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
base.create_dir(build_server_dir + '/FileConverter')
base.copy_exe(bin_server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
base.copy_exe(server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
base.create_dir(build_server_dir + '/Metrics')
base.copy_exe(bin_server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
base.copy_dir(bin_server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
base.copy_exe(server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
base.copy_dir(server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
base.create_dir(build_server_dir + '/Metrics/node_modules/modern-syslog/build/Release')
base.copy_file(bin_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
base.copy_file(server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
qt_dir = base.qt_setup(native_platform)
platform = native_platform
@ -75,22 +74,23 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DocxRenderer")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", converter_dir + "/cmap.bin")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "x2t")
if (native_platform == "linux_64"):
base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server")
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server", "", "../../../dictionaries")
# icu
if (0 == platform.find("win")):
@ -115,37 +115,43 @@ def make():
js_dir = root_dir
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
+ glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
base.delete_file(file)
base.create_x2t_js_cache(converter_dir, "server")
# add embed worker code
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
# plugins
base.create_dir(js_dir + "/sdkjs-plugins")
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
base.copy_marketplace_plugin(js_dir + "/sdkjs-plugins", False, True)
if ("1" == config.option("preinstalled-plugins")):
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
else:
base.generate_sdkjs_plugin_list(js_dir + "/sdkjs-plugins/plugin-list-default.json")
base.create_dir(js_dir + "/sdkjs-plugins/v1")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", js_dir + "/sdkjs-plugins/v1/plugins.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", js_dir + "/sdkjs-plugins/v1/plugins-ui.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", js_dir + "/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(js_dir + "/sdkjs-plugins")
base.clone_marketplace_plugin(root_dir + "/sdkjs-plugins")
# tools
tools_dir = root_dir + "/server/tools"
base.create_dir(tools_dir)
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allfontsgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allthemesgen")
if ("1" != config.option("preinstalled-plugins")):
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "pluginsmanager")
branding_dir = server_dir + "/branding"
if("" != config.option("branding") and "onlyoffice" != config.option("branding")):
branding_dir = git_dir + '/' + config.option("branding") + '/server'
#dictionaries
spellchecker_dictionaries = root_dir + '/dictionaries'
spellchecker_dictionaries_files = server_dir + '/../dictionaries/*_*'
base.create_dir(spellchecker_dictionaries)
base.copy_files(spellchecker_dictionaries_files, spellchecker_dictionaries)
base.copy_dictionaries(server_dir + "/../dictionaries", root_dir + "/dictionaries")
if (0 == platform.find("win")):
exec_ext = '.exe'
@ -204,15 +210,15 @@ def make():
base.delete_dir(root_dir_snap)
base.create_dir(root_dir_snap)
base.copy_dir(root_dir, root_dir_snap)
base.copy_dir(bin_server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
base.copy_dir(bin_server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
base.copy_dir(bin_server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
base.copy_dir(server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
base.copy_dir(server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
base.copy_dir(server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
base.delete_file(root_dir_snap + '/server/DocService/docservice')
base.copy_dir(bin_server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
base.copy_dir(bin_server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
base.copy_dir(server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
base.copy_dir(server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
base.delete_file(root_dir_snap + '/server/FileConverter/converter')
base.copy_dir(bin_server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
base.copy_dir(bin_server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
base.copy_dir(server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
base.copy_dir(server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
if (base.is_dir(root_dir_snap_example)):
base.delete_dir(root_dir_snap_example)
base.create_dir(root_dir_snap_example)

View File

@ -5,9 +5,6 @@ import base
import os
import json
def get_core_url(arch, branch):
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
def make():
git_dir = base.get_script_dir() + "/../.."
old_cur = os.getcwd()
@ -18,16 +15,10 @@ def make():
os.chdir(work_dir)
arch = "x64"
arch2 = "_64"
if ("windows" == base.host_platform()) and not base.host_platform_is64():
arch = "x86"
arch2 = "_32"
url = get_core_url(arch, config.option("branch"))
url = base.get_autobuild_version("core", "", config.option("branch"))
data_url = base.get_file_last_modified_url(url)
if (data_url == "" and config.option("branch") != "develop"):
url = get_core_url(arch, "develop")
url = base.get_autobuild_version("core", "", "develop")
data_url = base.get_file_last_modified_url(url)
old_data_url = base.readFile("./core.7z.data")
@ -49,25 +40,25 @@ def make():
base.extract("./core.7z", "./")
base.writeFile("./core.7z.data", data_url)
platform = ""
if ("windows" == base.host_platform()):
platform = "win" + arch2
else:
platform = base.host_platform() + arch2
base.copy_files("./core/*", "./")
else:
print("-----------------------------------------------------------")
print("Core is up to date. ---------------------------------------")
print("-----------------------------------------------------------")
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/")
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
if not base.is_dir(git_dir + "/sdkjs-plugins"):
base.create_dir(git_dir + "/sdkjs-plugins")
if not base.is_dir(git_dir + "/sdkjs-plugins/v1"):
base.create_dir(git_dir + "/sdkjs-plugins/v1")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", git_dir + "/sdkjs-plugins/v1/plugins.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", git_dir + "/sdkjs-plugins/v1/plugins-ui.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", git_dir + "/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
base.clone_marketplace_plugin(git_dir + "/sdkjs-plugins")
base.copy_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False)
if not base.is_dir(git_dir + "/fonts"):
base.create_dir(git_dir + "/fonts")
@ -101,7 +92,8 @@ def make():
server_addons = []
if (config.option("server-addons") != ""):
server_addons = config.option("server-addons").rsplit(", ")
if ("server-lockstorage" in server_addons):
#server-lockstorage is private
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
server_config["editorDataStorage"] = "editorDataRedis"
sdkjs_addons = []
@ -125,6 +117,8 @@ def make():
sql["type"] = config.option("sql-type")
if (config.option("db-port") != ""):
sql["dbPort"] = config.option("db-port")
if (config.option("db-name") != ""):
sql["dbName"] = config.option("db-name")
if (config.option("db-user") != ""):
sql["dbUser"] = config.option("db-user")
if (config.option("db-pass") != ""):
@ -137,12 +131,17 @@ def make():
#site url
example_config = {}
example_config["port"] = 80
if (base.host_platform() == "linux"):
example_config["port"] = 3000
else:
example_config["port"] = 80
example_config["siteUrl"] = "http://" + config.option("siteUrl") + ":8000/"
example_config["apiUrl"] = "web-apps/apps/api/documents/api.js"
example_config["preloaderUrl"] = "web-apps/apps/api/documents/cache-scripts.html"
json_file = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/local-development-" + base.host_platform() + ".json"
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
json_dir = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/"
json_file = json_dir + "/local-development-" + base.host_platform() + ".json"
if base.is_exist(json_dir):
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
os.chdir(old_cur)
return

View File

@ -66,6 +66,28 @@ class CDependencies:
res += ['--remove-path', item]
return res
def check__docker_dependencies():
if (host_platform == 'windows' and not check_vc_components()):
return False
if (host_platform == 'mac'):
return True
checksResult = CDependencies()
checksResult.append(check_nodejs())
checksResult.append(check_7z())
if (len(checksResult.install) > 0):
install_args = ['install.py']
install_args += checksResult.get_uninstall()
install_args += checksResult.get_removepath()
install_args += checksResult.get_install()
base_dir = base.get_script_dir(__file__)
install_args[0] = './scripts/develop/' + install_args[0]
if (host_platform == 'windows'):
code = libwindows.sudo(unicode(sys.executable), install_args)
elif (host_platform == 'linux'):
get_updates()
base.cmd_in_dir(base_dir + "/../../", 'python', install_args, False)
def check_dependencies():
if (host_platform == 'windows' and not check_vc_components()):
return False
@ -168,21 +190,21 @@ def check_nodejs():
nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
print('Installed Node.js version: ' + nodejs_version[1:])
nodejs_min_version = '14.14'
nodejs_min_version = '22'
nodejs_min_version_minor = 0
major_minor_min_version = nodejs_min_version.split('.')
nodejs_min_version_major = int(major_minor_min_version[0])
if len(major_minor_min_version) > 1:
nodejs_min_version_minor = int(major_minor_min_version[1])
nodejs_max_version = '14'
nodejs_max_version = ""
nodejs_max_version_minor = float("inf")
major_minor_max_version = nodejs_max_version.split('.')
nodejs_max_version_major = int(major_minor_max_version[0])
# nodejs_max_version_major = int(major_minor_max_version[0])
nodejs_max_version_major = float("inf")
if len(major_minor_max_version) > 1:
nodejs_max_version_minor = int(major_minor_max_version[1])
if (nodejs_min_version_major > nodejs_cur_version_major or nodejs_cur_version_major > nodejs_max_version_major):
print('Installed Node.js version must be 14.14 to 14.x')
isNeedReinstall = True
elif (nodejs_min_version_major == nodejs_cur_version_major):
if (nodejs_min_version_minor > nodejs_cur_version_minor):
@ -192,7 +214,7 @@ def check_nodejs():
isNeedReinstall = True
if (True == isNeedReinstall):
print('Installed Node.js version must be 14.14 to 14.x')
print('Installed Node.js version must be 22 or higher.')
if (host_platform == 'windows'):
dependence.append_uninstall('Node.js')
dependence.append_install('Node.js')
@ -209,18 +231,24 @@ def check_java():
dependence = CDependencies()
base.print_info('Check installed Java')
java_version = base.run_command('java -version')['stderr']
java_info = base.run_command('java -version')['stderr']
if (java_version.find('64-Bit') != -1):
version_pos = java_info.find('version "')
java_v = 0
if (version_pos != -1):
try:
java_v = float(java_info[version_pos + len('version "'): version_pos + len('version "') + 2])
except:
pass
if (java_info.find('64-Bit') != -1 and java_v >= 11):
print('Installed Java is valid')
return dependence
if (java_version.find('32-Bit') != -1):
print('Installed Java must be x64')
else:
print('Java not found')
dependence.append_install('Java')
else:
print('Requires Java version 11+ x64-bit')
dependence.append_install('Java')
if (version_pos != -1):
dependence.append_uninstall('Java')
return dependence
def get_erlang_path_to_bin():
@ -455,8 +483,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
mysqlPath += 'bin'
return mysqlPath
def get_mysqlLoginSrting():
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
def get_mysqlLoginString():
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
def get_mysqlServersInfo():
arrInfo = []
@ -483,14 +511,14 @@ def get_mysqlServersInfo():
def check_mysqlServer():
base.print_info('Check MySQL Server')
dependence = CDependencies()
mysqlLoginSrt = get_mysqlLoginSrting()
mysqlLoginSrt = get_mysqlLoginString()
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
if (host_platform != 'windows'):
result = os.system(mysqlLoginSrt + ' -e "exit"')
if (result == 0):
connectionResult = base.run_command(connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
print('MySQL configuration is valid')
dependence.sqlPath = 'mysql'
return dependence
@ -507,11 +535,13 @@ def check_mysqlServer():
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
if (connectionResult.find('port') != -1 and connectionResult.find(config.option("db-port")) != -1):
print(mysql_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
print(mysql_full_name + 'configuration is not valid')
print(mysql_full_name + 'configuration is not valid:' + connectionResult)
# if path exists, then further removal and installation fails(according to startup statistics). it is better to fix issue manually.
return dependence
print('Valid MySQL Server not found')
dependence.append_uninstall('MySQL Server')
@ -531,23 +561,43 @@ def check_mysqlServer():
return dependence
def check_MySQLConfig(mysqlPath = ''):
result = True
mysqlLoginSrt = get_mysqlLoginSrting()
mysqlLoginSrt = get_mysqlLoginString()
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
print('Database onlyoffice not found')
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
print('Database "' + config.option("db-name") + '" not found')
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
if (not result):
return False
print('Creating ' + config.option("db-name") + ' tables ...')
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
print('Password encryption is not valid')
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
return result
def execMySQLScript(mysql_path_to_bin, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginSrting()
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
mysqlLoginSrt = get_mysqlLoginString()
print('CREATE DATABASE ' + dbName + ';')
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
print('failed CREATE DATABASE ' + dbName + ';')
return False
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# return False
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# return False
return True
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginString()
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
if (code != 0):
print('Execution failed!')
return False
@ -556,7 +606,7 @@ def execMySQLScript(mysql_path_to_bin, scriptPath):
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
print('Setting MySQL password encrypting...')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
if (code != 0):
print('Setting password encryption failed!')
return False
@ -583,7 +633,7 @@ def get_postrgre_path_to_bin(postgrePath = ''):
def get_postgreLoginSrting(userName):
if (host_platform == 'windows'):
return 'psql -U' + userName + ' '
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U' + userName + ' -hlocalhost '
def get_postgreSQLInfoByFlag(flag):
arrInfo = []
@ -619,7 +669,7 @@ def check_postgreSQL():
result = os.system(postgreLoginSrt + ' -c "\q"')
connectionResult = base.run_command(connectionString)['stdout']
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
if (result != 0 or connectionResult.find(config.option("db-port")) == -1):
print('Valid PostgreSQL not found!')
dependence.append_install('PostgreSQL')
dependence.append_uninstall('PostgreSQL')
@ -629,7 +679,7 @@ def check_postgreSQL():
return dependence
arrInfo = get_postgreSQLInfo()
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
base.set_env('PGPASSWORD', config.option("db-pass"))
for info in arrInfo:
if (base.is_dir(info['Location']) == False):
continue
@ -637,7 +687,7 @@ def check_postgreSQL():
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
if (connectionResult.find(config.option("db-port")) != -1):
print(postgre_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
@ -655,12 +705,12 @@ def check_postgreSQL():
def check_postgreConfig(postgrePath = ''):
result = True
if (host_platform == 'windows'):
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
base.set_env('PGPASSWORD', config.option("db-pass"))
rootUser = install_params['PostgreSQL']['root']
dbUser = install_params['PostgreSQL']['dbUser']
dbName = install_params['PostgreSQL']['dbName']
dbPass = install_params['PostgreSQL']['dbPass']
dbUser = config.option("db-user")
dbName = config.option("db-name")
dbPass = config.option("db-pass")
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
postgreLoginRoot = get_postgreLoginSrting(rootUser)
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
@ -677,7 +727,7 @@ def check_postgreConfig(postgrePath = ''):
base.print_info('Creating ' + dbName + ' user...')
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find(config.option("db-name")) == -1):
print('Database ' + dbName + ' not found')
base.print_info('Creating ' + dbName + ' database...')
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
@ -825,6 +875,7 @@ def installProgram(sName):
print(install_command)
code = os.system(install_command)
base.delete_file(file_name)
elif (host_platform == 'linux'):
if (sName in install_special):
code = install_special[sName]()
@ -855,13 +906,13 @@ def install_gruntcli():
def install_mysqlserver():
if (host_platform == 'windows'):
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
elif (host_platform == 'linux'):
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
code = os.system('sudo apt-get -y install zsh htop') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
return os.system('yes | sudo apt install mysql-server') and code
return 1
@ -883,7 +934,7 @@ def install_postgresql():
file_name = "install.exe"
base.download(download_url, file_name)
base.print_info("Install PostgreSQL...")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
else:
base.print_info("Install PostgreSQL...")
install_command = 'sudo apt install postgresql -y'
@ -894,12 +945,12 @@ def install_postgresql():
if (host_platform == 'windows'):
base.delete_file(file_name)
else:
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
return code
def install_nodejs():
os.system('curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -')
os.system('curl -sSL https://deb.nodesource.com/setup_22.x | sudo -E bash -')
base.print_info("Install node.js...")
install_command = 'yes | sudo apt install nodejs'
print(install_command)
@ -908,8 +959,8 @@ def install_nodejs():
downloads_list = {
'Windows': {
'Git': 'https://github.com/git-for-windows/git/releases/download/v2.29.0.windows.1/Git-2.29.0-64-bit.exe',
'Node.js': 'https://nodejs.org/download/release/v14.17.2/node-v14.17.2-x64.msi',
'Java': 'https://javadl.oracle.com/webapps/download/AutoDL?BundleId=242990_a4634525489241b9a9e1aa73d9e118e6',
'Node.js': 'https://nodejs.org/dist/v22.13.1/node-v22.13.1-x64.msi',
'Java': 'https://aka.ms/download-jdk/microsoft-jdk-11.0.18-windows-x64.msi',
'RabbitMQ': 'https://github.com/rabbitmq/rabbitmq-server/releases/download/v3.8.9/rabbitmq-server-3.8.9.exe',
'Erlang': 'http://erlang.org/download/otp_win64_23.1.exe',
'VC2019x64': 'https://aka.ms/vs/17/release/vc_redist.x64.exe',
@ -944,23 +995,14 @@ uninstall_special = {
install_params = {
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
'Git': '/VERYSILENT /NORESTART',
'Java': '/s',
'MySQLServer': {
'port': '3306',
'user': 'root',
'pass': 'onlyoffice',
'version': '8.0.21'
},
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
'PostgreSQL': {
'root': 'postgres',
'dbPort': '5432',
'dbName': 'onlyoffice',
'dbUser': 'onlyoffice',
'dbPass': 'onlyoffice'
'root': 'postgres'
}
}
uninstall_params = {
'PostgreSQL': '--mode unattended --unattendedmodeui none'
}

View File

@ -10,11 +10,15 @@ import config_server as develop_config_server
base_dir = base.get_script_dir(__file__)
def make():
if ("1" != config.option("develop")):
return
if not dependence.check_dependencies():
exit(1)
def build_docker_server():
dependence.check__docker_dependencies()
build_develop_server()
def build_docker_sdk_web_apps(dir):
dependence.check__docker_dependencies()
build_js.build_js_develop(dir)
def build_develop_server():
build_server.build_server_develop()
build_js.build_js_develop(base_dir + "/../../..")
develop_config_server.make()
@ -22,5 +26,12 @@ def make():
branding_develop_script_dir = base_dir + "/../../../" + config.option("branding") + "/build_tools/scripts"
if base.is_file(branding_develop_script_dir + "/develop.py"):
base.cmd_in_dir(branding_develop_script_dir, "python", ["develop.py"], True)
def make():
if ("1" != config.option("develop")):
return
if not dependence.check_dependencies():
exit(1)
build_develop_server()
exit(0)

View File

@ -6,6 +6,9 @@ import os
import base
import dependence
import traceback
import develop
base_dir = base.get_script_dir(__file__)
def install_module(path):
base.print_info('Install: ' + path)
@ -41,56 +44,98 @@ def start_linux_services():
os.system('sudo service rabbitmq-server restart')
def run_integration_example():
base.cmd_in_dir('../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
if base.is_exist(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs'):
base.cmd_in_dir(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
def start_linux_services():
base.print_info('Restart MySQL Server')
def update_config(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
def make_start():
base.configure_common_apps()
platform = base.host_platform()
if ("windows" == platform):
dependence.check_pythonPath()
dependence.check_gitPath()
restart_win_rabbit()
elif ("mac" == platform):
start_mac_services()
elif ("linux" == platform):
start_linux_services()
def make_configure(args):
base.print_info('Build modules')
update_config(args)
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
def make_install():
platform = base.host_platform()
run_integration_example()
base.create_dir(base_dir + '/../../../server/App_Data')
install_module(base_dir + '/../../../server/DocService')
install_module(base_dir + '/../../../server/Common')
install_module(base_dir + '/../../../server/FileConverter')
def make_run():
platform = base.host_platform()
base.set_env('NODE_ENV', 'development-' + platform)
base.set_env('NODE_CONFIG_DIR', '../Common/config')
if ("mac" == platform):
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
elif ("linux" == platform):
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
run_module(base_dir + '/../../../server/DocService', ['sources/server.js'])
#run_module(base_dir + '/../../../server/DocService', ['sources/gc.js'])
run_module(base_dir + '/../../../server/FileConverter', ['sources/convertermaster.js'])
#run_module(base_dir + '/../../../server/SpellChecker', ['sources/server.js'])
def run_docker_server(args = []):
try:
make_start()
develop.build_docker_server()
make_install()
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)
except KeyboardInterrupt:
pass
except:
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
def run_docker_sdk_web_apps(dir):
try:
develop.build_docker_sdk_web_apps(dir)
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)
except KeyboardInterrupt:
pass
except:
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
def make(args = []):
try:
base.configure_common_apps()
platform = base.host_platform()
if ("windows" == platform):
dependence.check_pythonPath()
dependence.check_gitPath()
restart_win_rabbit()
elif ("mac" == platform):
start_mac_services()
elif ("linux" == platform):
start_linux_services()
make_start()
make_configure(args)
make_install()
make_run()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
base.print_info('Build modules')
if ("linux" == platform):
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
base.cmd_in_dir('../../', 'python', ['make.py'])
run_integration_example()
base.create_dir('../../../server/App_Data')
install_module('../../../server/DocService')
install_module('../../../server/Common')
install_module('../../../server/FileConverter')
base.set_env('NODE_ENV', 'development-' + platform)
base.set_env('NODE_CONFIG_DIR', '../Common/config')
if ("mac" == platform):
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
elif ("linux" == platform):
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
run_module('../../../server/DocService', ['sources/server.js'])
# run_module('../../../server/DocService', ['sources/gc.js'])
run_module('../../../server/FileConverter', ['sources/convertermaster.js'])
# run_module('../../../server/SpellChecker', ['sources/server.js'])
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)

View File

@ -0,0 +1,162 @@
# license_checker
## Overview
**license_checker** allows you to automatically check
licenses inside specified code files.
## How to use
### Running
**Note**: Python 3.9 or above is required
(otherwise `TypeError: 'type' object is not subscriptable`)
* Linux
```bash
python3 license_checker.py
```
* Windows
```bash
python license_checker.py
```
## How to configure
The checker settings are specified in the `config.json`.
The path to the license template is indicated there.
### How to specify a license template
The license template is a plain text
file where the license text is indicated
as you would like to see the license at
the beginning of the file.
### How to configure `config.json`
#### Config parameters
* `basePath` specifies which folder the
paths will be relative to.
**For example:**
```json
"basePath": "../../../"
```
* `reportFolder` specifies in which folder to
save text files with reports.
**For example:**
```json
"reportFolder": "build_tools/scripts/license_checker/reports"
```
* `printChecking` specifies whether to output
information about which file is
being checked to the console.
**For example:**
```json
"printChecking": false
```
* `printReports` specifies whether to output
reports to the console.
**For example:**
```json
"printReports": false
```
* `fix` specifies which categories of reports
should be repaired automatically.
Possible array values:
`"OUTDATED"`,
`"NO_LICENSE"`,
`"INVALID_LICENSE"`,
`"LEN_MISMATCH"`.
**For example:**
```json
"fix": ["OUTDATED", "NO_LICENSE"],
```
Automatically repair files where the license is outdated or not found.
* `configs` license check and repair configurations.
* `dir` folder to check.
**For example:**
```json
"dir": "sdkjs"
```
* `fileExtensions` file extensions to check.
**For example:**
```json
"fileExtensions": [".js"]
```
* `licensePath` specifies the path to the license template.
**For example:**
```json
"licensePath": "header.license"
```
* `ignoreListDir` folder paths to ignore.
**For example:**
```json
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
"sdkjs/configs",
"sdkjs/common/AllFonts.js",
"sdkjs/slide/themes/themes.js"
]
```
* `ignoreListDirName` folder names to ignore.
**For example:**
```json
"ignoreListDirName": [
"node_modules",
"vendor"
]
```
* `ignoreListFile` file paths to ignore.
**For example:**
```json
"ignoreListFile": [
"sdkjs/develop/awesomeFileToIgnore.js",
]
```
* `allowListFile` file paths to allow. It is needed if you ignore the directory, but there is a file in it that needs to be checked.
**For example:**
```json
"ignoreListDir": [
"sdkjs/develop"
],
"allowListFile": [
"sdkjs/develop/awesomeFileToAllow.js",
]
```
Any number of configurations can be
specified, they can overlap
if we need to check
files in the same folder in different ways.

View File

@ -0,0 +1,203 @@
{
"basePath": "../../../",
"reportFolder": "build_tools/scripts/license_checker/reports",
"printChecking": false,
"printReports": false,
"fix": ["OUTDATED"],
"configs": [
{
"dir": "core",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
"licensePath": "header.license",
"ignoreListDir": [
"core/build",
"core/Common/cfcpp/test",
"core/Common/js",
"core/DesktopEditor/agg-2.4",
"core/DesktopEditor/cximage",
"core/DesktopEditor/freetype_names/freetype-2.5.3",
"core/DesktopEditor/freetype-2.5.2",
"core/DesktopEditor/freetype-2.10.4",
"core/DesktopEditor/raster/JBig2",
"core/DesktopEditor/raster/Jp2",
"core/DesktopEditor/xml/libxml2",
"core/DesktopEditor/xmlsec",
"core/DjVuFile/libdjvu",
"core/DjVuFile/wasm",
"core/EpubFile",
"core/OOXML/PPTXFormat/Limit/pri",
"core/Fb2File",
"core/HtmlFile2",
"core/Apple",
"core/HwpFile",
"core/OdfFile/Common/utf8cpp",
"core/OfficeUtils/js/emsdk",
"core/OfficeUtils/src/zlib-1.2.11",
"core/PdfFile/lib",
"core/UnicodeConverter/icubuilds-mac",
"core/UnicodeConverter/icubuilds-win32"
],
"ignoreListDirName": [
"node_modules",
"vendor",
"3dParty"
],
"ignoreListFile": [
"core/Test/CoAuthoring/settings.js",
"core/OdfFile/Projects/Linux/precompiled.h",
"core/MsBinaryFile/Projects/XlsFormatLib/Linux/precompiled.h"
],
"allowListFile": [
"core/DesktopEditor/freetype_names/FontMaps/FontMaps.cpp",
"core/Common/3dParty/openssl/test/main.cpp",
"core/Common/3dParty/openssl/common/common_openssl.h",
"core/Common/3dParty/openssl/common/common_openssl.cpp"
]
},
{
"dir": "core-ext",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"ignoreListDir": [
"core-ext/AutoTester",
"core-ext/cell_android",
"core-ext/desktop-sdk-private",
"core-ext/docbuilder",
"core-ext/Registration",
"core-ext/slide_android",
"core-ext/test",
"core-ext/word_android",
"core-ext/word_ios"
],
"ignoreListFile": [
"core-ext/native_base/json.hpp",
"core-ext/native_base/android_base/libeditors/src/main/cpp/workaround/swab/swab.h"
]
},
{
"dir": "sdkjs",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
"sdkjs/configs"
],
"ignoreListDirName": [
"node_modules",
"vendor"
],
"ignoreListFile": [
"sdkjs/common/externs/jquery-3.2.js",
"sdkjs/common/externs/socket.io.js",
"sdkjs/common/Native/jquery_native.js",
"sdkjs/common/AllFonts.js",
"sdkjs/slide/themes/themes.js"
]
},
{
"dir": "sdkjs-forms",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "sdkjs-ooxml",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "web-apps",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor",
"search"
],
"ignoreListDir": [
"web-apps/apps/common/mobile",
"web-apps/apps/common/main/lib/mods",
"web-apps/apps/documenteditor/mobile",
"web-apps/apps/spreadsheeteditor/mobile",
"web-apps/apps/presentationeditor/mobile",
"web-apps/build/plugins/grunt-inline"
],
"ignoreListFile": [
"web-apps/apps/api/documents/api.js",
"web-apps/apps/common/main/lib/core/application.js",
"web-apps/apps/common/main/lib/core/keymaster.js",
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
]
},
{
"dir": "web-apps-mobile",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "server",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDir": [
"server/FileConverter/bin"
],
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-lockstorage",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-license",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-license-key",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "editors-ios",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"ignoreListDirName": [
"vendor",
"Vendor",
"3dParty"
],
"allowListFile": [
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.h",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.m",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.h",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.m"
]
}
]
}

View File

@ -0,0 +1,31 @@
/*
* (c) Copyright Ascensio System SIA 2010-2024
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/

View File

@ -0,0 +1,339 @@
import codecs
import enum
import json
import os
import re
from typing import Optional
CONFIG_PATH = 'config.json'
class ErrorType(enum.Enum):
    """Categories of license problems a check can report."""
    INVALID_LICENSE = enum.auto()
    NO_LICENSE = enum.auto()
    OUTDATED = enum.auto()
    LEN_MISMATCH = enum.auto()
# Maps the string values accepted in config.json's "fix" array to the
# corresponding ErrorType members.
FIX_TYPES = {
    'OUTDATED': ErrorType.OUTDATED,
    'NO_LICENSE': ErrorType.NO_LICENSE,
    'INVALID_LICENSE': ErrorType.INVALID_LICENSE,
    'LEN_MISMATCH': ErrorType.LEN_MISMATCH
}
class Config(object):
    """
    License checker configuration for one directory.

    Loads the license template from ``licensePath`` on construction and
    derives the markers that delimit a license block in a source file:
    the first and last non-blank lines of the template.

    Attributes:
        dir: Directory to check.
        fileExtensions: file extensions to check.
        ignoreListDir: Ignored folder paths.
        ignoreListDirName: Ignored folder names.
        ignoreListFile: Ignored file paths.
        allowListFile: allow file paths.
    """
    def __init__(self,
                 dir: str,
                 fileExtensions: list[str],
                 licensePath: str = 'header.license',
                 allowListFile: Optional[list[str]] = None,
                 ignoreListDir: Optional[list[str]] = None,
                 ignoreListDirName: Optional[list[str]] = None,
                 ignoreListFile: Optional[list[str]] = None) -> None:
        """Build a configuration and read the license template.

        Raises:
            Exception: if the template file cannot be read or is empty.
        """
        self._dir = dir
        self._fileExtensions = fileExtensions
        # None defaults instead of mutable [] defaults: each instance must own
        # its own lists (a shared default list is the classic Python pitfall).
        self._allowListFile = [] if allowListFile is None else allowListFile
        self._ignoreListDir = [] if ignoreListDir is None else ignoreListDir
        self._ignoreListDirName = [] if ignoreListDirName is None else ignoreListDirName
        self._ignoreListFile = [] if ignoreListFile is None else ignoreListFile
        # Read license template.
        with open(licensePath, 'r', encoding="utf8") as file:
            lines = file.readlines()
        if not lines:
            raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is it empty?')
        non_empty_lines = [s for s in lines if not s.isspace()]
        # First/last non-blank template lines mark where a license block
        # starts and ends inside a checked file.
        self._startMultiComm = non_empty_lines[0]
        self._endMultiComm = non_empty_lines[-1]
        self._license_lines = lines

    def getDir(self) -> str:
        return self._dir

    def getFileExtensions(self) -> list[str]:
        return self._fileExtensions

    def getStartMultiComm(self) -> str:
        return self._startMultiComm

    def getEndMultiComm(self) -> str:
        return self._endMultiComm

    def getLicense(self) -> list[str]:
        return self._license_lines

    def getAllowListFile(self) -> list[str]:
        return self._allowListFile

    def getIgnoreListDir(self) -> list[str]:
        return self._ignoreListDir

    def getIgnoreListDirName(self) -> list[str]:
        return self._ignoreListDirName

    def getIgnoreListFile(self) -> list[str]:
        return self._ignoreListFile
# --- Load global checker settings from config.json --------------------------
with open(CONFIG_PATH, 'r') as j:
    _json: dict = json.load(j)
BASE_PATH: str = _json.get('basePath') or '../../../'
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'
if (_json.get('fix')):
    try:
        # Translate the config's string names into ErrorType members.
        FIX: list[ErrorType] = list(map(lambda x: FIX_TYPES[x], _json.get('fix')))
    except KeyError:
        raise Exception(f'KeyError. "fix" cannot process value. It must be an array of strings. Check {CONFIG_PATH}. Possible array values: "OUTDATED", "NO_LICENSE", "INVALID_LICENSE", "LEN_MISMATCH"')
else:
    # NOTE: FIX is the falsy sentinel False (not an empty list) when "fix" is
    # absent; Fixer.fix() and fix() below treat it as "repair everything".
    FIX = False
PRINT_CHECKING: bool = _json.get('printChecking')
PRINT_REPORTS: bool = _json.get('printReports')
CONFIGS: list[Config] = []
for i in _json.get('configs'):
    CONFIGS.append(Config(**i))
# All configured paths are resolved relative to basePath from here on.
os.chdir(BASE_PATH)
class Error(object):
    """A single license problem: an ErrorType plus its human-readable text."""

    # Message table shared by all instances; resolved through self below.
    _errorMessages = {
        ErrorType.INVALID_LICENSE: 'Detected license is invalid',
        ErrorType.NO_LICENSE: 'The license was not found',
        ErrorType.OUTDATED: 'Detected license is outdated',
        ErrorType.LEN_MISMATCH: 'Detected license length does not match pattern'
    }

    def __init__(self, errorType: ErrorType) -> None:
        self._errorType = errorType

    def getErrorType(self) -> ErrorType:
        return self._errorType

    def getErrorMessage(self) -> str:
        return self._errorMessages.get(self._errorType)
class Report(object):
    """Associates a checked file path with the Error found in it."""

    def __init__(self, pathToFile: str, error: Error, message: str = '') -> None:
        self._pathToFile = pathToFile
        self._error = error
        self._message = message

    def getPathToFile(self) -> str:
        return self._pathToFile

    def getError(self) -> Error:
        return self._error

    def getMessage(self) -> str:
        return self._message

    def report(self) -> str:
        # One human-readable line: "<path>: <error text>. <details>."
        path = self.getPathToFile()
        text = self.getError().getErrorMessage()
        details = self.getMessage()
        return '{}: {}. {}.'.format(path, text, details)
class Checker(object):
    """Checks the files of one Config for a valid license header and
    accumulates a Report per problem found."""

    def __init__(self, config: Config) -> None:
        self._config = config
        self._reports: list[Report] = []

    def getReports(self):
        return self._reports

    def _checkLine(self, line: str, prefix: str) -> bool:
        """Checks if a line has a prefix."""
        """Trim to catch invalid license without leading spaces"""
        prefix = prefix.lstrip()
        # NOTE(review): the pattern is unanchored, so the prefix matches
        # anywhere in the line, not only at its start.
        if (re.search(re.escape(prefix), line)):
            return True
        else:
            return False

    def findLicense(self, lines: list[str]) -> list[str]:
        """Looks for consecutive comments in a list of strings."""
        result = []
        isStarted = False
        for line in lines:
            if line == '\n': continue  # blank lines never terminate the scan
            if (self._checkLine(line=line, prefix=self._config.getStartMultiComm())):
                result.append(line)
                isStarted = True
            elif(self._checkLine(line=line, prefix=self._config.getEndMultiComm())):
                # End marker closes the block and stops the scan.
                result.append(line)
                break
            elif (isStarted):
                result.append(line)
            else:
                # First non-blank line is not a comment marker: no license.
                break
        return result

    def _checkLicense(self, test: list[str], pathToFile: str) -> Optional[Report]:
        """Compare an extracted header against the template.

        Returns a Report describing the mismatch, or None when the header
        matches the template exactly.
        """
        license = self._config.getLicense()
        if len(license) != len(test):
            return Report(pathToFile=pathToFile,
                          error=Error(errorType=ErrorType.LEN_MISMATCH),
                          message=f'Found {len(test)} lines, expected {len(license)}')
        invalidLinesCount = 0
        lastWrongLine = 0
        for i in range(len(license)):
            if (license[i] != test[i]):
                invalidLinesCount += 1
                lastWrongLine = i
        if (invalidLinesCount == 1):
            # Exactly one differing line: check whether it is just an
            # outdated copyright year (compare the last 4-digit number).
            r = r'\d\d\d\d'
            testDate = re.findall(r, test[lastWrongLine])
            licenseDate = re.findall(r, license[lastWrongLine])
            if not (testDate and licenseDate):
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.INVALID_LICENSE),
                              message=f'Something wrong...')
            testLastYear = int(testDate[-1])
            licenseLastYear = int(licenseDate[-1])
            if (testLastYear < licenseLastYear):
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.OUTDATED),
                              message=f'Found date {testLastYear}, expected {licenseLastYear}')
            else:
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.INVALID_LICENSE),
                              message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
        elif (invalidLinesCount > 0):
            return Report(pathToFile=pathToFile,
                          error=Error(errorType=ErrorType.INVALID_LICENSE),
                          message=f'Found {invalidLinesCount} wrong lines out of {len(license)}')

    def checkFile(self, pathToFile: str) -> None:
        """Checks a file for a valid license."""
        # utf-8-sig transparently strips a UTF-8 BOM if present.
        with open(pathToFile, 'r', encoding="utf-8-sig") as file:
            test = self.findLicense(lines=file.readlines())
        if test:
            result = self._checkLicense(test=test, pathToFile=pathToFile)
            if result:
                self._reports.append(result)
        else:
            self._reports.append(Report(pathToFile=pathToFile, error=Error(errorType=ErrorType.NO_LICENSE)))
        return
class Walker(object):
    """Walks one Config's directory tree and feeds files to its Checker."""

    def __init__(self, config: Config) -> None:
        self._config = config
        self._checker = Checker(config=self._config)

    def getChecker(self):
        return self._checker

    def getConfig(self):
        return self._config

    def _getFiles(self) -> list[str]:
        """Collect the paths to check, honouring allow/ignore lists.

        An allow-listed file is always collected, even inside an ignored
        directory. Other files are collected only if neither an ignored dir
        name, an ignored dir path, nor an ignored file path matches.
        """
        result = []
        for address, dirs, files in os.walk(self._config.getDir()):
            # Pass 1: allow-listed files bypass every ignore rule.
            for i in files:
                if (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getAllowListFile()))):
                    filename, file_extension = os.path.splitext(i)
                    if file_extension in self._config.getFileExtensions():
                        result.append(os.path.join(address, i))
            else:
                # Pass 2 (for/else chain): each `else` runs only when the
                # preceding loop found no matching ignore rule (no break).
                for i in self._config.getIgnoreListDirName():
                    if(re.search(re.escape(i), address)):
                        break
                else:
                    for i in self._config.getIgnoreListDir():
                        if(re.search(re.escape(os.path.normpath(i)), address)):
                            break
                    else:
                        for i in files:
                            if not (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getIgnoreListFile()))):
                                filename, file_extension = os.path.splitext(i)
                                if file_extension in self._config.getFileExtensions():
                                    result.append(os.path.join(address, i))
        # NOTE(review): an allow-listed file inside a NON-ignored directory is
        # appended by both passes — confirm duplicates are acceptable/unreachable.
        return result

    def checkFiles(self) -> list[Report]:
        """Check every collected file and return the accumulated reports."""
        files = self._getFiles()
        for file in files:
            if (PRINT_CHECKING):
                print(f'Checking {file}...')
            # self._checker.checkFile(file)
            try:
                self._checker.checkFile(file)
            except Exception as e:
                # Keep walking on unreadable/undecodable files; just log them.
                print(file)
                print(e)
        return self._checker.getReports()
class Fixer(object):
    """Rewrites files reported by a Walker's Checker with the template license."""

    def __init__(self, walker: Walker) -> None:
        self._walker = walker
        self._checker = self._walker.getChecker()
        self._config = self._walker.getConfig()

    def fix(self):
        """Repair the reported files; returns the number of files rewritten.

        When the global FIX is falsy every report is repaired; otherwise only
        reports whose ErrorType is listed in FIX. NO_LICENSE reports get the
        license prepended; all other categories get it replaced.
        """
        count = 0
        for report in self._checker.getReports():
            if ((not FIX and report.getError().getErrorType() == ErrorType.NO_LICENSE) or (report.getError().getErrorType() == ErrorType.NO_LICENSE and report.getError().getErrorType() in FIX)):
                self._addLicense(report.getPathToFile())
                count += 1
            elif ((not FIX and report.getError().getErrorType() != ErrorType.NO_LICENSE) or (report.getError().getErrorType() != ErrorType.NO_LICENSE and report.getError().getErrorType() in FIX)):
                self._fixLicense(report.getPathToFile())
                count += 1
        return count

    def _addLicense(self, pathToFile: str):
        """Prepend the license template to a file that has none."""
        buffer = []
        with open(pathToFile, 'r', encoding="utf8") as file:
            buffer = file.readlines()
        with open(pathToFile, 'w', encoding="utf8") as file:
            license = self._config.getLicense()
            file.writelines(license)
            file.write('\n')  # blank line between license and original content
            file.writelines(buffer)
        return

    def _fixLicense(self, pathToFile: str):
        """Replace an existing (broken or outdated) license with the template."""
        buffer = []
        writeEncoding = "utf8"
        with open(pathToFile, 'r', encoding="utf8") as file:
            buffer = file.readlines()
        # Preserve a leading UTF-8 BOM if the file had one.
        if buffer and buffer[0].startswith(codecs.decode(codecs.BOM_UTF8)):
            writeEncoding = "utf-8-sig"
        oldLicense = self._checker.findLicense(buffer)
        # list.remove drops the first occurrence of each old license line.
        for i in oldLicense:
            buffer.remove(i)
        with open(pathToFile, 'w', encoding=writeEncoding) as file:
            license = self._config.getLicense()
            file.writelines(license)
            file.writelines(buffer)
        return
# Module-level accumulators populated by the script body below.
walkers: list[Walker] = []
reports: list[Report] = []
def fix(walkers):
    """Run a Fixer over every walker and report how many files were repaired.

    Args:
        walkers: Walker instances whose checkers already hold reports.

    When the global FIX lists categories, only those are repaired ("selected");
    when FIX is falsy, every reported file (global `reports`) is repaired.
    """
    count = 0
    if FIX:
        # Plain string: no placeholders, so no f-prefix needed (ruff F541).
        print('Fixing selected files...')
    else:
        print(f'Fixing all {len(reports)} files...')
    for walker in walkers:
        fixer = Fixer(walker=walker)
        count += fixer.fix()
    print(f'Fixed {count} files.')
def writeReports(reports: list[Report]) -> None:
    """Write one report file per ErrorType into REPORT_FOLDER.

    Each <ERROR_NAME>.txt contains one formatted line per report of that
    category (empty file when the category has no reports).
    """
    grouped: dict[str, list[Report]] = {err.name: [] for err in ErrorType}
    for rep in reports:
        grouped[rep.getError().getErrorType().name].append(rep)
    for err in ErrorType:
        with open(f'{REPORT_FOLDER}/{err.name}.txt', 'w', encoding="utf8") as out:
            out.writelines(rep.report() + '\n' for rep in grouped[err.name])
# --- Main script: check every configured directory, then report/fix ---------
for config in CONFIGS:
    walkers.append(Walker(config=config))
print('Checking files...')
for walker in walkers:
    reports = reports + walker.checkFiles()
if reports:
    if not os.path.exists(REPORT_FOLDER):
        os.mkdir(REPORT_FOLDER)
    if PRINT_REPORTS:
        print('\n'.join(map(lambda report: report.report(), reports)))
    print(f'{len(reports)} invalid licenses were found.')
    print(f'Saving reports in {REPORT_FOLDER}')
    writeReports(reports=reports)
    # Automatic repair only runs when the config lists "fix" categories.
    if FIX:
        fix(walkers=walkers)
    # else:
    #     choice = str(input(f'Fix it automatically? [Y/N] ')).lower()
    #     if choice == 'y':
    #         fix(walkers=walkers)
else:
    print('All licenses are ok.')
# os.system('pause')

View File

@ -1,12 +0,0 @@
#!/usr/bin/env python
import package_desktop
import package_server
import package_builder
def make(product):
    """Dispatch packaging to the module for the given product name.

    Exits the process with status 1 when the product is unknown.
    """
    makers = {
        'desktop': package_desktop.make,
        'server': package_server.make,
        'builder': package_builder.make,
    }
    maker = makers.get(product)
    if maker is None:
        exit(1)
    maker()
    return

View File

@ -1,44 +1,88 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from package_utils import *
import package_utils as utils
onlyoffice = True
company_name = 'ONLYOFFICE'
company_name = "ONLYOFFICE"
company_name_l = company_name.lower()
publisher_name = 'Ascensio System SIA'
cert_name = 'Ascensio System SIA'
publisher_name = "Ascensio System SIA"
cert_name = "Ascensio System SIA"
if product == 'desktop':
s3_bucket = "repo-doc-onlyoffice-com"
s3_region = "eu-west-1"
s3_base_url = "https://s3.eu-west-1.amazonaws.com/repo-doc-onlyoffice-com"
if system == 'windows':
build_dir = get_path("desktop-apps/win-linux/package/windows")
# branding_dir = get_path(branding, build_dir)
product_name = 'Desktop Editors'
product_name_s = product_name.replace(' ','')
package_name = company_name + '_' + product_name_s
vcredist_list = ['2022', '2013']
update_changes_list = {
'en': "changes",
'ru': "changes_ru"
if utils.is_windows():
desktop_product_name = "Desktop Editors"
desktop_product_name_s = desktop_product_name.replace(" ","")
desktop_package_name = company_name + "-" + desktop_product_name_s
desktop_changes_dir = "desktop-apps/win-linux/package/windows/update/changes"
if utils.is_macos():
desktop_package_name = "ONLYOFFICE"
desktop_build_dir = "desktop-apps/macos"
desktop_branding_dir = "desktop-apps/macos"
desktop_updates_dir = "build/update"
desktop_changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
builder_product_name = "Document Builder"
if utils.is_linux():
builder_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "builder/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "builder/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "builder/linux/rhel/"
}
elif system == 'darwin':
build_dir = "desktop-apps/macos"
branding_build_dir = "desktop-apps/macos"
package_name = company_name
updates_dir = "build/update"
changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
update_changes_list = {
'en': "ReleaseNotes",
'ru': "ReleaseNotesRU"
]
desktop_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "desktop/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "desktop/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "desktop/linux/rhel/"
},
{
"make": "rpm-suse",
"src": "rpm-suse/build/RPMS/*/*.rpm",
"dst": "desktop/linux/suse/"
}
sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
if product == 'builder':
if system == 'windows':
build_dir = "document-builder-package"
product_name = 'Document Builder'
product_name_s = product_name.replace(' ','')
package_name = company_name + '_' + product_name_s
]
server_make_targets = [
{
"make": "deb",
"src": "deb/*.deb",
"dst": "server/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "server/linux/rhel/"
},
{
"make": "tar",
"src": "*.tar*",
"dst": "server/linux/snap/"
}
]

View File

@ -1,105 +1,180 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from package_utils import *
from package_branding import *
import package_utils as utils
import package_common as common
import package_branding as branding
def make():
if system == 'windows':
utils.log_h1("BUILDER")
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
utils.log("Unsupported host OS")
return
if common.deploy:
make_archive()
if utils.is_windows():
make_windows()
elif system == 'linux':
if 'packages' in targets:
set_cwd(build_dir)
log("Clean")
cmd("make", ["clean"])
log("Build packages")
cmd("make", ["packages"])
else:
exit(1)
elif utils.is_macos():
make_macos()
elif utils.is_linux():
make_linux()
return
#
# Windows
#
def s3_upload(files, dst):
    """Upload each file to branding.s3_bucket under dst.

    A dst ending in "/" is treated as a key prefix (basename appended);
    otherwise dst is the exact object key. Returns True only when every
    upload succeeded; False for an empty/None file list.
    """
    if not files: return False
    ret = True
    for f in files:
        key = dst + utils.get_basename(f) if dst.endswith("/") else dst
        upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
        if upload:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
        ret &= upload
    return ret
def make_archive():
    """7z the built documentbuilder artifacts and upload them to S3 twice:
    a per-build key and a branch "latest" copy."""
    utils.set_cwd(utils.get_path(
        "build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
    utils.log_h2("builder archive build")
    utils.delete_file("builder.7z")
    args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"]
    # NOTE(review): non-Windows goes through the shell (utils.sh) — presumably
    # so the wildcard is handled there; confirm against utils implementation.
    if utils.is_windows():
        ret = utils.cmd(*args, verbose=True)
    else:
        ret = utils.sh(" ".join(args), verbose=True)
    utils.set_summary("builder archive build", ret)
    utils.log_h2("builder archive deploy")
    dest = "builder-" + common.prefix.replace("_","-") + ".7z"
    dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
    dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
    ret = utils.s3_upload(
        "builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
    utils.set_summary("builder archive deploy", ret)
    if ret:
        utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
        # Server-side copy of the versioned object to the branch's latest key.
        utils.s3_copy(
            "s3://" + branding.s3_bucket + "/" + dest_version,
            "s3://" + branding.s3_bucket + "/" + dest_latest)
        utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
    utils.set_cwd(common.workspace_dir)
    return
def make_windows():
global package_version, sign, machine, arch, source_dir, base_dir, \
innosetup_file, portable_zip_file, isxdl_file
base_dir = "base"
isxdl_file = "exe/scripts/isxdl/isxdl.dll"
global package_version, arch
utils.set_cwd("document-builder-package")
set_cwd(get_abspath(git_dir, build_dir))
package_version = common.version + "." + common.build
arch = {
"windows_x64": "x64",
"windows_x86": "x86"
}[common.platform]
if 'clean' in targets:
log("\n=== Clean\n")
delete_dir(base_dir)
delete_files(isxdl_file)
delete_files("exe/*.exe")
delete_files("zip/*.zip")
if common.clean:
utils.log_h2("builder clean")
utils.delete_dir("build")
utils.delete_files("exe\\*.exe")
utils.delete_files("zip\\*.msi")
package_version = version + '.' + build
sign = 'sign' in targets
if make_prepare():
make_zip()
make_inno()
else:
utils.set_summary("builder zip build", False)
utils.set_summary("builder inno build", False)
for target in targets:
if not (target.startswith('innosetup') or target.startswith('portable')):
continue
machine = get_platform(target)['machine']
arch = get_platform(target)['arch']
suffix = arch
source_prefix = "win_" + machine
source_dir = get_path("%s/%s/%s/%s" % (out_dir, source_prefix, company_name_l, product_name_s))
log("\n=== Copy arifacts\n")
create_dir(base_dir)
copy_dir_content(source_dir, base_dir + '\\')
if target.startswith('innosetup'):
download_isxdl()
innosetup_file = "exe/%s_%s_%s.exe" % (package_name, package_version, suffix)
make_innosetup()
if target.startswith('portable'):
portable_zip_file = "zip/%s_%s_%s.zip" % (package_name, package_version, suffix)
make_win_portable()
utils.set_cwd(common.workspace_dir)
return
def download_isxdl():
log("\n=== Download isxdl\n")
log("--- " + isxdl_file)
if is_file(isxdl_file):
log("! file exist, skip")
return
create_dir(get_dirname(isxdl_file))
download_file(isxdl_link, isxdl_file)
return
def make_innosetup():
log("\n=== Build innosetup project\n")
iscc_args = [
"/Qp",
"/DVERSION=" + package_version,
"/DARCH=" + arch
def make_prepare():
args = [
"-Version", package_version,
"-Arch", arch
]
if not onlyoffice:
iscc_args.append("/DBRANDING_DIR=" + get_abspath(git_dir, branding, build_dir, "exe"))
if sign:
iscc_args.append("/DSIGN")
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
log("--- " + innosetup_file)
if is_file(innosetup_file):
log("! file exist, skip")
return
set_cwd("exe")
cmd("iscc", iscc_args + ["builder.iss"])
set_cwd("..")
if common.sign:
args += ["-Sign"]
utils.log_h2("builder prepare")
ret = utils.ps1("make.ps1", args, verbose=True)
utils.set_summary("builder prepare", ret)
return ret
def make_zip():
    """Build the Windows portable zip via make_zip.ps1 and optionally deploy it."""
    args = [
        "-Version", package_version,
        "-Arch", arch
    ]
    # if common.sign:
    #     args += ["-Sign"]
    utils.log_h2("builder zip build")
    ret = utils.ps1("make_zip.ps1", args, verbose=True)
    utils.set_summary("builder zip build", ret)
    # Deploy only when the build itself succeeded.
    if common.deploy and ret:
        utils.log_h2("builder zip deploy")
        ret = s3_upload(utils.glob_path("zip/*.zip"), "builder/win/generic/")
        utils.set_summary("builder zip deploy", ret)
    return
def make_win_portable():
log("\n=== Build portable\n")
log("--- " + portable_zip_file)
if is_file(portable_zip_file):
log("! file exist, skip")
return
cmd("7z", ["a", "-y", portable_zip_file, get_path(base_dir, "*")])
def make_inno():
    """Build the Windows Inno Setup installer via make_inno.ps1 and optionally deploy it."""
    args = [
        "-Version", package_version,
        "-Arch", arch
    ]
    # Branded (non-ONLYOFFICE) builds pass their branding directory through.
    if not branding.onlyoffice:
        args += ["-Branding", common.branding]
    if common.sign:
        args += ["-Sign"]
    utils.log_h2("builder inno build")
    ret = utils.ps1("make_inno.ps1", args, verbose=True)
    utils.set_summary("builder inno build", ret)
    # Deploy only when the build itself succeeded.
    if common.deploy and ret:
        utils.log_h2("builder inno deploy")
        ret = s3_upload(utils.glob_path("exe/*.exe"), "builder/win/inno/")
        utils.set_summary("builder inno deploy", ret)
    return
def make_macos():
    """Build the macOS builder tarball via make and optionally deploy it."""
    utils.set_cwd("document-builder-package")
    utils.log_h2("builder tar build")
    make_args = ["tar"]
    if common.platform == "darwin_arm64":
        make_args += ["-e", "UNAME_M=arm64"]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"]
    ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
    utils.set_summary("builder tar build", ret)
    if common.deploy:
        utils.log_h2("builder tar deploy")
        ret = s3_upload(utils.glob_path("tar/*.tar.xz"), "builder/mac/generic/")
        utils.set_summary("builder tar deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return
def make_linux():
    """Build every Linux builder package format from branding.builder_make_targets
    and optionally deploy each produced artifact."""
    utils.set_cwd("document-builder-package")
    utils.log_h2("builder build")
    make_args = [t["make"] for t in branding.builder_make_targets]
    if common.platform == "linux_aarch64":
        make_args += ["-e", "UNAME_M=aarch64"]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"]
    ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
    utils.set_summary("builder build", ret)
    if common.deploy:
        # Each target lists its artifact glob ("src") and S3 prefix ("dst").
        for t in branding.builder_make_targets:
            utils.log_h2("builder " + t["make"] + " deploy")
            ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
            utils.set_summary("builder " + t["make"] + " deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return

17
scripts/package_common.py Normal file
View File

@ -0,0 +1,17 @@
#!/usr/bin/env python
# Maps build_tools platform identifiers to the artifact-directory prefixes
# found under build_tools/out.
platformPrefixes = {
    "windows_x64": "win_64",
    "windows_x86": "win_32",
    "windows_x64_xp": "win_64_xp",
    "windows_x86_xp": "win_32_xp",
    "darwin_x86_64": "mac_64",
    "darwin_arm64": "mac_arm64",
    "darwin_x86_64_v8": "mac_64",
    "linux_x86_64": "linux_64",
    "linux_aarch64": "linux_arm64",
    "linux_x86_64_cef": "linux_64",
}
# Root of the build artifacts tree.
out_dir = "build_tools/out"
# Timestamping authority for Windows code signing (signtool /t).
tsa_server = "http://timestamp.digicert.com"

93
scripts/package_core.py Normal file
View File

@ -0,0 +1,93 @@
#!/usr/bin/env python
import package_utils as utils
import package_common as common
import package_branding as branding
def make():
    """Package the core artifacts; currently only the archive deploy step."""
    utils.log_h1("CORE")
    if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
        utils.log("Unsupported host OS")
        return
    if common.deploy:
        make_archive()
    return
def make_archive():
    """7z the built core binaries and upload them to S3 twice: a per-build
    key and a branch "latest" copy."""
    utils.set_cwd(utils.get_path(
        "build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
    utils.log_h2("core archive build")
    utils.delete_file("core.7z")
    args = ["7z", "a", "-y", "core.7z", "./core/*"]
    # NOTE(review): non-Windows goes through the shell (utils.sh) — presumably
    # so the wildcard is handled there; confirm against utils implementation.
    if utils.is_windows():
        ret = utils.cmd(*args, verbose=True)
    else:
        ret = utils.sh(" ".join(args), verbose=True)
    utils.set_summary("core archive build", ret)
    utils.log_h2("core archive deploy")
    dest = "core-" + common.prefix.replace("_","-") + ".7z"
    dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
    dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
    ret = utils.s3_upload(
        "core.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
    utils.set_summary("core archive deploy", ret)
    if ret:
        utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
        # Server-side copy of the versioned object to the branch's latest key.
        utils.s3_copy(
            "s3://" + branding.s3_bucket + "/" + dest_version,
            "s3://" + branding.s3_bucket + "/" + dest_latest)
        utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
    utils.set_cwd(common.workspace_dir)
    return
def deploy_closuremaps_sdkjs(license):
    """Upload sdkjs closure-compiler source maps to S3 (deploy mode only).

    `license` ("opensource"/"commercial" style tag) becomes part of the
    S3 key and of the summary label.
    """
    if not common.deploy: return
    utils.log_h1("SDKJS CLOSURE MAPS")
    summary = "sdkjs closure maps %s deploy" % license
    map_files = utils.glob_path("sdkjs/build/maps/*.js.map")
    if not map_files:
        utils.log_err("files do not exist")
        utils.set_summary(summary, False)
        return
    for path in map_files:
        utils.log("- " + path)
    utils.log_h2(summary)
    ok = True
    for path in map_files:
        key = "closure-maps/sdkjs/%s/%s/%s/%s" % (
            license, common.version, common.build, utils.get_basename(path))
        uploaded = utils.s3_upload(path, "s3://" + branding.s3_bucket + "/" + key)
        ok &= uploaded
        if uploaded:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
    utils.set_summary(summary, ok)
    return
def deploy_closuremaps_webapps(license):
    # Upload web-apps closure-compiler source maps to S3; no-op unless
    # running in deploy mode. `license` is embedded in the S3 key and in
    # the summary label.
    if not common.deploy: return
    utils.log_h1("WEB-APPS CLOSURE MAPS")
    maps = utils.glob_path("web-apps/deploy/web-apps/apps/*/*/*.js.map") \
        + utils.glob_path("web-apps/deploy/web-apps/apps/*/mobile/dist/js/*.js.map")
    if maps:
        for m in maps: utils.log("- " + m)
    else:
        utils.log_err("files do not exist")
        utils.set_summary("web-apps closure maps %s deploy" % license, False)
        return
    utils.log_h2("web-apps closure maps %s deploy" % license)
    ret = True
    for f in maps:
        # Flatten the path below apps/ into a single S3-safe file name.
        base = utils.get_relpath(f, "web-apps/deploy/web-apps/apps").replace("/", "_")
        key = "closure-maps/web-apps/%s/%s/%s/%s" % (license, common.version, common.build, base)
        upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
        ret &= upload
        if upload:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
    utils.set_summary("web-apps closure maps %s deploy" % license, ret)
    return

View File

@ -2,302 +2,192 @@
# -*- coding: utf-8 -*-
import os
from package_utils import *
from package_branding import *
import re
import package_utils as utils
import package_common as common
import package_branding as branding
def make():
if system == 'windows':
utils.log_h1("DESKTOP")
if utils.is_windows():
make_windows()
elif system == 'darwin':
elif utils.is_macos():
make_macos()
elif system == 'linux':
if 'packages' in targets:
set_cwd(build_dir)
log("Clean")
cmd("make", ["clean"])
log("Build packages")
cmd("make", ["packages"])
elif utils.is_linux():
make_linux()
else:
exit(1)
utils.log("Unsupported host OS")
return
def s3_upload(files, dst):
    """Upload each file in `files` to the branding S3 bucket.

    A `dst` ending in "/" is treated as a key prefix and the file's
    basename is appended; otherwise `dst` is used as the complete key.
    Returns True only when every upload succeeded (False for no files).
    """
    if not files: return False
    ok = True
    for path in files:
        if dst.endswith("/"):
            key = dst + utils.get_basename(path)
        else:
            key = dst
        uploaded = utils.s3_upload(path, "s3://" + branding.s3_bucket + "/" + key)
        if uploaded:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
        ok &= uploaded
    return ok
#
# Windows
#
def make_windows():
global package_version, sign, machine, arch, xp, iscc_args, \
source_dir, source_help_dir, innosetup_file, innosetup_help_file, \
innosetup_update_file, advinst_file, portable_zip_file
global package_name, package_version, arch, xp, suffix
utils.set_cwd("desktop-apps\\win-linux\\package\\windows")
set_cwd(get_abspath(git_dir, build_dir))
package_name = branding.desktop_package_name
package_version = common.version + "." + common.build
arch = {
"windows_x64": "x64",
"windows_x64_xp": "x64",
"windows_x86": "x86",
"windows_x86_xp": "x86"
}[common.platform]
xp = common.platform.endswith("_xp")
suffix = arch + ("-xp" if xp else "")
if 'clean' in targets:
log("\n=== Clean\n")
delete_dir(get_path("data/vcredist"))
delete_dir("DesktopEditors-cache")
delete_files("*.exe")
delete_files("*.msi")
delete_files("*.aic")
delete_files("*.tmp")
delete_files("*.zip")
delete_files(get_path("update/*.exe"))
delete_files(get_path("update/*.xml"))
delete_files(get_path("update/*.html"))
if common.clean:
utils.log_h2("desktop clean")
utils.delete_dir("DesktopEditors-cache")
utils.delete_files("*.exe")
utils.delete_files("*.msi")
utils.delete_files("*.aic")
utils.delete_files("*.tmp")
utils.delete_files("*.zip")
utils.delete_files("data\\*.exe")
package_version = version + '.' + build
sign = 'sign' in targets
for target in targets:
if not (target.startswith('innosetup') or target.startswith('advinst') or
target.startswith('portable')):
continue
machine = get_platform(target)['machine']
arch = get_platform(target)['arch']
xp = get_platform(target)['xp']
suffix = arch + ("_xp" if xp else "")
source_prefix = "win_" + machine + ("_xp" if xp else "")
source_dir = get_path("%s/%s/%s/%s" % (out_dir, source_prefix, company_name_l, product_name_s))
source_help_dir = source_dir + "-help"
if target.startswith('innosetup'):
for year in vcredist_list:
download_vcredist(year)
innosetup_file = "%s_%s_%s.exe" % (package_name, package_version, suffix)
make_innosetup()
if 'winsparkle-update' in targets:
innosetup_update_file = get_path("update/editors_update_%s.exe" % suffix)
make_innosetup_update()
if 'winsparkle-files' in targets:
make_winsparkle_files()
if target.startswith('innosetup-help'):
innosetup_help_file = "%s_Help_%s_%s.exe" % (package_name, package_version, suffix)
make_innosetup_help()
if target.startswith('advinst'):
advinst_file = "%s_%s_%s.msi" % (package_name, package_version, suffix)
make_advinst()
if target.startswith('portable'):
portable_zip_file = "%s_%s_%s.zip" % (package_name, package_version, suffix)
make_win_portable()
make_prepare()
make_zip()
make_inno()
make_advinst()
make_online()
utils.set_cwd(common.workspace_dir)
return
def download_vcredist(year):
log("\n=== Download vcredist " + year + "\n")
vcredist = get_path("data/vcredist/vcredist_%s_%s.exe" % (year, arch))
log("--- " + vcredist)
if is_file(vcredist):
log("! file exist, skip")
return
create_dir(get_dirname(vcredist))
download_file(vcredist_links[year][machine], vcredist)
return
def make_innosetup():
log("\n=== Build innosetup project\n")
global iscc_args
iscc_args = [
"/Qp",
"/DsAppVersion=" + package_version,
"/DDEPLOY_PATH=" + source_dir,
"/D_ARCH=" + machine
def make_prepare():
args = [
"-Version", package_version,
"-Arch", arch
]
if onlyoffice:
iscc_args.append("/D_ONLYOFFICE=1")
else:
iscc_args.append("/DsBrandingFolder=" + get_abspath(git_dir, branding_dir))
if xp:
iscc_args.append("/D_WIN_XP=1")
if sign:
iscc_args.append("/DENABLE_SIGNING=1")
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
log("--- " + innosetup_file)
if is_file(innosetup_file):
log("! file exist, skip")
return
cmd("iscc", iscc_args + ["common.iss"])
args += ["-Target", "xp"]
if common.sign:
args += ["-Sign"]
utils.log_h2("desktop prepare")
ret = utils.ps1("make.ps1", args, verbose=True)
utils.set_summary("desktop prepare", ret)
return
def make_innosetup_help():
log("\n=== Build innosetup help project\n")
global iscc_args
iscc_args = [
"/Qp",
"/DsAppVersion=" + package_version,
"/DDEPLOY_PATH=" + source_help_dir,
"/D_ARCH=" + machine
def make_zip():
zip_file = "%s-%s-%s.zip" % (package_name, package_version, suffix)
args = [
"-Version", package_version,
"-Arch", arch
]
if onlyoffice:
iscc_args.append("/D_ONLYOFFICE=1")
else:
iscc_args.append("/DsBrandingFolder=" + get_abspath(git_dir, branding_dir))
if sign:
iscc_args.append("/DENABLE_SIGNING=1")
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
log("--- " + innosetup_help_file)
if is_file(innosetup_help_file):
log("! file exist, skip")
return
cmd("iscc", iscc_args + ["help.iss"])
if xp:
args += ["-Target", "xp"]
# if common.sign:
# args += ["-Sign"]
utils.log_h2("desktop zip build")
ret = utils.ps1("make_zip.ps1", args, verbose=True)
utils.set_summary("desktop zip build", ret)
if common.deploy and ret:
utils.log_h2("desktop zip deploy")
ret = s3_upload([zip_file], "desktop/win/generic/")
utils.set_summary("desktop zip deploy", ret)
return
def make_innosetup_update():
log("\n=== Build innosetup update project\n")
log("--- " + innosetup_update_file)
if is_file(innosetup_update_file):
log("! file exist, skip")
return
cmd("iscc", iscc_args + ["/DTARGET_NAME=" + innosetup_file, "update_common.iss"])
return
def make_winsparkle_files():
log("\n=== Build winsparkle files\n")
awk_branding = "update/branding.awk"
if not onlyoffice:
build_branding_dir = get_abspath(git_dir, branding_dir, "win-linux/package/windows")
else:
build_branding_dir = get_path(".")
awk_args = [
"-v", "Version=" + version,
"-v", "Build=" + build,
"-v", "Branch=" + get_env("RELEASE_BRANCH"),
"-v", "Timestamp=" + timestamp,
"-i", get_path(build_branding_dir, awk_branding)
def make_inno():
inno_file = "%s-%s-%s.exe" % (package_name, package_version, suffix)
inno_sa_file = "%s-Standalone-%s-%s.exe" % (package_name, package_version, suffix)
inno_update_file = "%s-Update-%s-%s.exe" % (package_name, package_version, suffix)
update_wrapper = not (hasattr(branding, 'desktop_updates_skip_iss_wrapper') and branding.desktop_updates_skip_iss_wrapper)
args = [
"-Version", package_version,
"-Arch", arch
]
if common.sign:
args += ["-Sign"]
appcast = get_path("update/appcast.xml")
log("--- " + appcast)
if is_file(appcast):
log("! file exist, skip")
utils.log_h2("desktop inno build")
if xp:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp"], verbose=True)
else:
command = "env LANG=en_US.UTF-8 awk " + \
' '.join(awk_args) + " -f update/appcast.xml.awk"
appcast_result = proc_open(command)
if appcast_result['stderr'] != "":
log("! error: " + appcast_result['stderr'])
write_file(appcast, appcast_result['stdout'])
ret = utils.ps1("make_inno.ps1", args, verbose=True)
utils.set_summary("desktop inno build", ret)
appcast_prod = get_path("update/appcast-prod.xml")
log("--- " + appcast_prod)
if is_file(appcast_prod):
log("! file exist, skip")
else:
command = "env LANG=en_US.UTF-8 awk -v Prod=1 " + \
' '.join(awk_args) + " -f update/appcast.xml.awk"
appcast_result = proc_open(command)
if appcast_result['stderr'] != "":
log("! error: " + appcast_result['stderr'])
write_file(appcast_prod, appcast_result['stdout'])
if branding.onlyoffice and not xp:
utils.log_h2("desktop inno standalone")
ret = utils.ps1("make_inno.ps1", args + ["-Target", "standalone"], verbose=True)
utils.set_summary("desktop inno standalone build", ret)
changes_dir = get_path(build_branding_dir, "update/changes", version)
for lang, base in update_changes_list.items():
changes = get_path("update/" + base + ".html")
if lang == 'en': encoding = 'en_US.UTF-8'
elif lang == 'ru': encoding = 'ru_RU.UTF-8'
log("--- " + changes)
if is_file(changes):
log("! file exist, skip")
if update_wrapper:
utils.log_h2("desktop inno update build")
if xp:
ret = utils.ps1("make_inno.ps1", args + ["-Target", "xp_update"], verbose=True)
else:
command = "env LANG=" + encoding + " awk " + ' '.join(awk_args) + \
" -f update\\changes.html.awk " + changes_dir + "\\" + lang + ".html"
changes_result = proc_open(command)
if changes_result['stderr'] != "":
log("! error: " + changes_result['stderr'])
write_file(changes, changes_result['stdout'])
ret = utils.ps1("make_inno.ps1", args + ["-Target", "update"], verbose=True)
utils.set_summary("desktop inno update build", ret)
if common.deploy:
utils.log_h2("desktop inno deploy")
ret = s3_upload([inno_file], "desktop/win/inno/")
utils.set_summary("desktop inno deploy", ret)
if branding.onlyoffice and not xp:
utils.log_h2("desktop inno standalone deploy")
ret = s3_upload([inno_sa_file], "desktop/win/inno/")
utils.set_summary("desktop inno standalone deploy", ret)
utils.log_h2("desktop inno update deploy")
if utils.is_file(inno_update_file):
ret = s3_upload([inno_update_file], "desktop/win/inno/")
elif utils.is_file(inno_file):
ret = s3_upload([inno_file], "desktop/win/inno/" + inno_update_file)
else:
ret = False
utils.set_summary("desktop inno update deploy", ret)
return
def make_advinst():
log("\n=== Build advanced installer project\n")
log("--- " + advinst_file)
if is_file(advinst_file):
log("! file exist, skip")
if not common.platform in ["windows_x64", "windows_x86"]:
return
if not onlyoffice:
branding_path = get_abspath(git_dir, branding_dir)
copy_dir_content(
branding_path + "\\win-linux\\package\\windows\\data", "data", ".bmp")
copy_dir_content(
branding_path + "\\win-linux\\package\\windows\\data", "data", ".png")
copy_dir_content(
branding_path + "\\win-linux\\extras\\projicons\\res",
"..\\..\\extras\\projicons\\res", ".ico")
copy_file(
branding_path + "\\win-linux\\package\\windows\\dictionary.ail",
"dictionary.ail")
copy_file(
branding_path + "\\common\\package\\license\\eula_" + branding + ".rtf",
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf")
copy_file(
branding_path + "\\..\\multimedia\\videoplayer\\icons\\" + branding + ".ico",
"..\\..\\extras\\projicons\\res\\media.ico")
copy_file(
branding_path + "\\..\\multimedia\\imageviewer\\icons\\ico\\" + branding + ".ico",
"..\\..\\extras\\projicons\\res\\gallery.ico")
aic_content = [";aic"]
if not sign:
aic_content += [
"ResetSig"
]
if machine == '32':
aic_content += [
"SetPackageType x86",
"SetAppdir -buildname DefaultBuild -path [ProgramFilesFolder][MANUFACTURER_INSTALL_FOLDER]\\[PRODUCT_INSTALL_FOLDER]",
'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x64)"',
'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x64)"'
]
if machine == '64':
aic_content += [
'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x86)"',
'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x86)"'
]
if onlyoffice:
aic_content += [
"DelFolder CUSTOM_PATH"
]
else:
aic_content += [
"DelLanguage 1029 -buildname DefaultBuild",
"DelLanguage 1031 -buildname DefaultBuild",
"DelLanguage 1041 -buildname DefaultBuild",
"DelLanguage 1046 -buildname DefaultBuild",
"DelLanguage 2070 -buildname DefaultBuild",
"DelLanguage 1060 -buildname DefaultBuild",
"DelLanguage 1036 -buildname DefaultBuild",
"DelLanguage 3082 -buildname DefaultBuild",
"DelLanguage 1033 -buildname DefaultBuild",
"NewSync CUSTOM_PATH " + source_dir + "\\..\\MediaViewer",
"UpdateFile CUSTOM_PATH\\ImageViewer.exe " + source_dir + "\\..\\MediaViewer\\ImageViewer.exe",
"UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + source_dir + "\\..\\MediaViewer\\VideoPlayer.exe",
"SetProperty ASCC_REG_PREFIX=" + ascc_reg_prefix
]
aic_content += [
"AddOsLc -buildname DefaultBuild -arch " + arch,
"NewSync APPDIR " + source_dir,
"UpdateFile APPDIR\\DesktopEditors.exe " + source_dir + "\\DesktopEditors.exe",
"SetVersion " + package_version,
"SetPackageName " + advinst_file + " -buildname DefaultBuild",
"Rebuild -buildslist DefaultBuild"
advinst_file = "%s-%s-%s.msi" % (package_name, package_version, suffix)
args = [
"-Version", package_version,
"-Arch", arch
]
write_file("DesktopEditors.aic", "\r\n".join(aic_content), 'utf-8-sig')
cmd("AdvancedInstaller.com",
["/execute", "DesktopEditors.aip", "DesktopEditors.aic"])
if common.sign:
args += ["-Sign"]
utils.log_h2("desktop advinst build")
ret = utils.ps1("make_advinst.ps1", args, verbose=True)
utils.set_summary("desktop advinst build", ret)
if common.deploy and ret:
utils.log_h2("desktop advinst deploy")
ret = s3_upload([advinst_file], "desktop/win/advinst/")
utils.set_summary("desktop advinst deploy", ret)
return
def make_win_portable():
log("\n=== Build portable\n")
log("--- " + portable_zip_file)
if is_file(portable_zip_file):
log("! file exist, skip")
def make_online():
if not common.platform in ["windows_x64", "windows_x86"]:
return
cmd("7z", ["a", "-y", portable_zip_file, get_path(source_dir, "*")])
online_file = "%s-%s-%s.exe" % ("OnlineInstaller", package_version, suffix)
ret = utils.is_file(online_file)
utils.set_summary("desktop online installer build", ret)
if common.deploy and ret:
utils.log_h2("desktop online installer deploy")
ret = s3_upload([online_file], "desktop/win/online/")
utils.set_summary("desktop online installer deploy", ret)
return
#
@ -305,105 +195,161 @@ def make_win_portable():
#
def make_macos():
global suffix, lane, scheme
global package_name, build_dir, branding_dir, updates_dir, changes_dir, \
suffix, lane, scheme, released_updates_dir
package_name = branding.desktop_package_name
build_dir = branding.desktop_build_dir
branding_dir = branding.desktop_branding_dir
updates_dir = branding.desktop_updates_dir
changes_dir = branding.desktop_changes_dir
suffix = {
"darwin_x86_64": "x86_64",
"darwin_x86_64_v8": "v8",
"darwin_arm64": "arm"
}[common.platform]
lane = "release_" + suffix
scheme = package_name + "-" + suffix
sparkle_updates = False
set_cwd(git_dir + "/" + branding_build_dir)
utils.set_cwd(branding_dir)
if 'clean' in targets:
log("\n=== Clean\n")
delete_dir(get_env("HOME") + "/Library/Developer/Xcode/Archives")
delete_dir(get_env("HOME") + "/Library/Caches/Sparkle_generate_appcast")
if common.clean:
utils.log_h2("clean")
utils.delete_dir(utils.get_env("HOME") + "/Library/Developer/Xcode/Archives")
utils.delete_dir(utils.get_env("HOME") + "/Library/Caches/Sparkle_generate_appcast")
for target in targets:
if not target.startswith('diskimage'):
continue
utils.log_h2("build")
source_dir = "%s/build_tools/out/%s/%s" \
% (common.workspace_dir, common.prefix, branding.company_name)
if branding.onlyoffice:
for path in utils.glob_path(source_dir \
+ "/desktopeditors/editors/web-apps/apps/*/main/resources/help"):
utils.delete_dir(path)
if target.startswith('diskimage'):
if (target == 'diskimage-x86_64'): suffix = 'x86_64'
elif (target == 'diskimage-x86_64-v8'): suffix = 'v8'
elif (target == 'diskimage-arm64'): suffix = 'arm'
else: exit(1)
lane = "release_" + suffix
scheme = package_name + '-' + suffix
if utils.get_env("ARCHIVES_DIR"):
sparkle_updates = True
released_updates_dir = "%s/%s/_updates" % (utils.get_env("ARCHIVES_DIR"), scheme)
plistbuddy = "/usr/libexec/PlistBuddy"
plist_path = "%s/%s/ONLYOFFICE/Resources/%s-%s/Info.plist" \
% (common.workspace_dir, branding_dir, package_name, suffix)
make_diskimage(target)
appcast = utils.sh_output('%s -c "Print :SUFeedURL" %s' \
% (plistbuddy, plist_path), verbose=True).rstrip()
appcast = released_updates_dir + "/" + appcast[appcast.rfind("/")+1:]
if ('sparkle-updates' in targets):
make_sparkle_updates()
release_version_string = utils.sh_output(
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:shortVersionString\']/text()" ' + appcast,
verbose=True).rstrip()
release_version = utils.sh_output(
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:version\']/text()" ' + appcast,
verbose=True).rstrip()
bundle_version = str(int(release_version) + 1)
help_url = "https://download.onlyoffice.com/install/desktop/editors/help/v" + common.version + "/apps"
utils.sh('%s -c "Set :CFBundleShortVersionString %s" %s' \
% (plistbuddy, common.version, plist_path), verbose=True)
utils.sh('%s -c "Set :CFBundleVersion %s" %s' \
% (plistbuddy, bundle_version, plist_path), verbose=True)
utils.sh('%s -c "Set :ASCBundleBuildNumber %s" %s' \
% (plistbuddy, common.build, plist_path), verbose=True)
utils.sh('%s -c "Add :ASCWebappsHelpUrl string %s" %s' \
% (plistbuddy, help_url, plist_path), verbose=True)
utils.log("RELEASE=" + release_version_string + "(" + release_version + ")" \
+ "\nCURRENT=" + common.version + "(" + bundle_version + ")")
dmg = make_dmg()
if dmg and sparkle_updates:
make_sparkle_updates()
utils.set_cwd(common.workspace_dir)
return
def make_diskimage(target):
log("\n=== Build package " + scheme + "\n")
log("--- build/" + package_name + ".app")
cmd("bundler", ["exec", "fastlane", lane, "skip_git_bump:true"])
return
def make_dmg():
    # Build the desktop dmg via fastlane and, in deploy mode, upload the
    # resulting dmg and zip artifacts to S3. Returns the build result so
    # the caller can gate follow-up steps (e.g. sparkle updates) on it.
    # NOTE(review): relies on module globals (scheme, package_name, lane,
    # suffix) being set beforehand — presumably by make_macos(); confirm.
    utils.log_h2("desktop dmg build")
    utils.log_h3(scheme)
    utils.log_h3("build/" + package_name + ".app")
    dmg = utils.sh(
        "bundler exec fastlane " + lane + " skip_git_bump:true",
        verbose=True
    )
    utils.set_summary("desktop dmg build", dmg)
    if common.deploy and dmg:
        utils.log_h2("desktop dmg deploy")
        ret = s3_upload(
            utils.glob_path("build/*.dmg"),
            "desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
        utils.set_summary("desktop dmg deploy", ret)
        utils.log_h2("desktop zip deploy")
        ret = s3_upload(
            ["build/%s-%s.zip" % (scheme, common.version)],
            "desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
        utils.set_summary("desktop zip deploy", ret)
    return dmg
def make_sparkle_updates():
log("\n=== Build sparkle updates\n")
utils.log_h2("desktop sparkle files build")
app_version = proc_open("/usr/libexec/PlistBuddy \
-c 'print :CFBundleShortVersionString' \
build/" + package_name + ".app/Contents/Info.plist")['stdout']
zip_filename = scheme + '-' + app_version
zip_filename = scheme + '-' + common.version
macos_zip = "build/" + zip_filename + ".zip"
updates_storage_dir = "%s/%s/_updates" % (get_env('ARCHIVES_DIR'), scheme)
create_dir(updates_dir)
copy_dir_content(updates_storage_dir, updates_dir, ".zip")
# copy_dir_content(updates_storage_dir, updates_dir, ".html")
copy_file(macos_zip, updates_dir)
utils.create_dir(updates_dir)
utils.copy_file(macos_zip, updates_dir)
utils.sh(
"ls -1t " + released_updates_dir + "/*.zip" \
+ " | head -n 3" \
+ " | while read f; do cp -fv \"$f\" " + updates_dir + "/; done",
verbose=True)
if "en" in update_changes_list:
notes_src = "%s/%s/%s.html" % (changes_dir, app_version, update_changes_list["en"])
notes_dst = "%s/%s.html" % (updates_dir, zip_filename)
if is_file(notes_src):
copy_file(notes_src, notes_dst)
cur_date = sh_output("env LC_ALL=en_US.UTF-8 date -u \"+%B %e, %Y\"", verbose=True)
replace_in_file(notes_dst,
r"(<span class=\"releasedate\">).+(</span>)",
"\\1 - " + cur_date + "\\2")
else:
write_file(notes_dst, '<html></html>\n')
for ext in [".html", ".ru.html"]:
changes_src = changes_dir + "/" + common.version + "/changes" + ext
changes_dst = updates_dir + "/" + zip_filename + ext
if not utils.copy_file(changes_src, changes_dst):
utils.write_file(changes_dst, "<!DOCTYPE html>placeholder")
if "ru" in update_changes_list:
notes_src = "%s/%s/%s.html" % (changes_dir, app_version, update_changes_list["ru"])
if update_changes_list["ru"] != "ReleaseNotes":
notes_dst = "%s/%s.ru.html" % (updates_dir, zip_filename)
else:
notes_dst = "%s/%s.html" % (updates_dir, zip_filename)
if is_file(notes_src):
copy_file(notes_src, notes_dst)
cur_date = sh_output("env LC_ALL=ru_RU.UTF-8 date -u \"+%e %B %Y\"", verbose=True)
replace_in_file(notes_dst,
r"(<span class=\"releasedate\">).+(</span>)",
"\\1 - " + cur_date + "\\2")
else:
write_file(notes_dst, '<html></html>\n')
sparkle_base_url = "%s/%s/updates/" % (branding.sparkle_base_url, suffix)
ret = utils.sh(
common.workspace_dir \
+ "/desktop-apps/macos/Vendor/Sparkle/bin/generate_appcast " \
+ updates_dir \
+ " --download-url-prefix " + sparkle_base_url \
+ " --release-notes-url-prefix " + sparkle_base_url,
verbose=True
)
utils.set_summary("desktop sparkle files build", ret)
sparkle_download_url = "%s/%s/updates/" % (sparkle_base_url, suffix)
sparkle_release_notes_url = "%s/%s/updates/changes/%s/" % (sparkle_base_url, suffix, app_version)
cmd(git_dir + "/" + build_dir + "/Vendor/Sparkle/bin/generate_appcast", [
updates_dir,
"--download-url-prefix", sparkle_download_url,
"--release-notes-url-prefix", sparkle_release_notes_url
])
log("\n=== Edit Sparkle appcast links\n")
appcast_url = sparkle_base_url + "/" + suffix
appcast = "%s/%s.xml" % (updates_dir, package_name.lower())
for lang, base in update_changes_list.items():
if base == "ReleaseNotes":
replace_in_file(appcast,
r'(<sparkle:releaseNotesLink>.+/).+(\.html</sparkle:releaseNotesLink>)',
"\\1" + base + "\\2")
else:
replace_in_file(appcast,
r'(<sparkle:releaseNotesLink xml:lang="' + lang + r'">).+(\.html</sparkle:releaseNotesLink>)',
"\\1" + sparkle_release_notes_url + base + "\\2")
log("\n=== Delete unnecessary files\n")
for file in os.listdir(updates_dir):
if (-1 == file.find(app_version)) and (file.endswith(".zip") or
file.endswith(".html")):
delete_file(updates_dir + '/' + file)
if common.deploy:
utils.log_h2("desktop sparkle files deploy")
ret = s3_upload(
utils.glob_path("build/update/*.delta") \
+ utils.glob_path("build/update/*.xml") \
+ utils.glob_path("build/update/*.html"),
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop sparkle files deploy", ret)
return
#
# Linux
#
def make_linux():
    # Build the Linux desktop packages via make and, in deploy mode,
    # upload each target's artifacts to its configured S3 destination.
    utils.set_cwd("desktop-apps/win-linux/package/linux")
    utils.log_h2("desktop build")
    # One make target per entry in branding.desktop_make_targets.
    make_args = [t["make"] for t in branding.desktop_make_targets]
    if common.platform == "linux_aarch64":
        make_args += ["-e", "UNAME_M=aarch64"]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../../../../" + common.branding + "/desktop-apps/win-linux/package/linux"]
    ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
    utils.set_summary("desktop build", ret)
    if common.deploy:
        for t in branding.desktop_make_targets:
            utils.log_h2("desktop " + t["make"] + " deploy")
            ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
            utils.set_summary("desktop " + t["make"] + " deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return

38
scripts/package_mobile.py Normal file
View File

@ -0,0 +1,38 @@
#!/usr/bin/env python
import package_utils as utils
import package_common as common
import package_branding as branding
def make():
    """Entry point of the mobile packaging step (Linux hosts only)."""
    utils.log_h1("MOBILE")
    if utils.is_linux():
        make_mobile()
    else:
        utils.log("Unsupported host OS")
    return
def make_mobile():
    # Zip the android and ios build outputs into one archive and, in
    # deploy mode, upload it to S3 when the build succeeded.
    utils.set_cwd("build_tools/out")
    zip_file = "build-" + common.version + "-" + common.build + ".zip"
    if common.clean:
        utils.log_h2("mobile clean")
        utils.sh("rm -rfv *.zip", verbose=True)
    utils.log_h2("mobile build")
    ret = utils.sh("zip -r " + zip_file + " ./android ./ios", verbose=True)
    utils.set_summary("mobile build", ret)
    if common.deploy:
        if ret:
            utils.log_h2("mobile deploy")
            # NOTE(review): iOS output is also uploaded under mobile/android/ —
            # confirm the key prefix is intentional.
            key = "mobile/android/" + zip_file
            ret = utils.s3_upload(zip_file, "s3://" + branding.s3_bucket + "/" + key)
            if ret:
                utils.log("URL: " + branding.s3_base_url + "/" + key)
            utils.set_summary("mobile deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return

View File

@ -1,25 +1,82 @@
#!/usr/bin/env python3
#!/usr/bin/env python
import base
import os
def make(platform, targets):
base_dir = base.get_script_dir() + "/../out"
git_dir = base.get_script_dir() + "/../.."
package_dir = os.path.abspath(git_dir + "/document-server-package")
if ("windows" == platform) or ("linux" == platform):
if ("packages" in targets):
print("Make clean")
base.cmd_in_dir(package_dir, "make", ["clean"])
print("Make packages")
base.cmd_in_dir(package_dir, "make", ["packages"])
import package_utils as utils
import package_common as common
import package_branding as branding
def make(edition):
utils.log_h1("SERVER (" + edition.upper() + ")")
if utils.is_windows():
make_windows(edition)
elif utils.is_linux():
make_linux(edition)
else:
exit(1)
utils.log("Unsupported host OS")
return
def s3_upload(files, dst):
    # Upload each file to the branding S3 bucket. A trailing "/" in dst
    # marks it as a key prefix (file basename appended); otherwise dst is
    # used as the complete object key. Returns True only when every
    # upload succeeded; False for an empty file list.
    if not files: return False
    ret = True
    for f in files:
        key = dst + utils.get_basename(f) if dst.endswith("/") else dst
        upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
        if upload:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
        ret &= upload
    return ret
def make_windows(edition):
    """Build the Windows Document Server installer for `edition` and
    deploy the resulting exe files to S3 when in deploy mode.

    `edition` is "enterprise", "developer", "prerequisites", or the
    community default.
    """
    edition_products = {
        "enterprise": "DocumentServer-EE",
        "developer": "DocumentServer-DE",
    }
    product_name = edition_products.get(edition, "DocumentServer")
    utils.set_cwd("document-server-package")
    utils.log_h2("server " + edition + " build")
    ret = utils.cmd("make", "clean", verbose=True)
    if edition == "prerequisites":
        make_args = ["exe-pr"]
    else:
        make_args = ["exe", "-e", "PRODUCT_NAME=" + product_name]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
    ret &= utils.cmd("make", *make_args, verbose=True)
    utils.set_summary("server " + edition + " build", ret)
    if common.deploy and ret:
        utils.log_h2("server " + edition + " inno deploy")
        ret = s3_upload(utils.glob_path("exe/*.exe"), "server/win/inno/")
        utils.set_summary("server " + edition + " inno deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return
def make_linux(edition):
    # Build the Linux Document Server packages for `edition`
    # ("enterprise", "developer" or community default) and, in deploy
    # mode, upload each make target's artifacts to S3.
    if edition == "enterprise":
        product_name = "documentserver-ee"
    elif edition == "developer":
        product_name = "documentserver-de"
    else:
        product_name = "documentserver"
    utils.set_cwd("document-server-package")
    utils.log_h2("server " + edition + " build")
    make_args = [t["make"] for t in branding.server_make_targets]
    make_args += ["-e", "PRODUCT_NAME=" + product_name]
    if common.platform == "linux_aarch64":
        make_args += ["-e", "UNAME_M=aarch64"]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
    ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
    utils.set_summary("server " + edition + " build", ret)
    if common.deploy:
        for t in branding.server_make_targets:
            utils.log_h2("server " + edition + " " + t["make"] + " deploy")
            ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
            utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return

View File

@ -1,9 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import codecs
import glob
import hashlib
import os
import platform
import re
@ -11,71 +11,84 @@ import shutil
import subprocess
import sys
import time
import base
def parse():
parser = argparse.ArgumentParser(description="Build packages.")
parser.add_argument('-P', '--product', dest='product', type=str,
action='store', help="Defines product")
parser.add_argument('-S', '--system', dest='system', type=str,
action='store', help="Defines system")
parser.add_argument('-R', '--branding', dest='branding', type=str,
action='store', help="Provides branding path")
parser.add_argument('-V', '--version', dest='version', type=str,
action='store', help="Defines version")
parser.add_argument('-B', '--build', dest='build', type=str,
action='store', help="Defines build")
parser.add_argument('-T', '--targets', dest='targets', type=str, nargs='+',
action='store', help="Defines targets")
args = parser.parse_args()
global product, system, targets, version, build, branding, sign, clean
product = args.product
system = args.system if (args.system is not None) else host_platform()
targets = args.targets
version = args.version if (args.version is not None) else get_env('PRODUCT_VERSION', '0.0.0')
build = args.build if (args.build is not None) else get_env('BUILD_NUMBER', '0')
branding = args.branding
return
import package_common as common
def host_platform():
    """Return the host OS name in lower case ("windows", "darwin", "linux", ...)."""
    system_name = platform.system()
    return system_name.lower()
def log(string, end='\n', bold=False):
if bold:
out = '\033[1m' + string + '\033[0m' + end
else:
out = string + end
sys.stdout.write(out)
def is_windows():
    # True when running on a Windows host.
    return host_platform() == "windows"
def is_macos():
    # True when running on a macOS host (platform.system() == "Darwin").
    return host_platform() == "darwin"
def is_linux():
    # True when running on a Linux host.
    return host_platform() == "linux"
def log(string, end='\n'):
    """Write `string` followed by `end` to stdout and flush immediately."""
    message = string + end
    sys.stdout.write(message)
    sys.stdout.flush()
    return
def get_env(name, default=''):
return os.getenv(name, default)
def set_env(name, value):
os.environ[name] = value
def log_h1(string):
    # Top-level heading: the title framed above and below by a '#' ruler.
    line = "#" * (len(string) + 8)
    log("\n" + line + "\n### " + string + " ###\n" + line + "\n")
    return
def set_cwd(dir):
log("- change working dir: " + dir)
os.chdir(dir)
def log_h2(string):
    # Second-level heading: blank line plus a "### " prefix.
    log("\n### " + string + "\n")
    return
def get_path(*paths):
arr = []
for path in paths:
if host_platform() == 'windows':
arr += path.split('/')
else:
arr += [path]
return os.path.join(*arr)
def log_h3(string):
    # Third-level heading: a "# " prefix, no extra blank lines.
    log("# " + string)
    return
def get_abspath(*paths):
arr = []
for path in paths:
arr += path.split('/')
return os.path.abspath(os.path.join(*arr))
def log_err(string):
    # Error marker: prefix the message with "!!! ".
    log("!!! " + string)
    return
def get_timestamp():
    """Return the current Unix time as a string of whole seconds."""
    now = time.time()
    return "%.f" % now
def get_env(key, default=None):
    """Read environment variable `key`, falling back to `default`."""
    return os.environ.get(key, default)
def set_env(key, value):
    """Set environment variable `key` for this process and its children."""
    os.environ.update({key: value})
    return
def get_cwd():
    """Return the current working directory."""
    cwd = os.getcwd()
    return cwd
def set_cwd(path, verbose=True):
    """Change the current working directory, logging the move unless muted."""
    if verbose:
        log("- change working dir:")
        log(" path: " + path)
    os.chdir(path)
    return
def get_path(path):
    """Normalize a "/"-separated path to the host convention (backslashes on Windows)."""
    if not is_windows():
        return path
    return path.replace("/", "\\")
def get_relpath(path, rel_path):
    # Path of `path` relative to `rel_path`, after host-path normalization.
    return os.path.relpath(get_path(path), get_path(rel_path))
def get_abspath(path):
    # Absolute form of `path`, after host-path normalization.
    return os.path.abspath(get_path(path))
def get_basename(path):
    """Return the final component of `path` (empty for a trailing slash)."""
    return os.path.split(path)[1]
def get_dirname(path):
    """Return the directory portion of `path` (empty for a bare name)."""
    head, _tail = os.path.split(path)
    return head
def get_file_size(path):
    """Return the size in bytes of the file at `path`."""
    return os.stat(path).st_size
def get_script_dir(path):
    # Directory containing the given script file, with symlinks resolved.
    return get_dirname(os.path.realpath(path))
def is_file(path):
    # True when `path` exists and refers to a regular file.
    return os.path.isfile(path)
@ -88,200 +101,295 @@ def is_exist(path):
return True
return False
def get_dirname(path):
    # Directory portion of *path*.
    # NOTE(review): duplicate of an identical definition earlier in this dump —
    # diff-extraction residue; the later definition wins at import time.
    return os.path.dirname(path)
def glob_path(path):
    """Return all filesystem matches for the glob pattern *path* as a list."""
    return list(glob.iglob(path))
def create_dir(path):
log("- create dir: " + path)
def glob_file(path):
    """Return the first match of pattern *path* if it is a regular file, else None."""
    # Fix: the original evaluated glob.glob(path) up to three times per call.
    matches = glob.glob(path)
    if matches and is_file(matches[0]):
        return matches[0]
    return
def _hash_file(path, algo_name):
    # Shared implementation for the three digest helpers. The originals opened
    # the file without closing it (handle leak) and triplicated the logic.
    if not os.path.exists(path):
        return
    h = hashlib.new(algo_name)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            h.update(chunk)
    return h.hexdigest()
def get_hash_sha256(path):
    """Hex SHA-256 digest of the file at *path*, or None if it does not exist."""
    return _hash_file(path, "sha256")
def get_hash_sha1(path):
    """Hex SHA-1 digest of the file at *path*, or None if it does not exist."""
    return _hash_file(path, "sha1")
def get_hash_md5(path):
    """Hex MD5 digest of the file at *path*, or None if it does not exist."""
    return _hash_file(path, "md5")
def create_dir(path, verbose=True):
    """Create directory *path* (with parents); log an error if it already exists."""
    if verbose:
        log("- create_dir:")
        log(" path: " + path)
    if not is_exist(path):
        os.makedirs(path)
    else:
        # Fix: diff extraction left both the old `log("! dir exist")` line and
        # the new log_err() call; only the newer revision's call is kept.
        log_err("dir exist")
    return
def write_file(path, data, encoding='utf-8'):
# NOTE(review): the header above is old-revision residue left by diff
# extraction (it has no body); the current definition follows.
def write_file(path, data, encoding='utf-8', verbose=True):
    """Write *data* to *path* with *encoding*, replacing any existing file."""
    if is_file(path):
        delete_file(path)
    log("- write file: " + path)  # NOTE(review): old-revision log line (diff residue)
    if verbose:
        log("- write_file:")
        log(" path: " + path)
        log(" encoding: " + encoding)
        log(" data: |\n" + data)
    with codecs.open(path, 'w', encoding) as file:
        file.write(data)
    return
def write_template(src, dst, encoding='utf-8', **kwargs):
    """Render the template at *src* with **kwargs and write the result to *dst*.

    Template is presumably jinja2's Template imported elsewhere in this file —
    TODO confirm. An existing *dst* is removed first.
    """
    # Fix: the original leaked the source file handle via open(src).read().
    with open(src) as src_file:
        template = Template(src_file.read())
    if is_file(dst):
        os.remove(dst)
    log("- write template: " + dst + " < " + src)
    with codecs.open(dst, 'w', encoding) as file:
        file.write(template.render(**kwargs))
    return
def replace_in_file(path, pattern, text_replace, encoding='utf-8', verbose=True):
    """Regex-substitute *pattern* with *text_replace* in the file at *path*.

    The file is read fully, rewritten via delete + recreate, using *encoding*.
    Diff extraction had interleaved the old and new revisions of this function;
    this is the newer (verbose-aware, snake_case) revision.
    """
    if verbose:
        log("- replace_in_file:")
        log(" path: " + path)
        log(" pattern: " + pattern)
        log(" replace: " + text_replace)
        log(" encoding: " + encoding)
    file_data = ""
    with codecs.open(get_path(path), "r", encoding) as file:
        file_data = file.read()
    file_data = re.sub(pattern, text_replace, file_data)
    delete_file(path)
    with codecs.open(get_path(path), "w", encoding) as file:
        file.write(file_data)
    return
def copy_file(src, dst, verbose=True):
    """Copy file *src* to *dst* (metadata preserved via shutil.copy2).

    An existing destination is deleted first; a missing source is logged as an
    error and the call returns None. Diff extraction had interleaved two
    revisions here; this is the newer verbose-aware one.
    """
    if verbose:
        log("- copy_file:")
        log(" src: " + src)
        log(" dst: " + dst)
    if is_file(dst):
        delete_file(dst, False)
    if not is_file(src):
        log_err("file not exist: " + src)
        return
    return shutil.copy2(get_path(src), get_path(dst))
def copy_files(src, dst, override=True):
    # NOTE(review): old-revision header (diff residue); shadowed below.
    log("- copy files: " + dst + " < " + src)
def copy_files(src, dst, override=True, verbose=True):
    """Copy everything matching glob *src* into directory *dst*.

    Files copy flat; directories recurse. With override=True an existing
    destination file is deleted first.

    NOTE(review): diff extraction interleaved two revisions of the inner copy —
    both the old copy_file() call and the new shutil.copy2() call appear below.
    Reconcile against the repository before relying on this text.
    """
    if verbose:
        log("- copy_files:")
        log(" src: " + src)
        log(" dst: " + dst)
        log(" override: " + str(override))
    for file in glob.glob(src):
        file_name = os.path.basename(file)
        if is_file(file):
            if override and is_file(dst + "/" + file_name):
                delete_file(dst + "/" + file_name)
            if not is_file(dst + "/" + file_name):
                copy_file(file, dst)  # NOTE(review): old-revision copy call (diff residue)
                if verbose:
                    log(file + " : " + get_path(dst))
                shutil.copy2(file, get_path(dst))
        elif is_dir(file):
            if not is_dir(dst + "/" + file_name):
                create_dir(dst + "/" + file_name)
            copy_files(file + "/*", dst + "/" + file_name, override)
    return
def copy_dir(src, dst):
    # Old-revision copy: replace *dst* with a fresh copy of the tree at *src*.
    # copytree failures are logged, not raised.
    if is_dir(dst):
        delete_dir(dst)
    try:
        shutil.copytree(get_path(src), get_path(dst))
    except OSError as e:
        log('! Directory not copied. Error: %s' % e)
def copy_dir(src, dst, verbose=True):
    """Copy the directory tree *src* to *dst* via shutil.copytree.

    Unlike the old revision, *dst* must not already exist and errors propagate.
    """
    if verbose:
        for line in ("- copy_dir:", " src: " + src, " dst: " + dst):
            log(line)
    shutil.copytree(src, dst)
    return
def copy_dir_content(src, dst, filterInclude = "", filterExclude = ""):
    # NOTE(review): old-revision header; its remaining body lines are
    # interleaved with the newer revision below — diff-extraction residue.
    log("- copy dir content: " + src + " " + dst + " " + filterInclude + " " + filterExclude)
    src_folder = src
    if ("/" != src[-1:]):
        src_folder += "/"
    src_folder += "*"
    for file in glob.glob(src_folder):
        basename = os.path.basename(file)
        if ("" != filterInclude) and (-1 == basename.find(filterInclude)):
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
    """Copy each entry of directory *src* into *dst*, with substring name filters."""
    if verbose:
        log("- copy_dir_content:")
        log(" src: " + src)
        log(" dst: " + dst)
        log(" include: " + filter_include)
        log(" exclude: " + filter_exclude)
    for item in os.listdir(src):
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if ("" != filter_include) and (-1 == item.find(filter_include)):
            continue
        if ("" != filterExclude) and (-1 != basename.find(filterExclude)):  # NOTE(review): old-revision line
        if ("" != filter_exclude) and (-1 != item.find(filter_exclude)):
            continue
        if is_file(file):  # NOTE(review): old-revision line
            copy_file(file, dst)  # NOTE(review): old-revision line
        elif is_dir(file):  # NOTE(review): old-revision line
            copy_dir(file, dst + "/" + basename)  # NOTE(review): old-revision line
        if os.path.isdir(s):
            shutil.copytree(s, d)
        else:
            shutil.copy2(s, d)
        log(item)
    return
def delete_file(path, verbose=True):
    """Remove the file at *path*; a missing file is logged, not raised.

    Diff extraction had interleaved two revisions here; this is the newer
    verbose-aware one (the old `log("! file not exist")` residue is dropped).
    """
    if verbose:
        log("- delete_file:")
        log(" path: " + path)
    if not is_file(path):
        log_err("file not exist")
        return
    return os.remove(path)
def delete_dir(path, verbose=True):
    """Recursively remove directory *path*; a missing dir is logged, not raised.

    Reconstructed newer revision (diff extraction had interleaved two versions).
    """
    if verbose:
        log("- delete_dir:")
        log(" path: " + path)
    if not is_dir(path):
        log_err("dir not exist")
        return
    shutil.rmtree(path, ignore_errors=True)
    return
def delete_files(src, verbose=True):
    """Remove every file and directory matching the glob pattern *src*.

    Reconstructed newer revision: deletes inline via os.remove/shutil.rmtree
    instead of the old delete_file/delete_dir calls (diff interleave residue).
    """
    if verbose:
        log("- delete_files:")
        log(" pattern: " + src)
    for path in glob.glob(src):
        if verbose:
            log(path)
        if is_file(path):
            os.remove(path)
        elif is_dir(path):
            shutil.rmtree(path, ignore_errors=True)
    return
def download_file(url, path):
    # Download *url* to *path* via PowerShell Invoke-WebRequest.
    # NOTE(review): Windows-only as written — relies on the powershell() helper.
    log("- download file: " + path + " < " + url)
    if is_file(path):
        os.remove(path)
    powershell(["Invoke-WebRequest", url, "-OutFile", path])
def set_summary(target, status):
    # Record packaging *status* for *target* in the shared package_common summary list.
    common.summary.append({target: status})
    return
def proc_open(command):
    """Run *command* in a shell, capturing stdout and stderr into a dict.

    NOTE(review): `ret` is built but the visible code never returns it — the
    trailing `return ret` appears to have been lost in extraction; confirm
    against the repository.
    """
    log("- open process: " + command)
    popen = subprocess.Popen(command, stdout=subprocess.PIPE,
        stderr=subprocess.PIPE, shell=True)
    ret = {'stdout' : '', 'stderr' : ''}
    try:
        stdout, stderr = popen.communicate()
        popen.wait()
        # Decode permissively so non-UTF-8 tool output cannot raise here.
        ret['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
        ret['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
    finally:
        popen.stdout.close()
        popen.stderr.close()
def cmd(*args, **kwargs):
    """Run *args as one shell command; return True when the exit code is 0.

    Keyword options: verbose (log the call), chdir (run in that directory,
    restored afterwards), creates (skip with False if the path already exists).
    """
    if kwargs.get("verbose"):
        log("- cmd:")
        log(" command: " + " ".join(args))
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
        if kwargs.get("creates"):
            log(" creates: " + kwargs["creates"])
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        log_err("creates exist")
        return False
    # Fix: oldcwd was only bound inside the chdir branch; when "chdir" named a
    # non-directory, the restore check below raised NameError.
    oldcwd = None
    if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
        oldcwd = get_cwd()
        set_cwd(kwargs["chdir"], verbose=False)
    ret = subprocess.call(
        [i for i in args], stderr=subprocess.STDOUT, shell=True
    ) == 0
    if kwargs.get("chdir") and oldcwd:
        set_cwd(oldcwd, verbose=False)
    return ret
def cmd(prog, args=[], is_no_errors=False):
    """Old-revision runner: execute *prog* with *args*; sys.exit on a non-zero
    exit code unless *is_no_errors* is True.

    NOTE(review): `args=[]` is a mutable default — harmless here because only
    the copy `args[:]` is mutated, but fragile. On non-Windows the argv is
    joined into one double-quoted shell string. No value is returned on
    success; a trailing `return ret` may have been lost in extraction.
    """
    log("- cmd: " + prog + " " + ' '.join(args))
    ret = 0
    if host_platform() == 'windows':
        sub_args = args[:]
        sub_args.insert(0, get_path(prog))
        ret = subprocess.call(sub_args, stderr=subprocess.STDOUT, shell=True)
    else:
        command = prog
        for arg in args:
            command += (" \"%s\"" % arg)
        ret = subprocess.call(command, stderr=subprocess.STDOUT, shell=True)
    if ret != 0 and True != is_no_errors:
        sys.exit("! error (" + prog + "): " + str(ret))
def cmd_output(*args, **kwargs):
    """Run *args as a shell command; return its decoded combined output.

    Raises subprocess.CalledProcessError on a non-zero exit code.
    """
    if kwargs.get("verbose"):
        log("- cmd_output:")
        log(" command: " + " ".join(args))
    output = subprocess.check_output(
        list(args), stderr=subprocess.STDOUT, shell=True
    )
    return output.decode("utf-8")
def powershell(*args, **kwargs):
    """Run *args through `powershell -Command`; True on exit code 0 (Windows).

    kwargs: verbose — log the command; creates — return False early if the
    path already exists.
    """
    if kwargs.get("verbose"):
        log("- powershell:")
        log(" command: " + " ".join(args))
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
        if kwargs.get("creates"):
            log(" creates: " + kwargs["creates"])
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        return False
    # NOTE(review): unlike cmd()/sh(), a provided "chdir" is logged above but
    # never actually applied — confirm whether that is intentional.
    args = ["powershell", "-Command"] + [i for i in args]
    ret = subprocess.call(
        args, stderr=subprocess.STDOUT, shell=True
    ) == 0
    return ret
def powershell(cmd):
    # Old-revision helper: run list *cmd* via `powershell -Command`;
    # exits the whole process on a non-zero return code.
    log("- pwsh: " + ' '.join(cmd))
    ret = subprocess.call(['powershell', '-Command'] + cmd,
        stderr=subprocess.STDOUT, shell=True)
    if ret != 0:
        sys.exit("! error: " + str(ret))
def ps1(file, args=[], **kwargs):
    """Run the PowerShell script *file* with -ExecutionPolicy ByPass (Windows).

    NOTE(review): when "creates" already exists this returns True, whereas
    cmd()/sh()/powershell() return False in the same situation — confirm the
    inconsistency is intentional. `args=[]` is a mutable default (not mutated
    here, only concatenated).
    """
    if kwargs.get("verbose"):
        log("- ps1: " + file + " " + " ".join(args))
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        return True
    ret = subprocess.call(
        ["powershell", "-ExecutionPolicy", "ByPass", "-File", file] + args,
        stderr=subprocess.STDOUT, shell=True
    ) == 0
    return ret
def sh(command, **kwargs):
    """Run *command* through the shell; return True when the exit code is 0.

    Keyword options: verbose (log the call), chdir (run in that directory,
    restored afterwards), creates (skip with False if the path already exists).
    """
    if kwargs.get("verbose"):
        log("- sh:")
        log(" command: " + command)
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
        if kwargs.get("creates"):
            log(" creates: " + kwargs["creates"])
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        log_err("creates exist")
        return False
    # Fix: oldcwd was only bound inside the chdir branch; when "chdir" named a
    # non-directory, the restore check below raised NameError.
    oldcwd = None
    if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
        oldcwd = get_cwd()
        set_cwd(kwargs["chdir"], verbose=False)
    ret = subprocess.call(
        command, stderr=subprocess.STDOUT, shell=True
    ) == 0
    if kwargs.get("chdir") and oldcwd:
        set_cwd(oldcwd, verbose=False)
    return ret
def sh_output(command, **kwargs):
    """Run *command* through the shell and return its decoded combined output.

    The output is also echoed via log(). Supports chdir like sh(). Diff
    extraction had interleaved the old `.strip()` return with the newer plain
    decode; this is the newer revision, with oldcwd pre-initialized so the
    restore check cannot raise NameError.
    """
    if kwargs.get("verbose"):
        log("- sh_output:")
        log(" command: " + command)
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
    oldcwd = None
    if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
        oldcwd = get_cwd()
        set_cwd(kwargs["chdir"], verbose=False)
    ret = subprocess.check_output(
        command, stderr=subprocess.STDOUT, shell=True
    ).decode("utf-8")
    log(ret)
    if kwargs.get("chdir") and oldcwd:
        set_cwd(oldcwd, verbose=False)
    return ret
def get_platform(target):
    """Map a build-target name to {'machine', 'arch', 'xp'}, or None if neither
    '-x64' nor '-x86' appears in *target*."""
    is_xp = ('-xp' in target)
    if '-x64' in target:
        return {'machine': "64", 'arch': "x64", 'xp': is_xp}
    if '-x86' in target:
        return {'machine': "32", 'arch': "x86", 'xp': is_xp}
    return
def s3_upload(src, dst, **kwargs):
    """Upload file *src* to the S3 URL *dst* via the AWS CLI.

    Optional kwargs: endpoint_url (custom S3 endpoint), acl (canned ACL).
    Returns the boolean result of cmd()/sh(), or False when *src* is missing.
    """
    if not is_file(src):
        log_err("file not exist: " + src)
        return False
    # Attach checksums as object metadata so consumers can verify the artifact.
    metadata = "sha256=" + get_hash_sha256(src) \
        + ",sha1=" + get_hash_sha1(src) \
        + ",md5=" + get_hash_md5(src)
    args = ["aws"]
    if kwargs.get("endpoint_url"):
        args += ["--endpoint-url", kwargs["endpoint_url"]]
    args += ["s3", "cp", "--no-progress"]
    if kwargs.get("acl"):
        args += ["--acl", kwargs["acl"]]
    args += ["--metadata", metadata, src, dst]
    # Windows has no POSIX shell helper; elsewhere the argv is joined for sh().
    if is_windows():
        ret = cmd(*args, verbose=True)
    else:
        ret = sh(" ".join(args), verbose=True)
    return ret
# Module-level packaging configuration.
# NOTE(review): a `global` statement at module scope is a no-op in Python —
# these names are module globals regardless; the statement only documents intent.
# NOTE(review): get_abspath is called here with TWO arguments, which matches the
# old *paths signature, not the newer single-argument one — confirm which
# definition is live in the repository.
global git_dir, out_dir, tsa_server, vcredist_links
git_dir = get_abspath(get_dirname(__file__), '../..')   # repository root
out_dir = get_abspath(get_dirname(__file__), '../out')  # package output dir
timestamp = "%.f" % time.time()  # build timestamp, whole seconds
tsa_server = "http://timestamp.digicert.com"  # code-signing timestamp authority
# Visual C++ redistributable download URLs, keyed by VS year then bitness.
vcredist_links = {
    '2022': {
        '64': "https://aka.ms/vs/17/release/vc_redist.x64.exe",
        '32': "https://aka.ms/vs/17/release/vc_redist.x86.exe"
    },
    '2015': {
        '64': "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x64.exe",
        '32': "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x86.exe"
    },
    '2013': {
        '64': "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x64.exe",
        '32': "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x86.exe"
    }
}
# isxdl.dll used by the Inno Setup download helper.
isxdl_link = "https://raw.githubusercontent.com/jrsoftware/ispack/is-5_6_1/isxdlfiles/isxdl.dll"
def s3_copy(src, dst, **kwargs):
    """Copy *src* to *dst* with `aws s3 cp` (either side may be an S3 URL).

    Optional kwargs: endpoint_url (custom S3 endpoint), acl (canned ACL).
    Returns the boolean result of cmd()/sh().
    """
    args = ["aws"]
    if kwargs.get("endpoint_url"):
        args += ["--endpoint-url", kwargs["endpoint_url"]]
    args += ["s3", "cp", "--no-progress"]
    if kwargs.get("acl"):
        args += ["--acl", kwargs["acl"]]
    args += [src, dst]
    if is_windows():
        ret = cmd(*args, verbose=True)
    else:
        ret = sh(" ".join(args), verbose=True)
    return ret

150
scripts/qmake.py Normal file
View File

@ -0,0 +1,150 @@
#!/usr/bin/env python
import os
import sys
__dir__name__ = os.path.dirname(__file__)
sys.path.append(__dir__name__ + '/core_common/modules/android')
import base
import config
import android_ndk
import multiprocessing
def get_make_file_suffix(platform):
    """Build the Makefile name suffix from platform, debug flag and branding.

    NOTE(review): indentation was lost in extraction; it is unclear whether the
    branding suffix is appended unconditionally or only for debug builds —
    confirm against the repository.
    """
    suffix = platform
    if config.check_option("config", "debug"):
        suffix += "_debug_"
    suffix += config.option("branding")
    return suffix
def get_j_num():
    """Return ['-jN'] for parallel make, or [] when multiprocessing is disabled."""
    if config.option("multiprocess") == "0":
        return []
    return ["-j" + str(multiprocessing.cpu_count())]
def check_support_platform(platform):
    """True when the Qt toolchain for *platform* provides a qmake binary."""
    qt_dir = base.qt_setup(platform)
    candidates = (qt_dir + "/bin/qmake", qt_dir + "/bin/qmake.exe")
    return any(base.is_file(path) for path in candidates)
def make(platform, project, qmake_config_addon="", is_no_errors=False):
    """Run qmake + make/nmake for *project* on *platform*.

    Sets up platform-specific environment (Android NDK, iOS Xcode hack),
    regenerates the Makefile, optionally cleans first, then builds. The
    process environment and CWD are restored afterwards.

    NOTE(review): indentation was lost in extraction and reconstructed here;
    verify branch nesting against the repository before changing logic.
    """
    # check platform
    if not check_support_platform(platform):
        print("THIS PLATFORM IS NOT SUPPORTED")
        return
    # Snapshot the environment so every set_env below can be undone at the end.
    old_env = dict(os.environ)
    # qt
    qt_dir = base.qt_setup(platform)
    base.set_env("OS_DEPLOY", platform)
    # pro & makefile
    file_pro = os.path.abspath(project)
    pro_dir = os.path.dirname(file_pro)
    if (pro_dir.endswith("/.")):
        pro_dir = pro_dir[:-2]
    if (pro_dir.endswith("/")):
        pro_dir = pro_dir[:-1]
    makefile_name = "Makefile." + get_make_file_suffix(platform)
    makefile = pro_dir + "/" + makefile_name
    stash_file = pro_dir + "/.qmake.stash"
    old_cur = os.getcwd()
    os.chdir(pro_dir)
    # Remove stale qmake products so the build starts from a clean slate.
    if (base.is_file(stash_file)):
        base.delete_file(stash_file)
    if (base.is_file(makefile)):
        base.delete_file(makefile)
    base.set_env("DEST_MAKEFILE_NAME", "./" + makefile_name)
    # setup android env
    if (-1 != platform.find("android")):
        base.set_env("ANDROID_NDK_HOST", android_ndk.host["arch"])
        base.set_env("ANDROID_NDK_PLATFORM", "android-" + android_ndk.get_sdk_api())
        base.set_env("PATH", qt_dir + "/bin:" + android_ndk.toolchain_dir() + "/bin:" + base.get_env("PATH"))
    # setup ios env
    if (-1 != platform.find("ios")):
        base.hack_xcode_ios()
    # NOTE(review): this makefile delete repeats the one above — possible diff residue.
    if base.is_file(makefile):
        base.delete_file(makefile)
    config_param = base.qt_config(platform)
    if ("" != qmake_config_addon):
        config_param += (" " + qmake_config_addon)
    # qmake ADDON
    qmake_addon = []
    if ("" != config.option("qmake_addon")):
        qmake_addon = config.option("qmake_addon").split()
    clean_params = ["clean", "-f", makefile]
    distclean_params = ["distclean", "-f", makefile]
    build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon
    qmake_app = qt_dir + "/bin/qmake"
    # non windows platform
    if not base.is_windows():
        if base.is_file(qt_dir + "/onlyoffice_qt.conf"):
            build_params.append("-qtconf")
            build_params.append(qt_dir + "/onlyoffice_qt.conf")
        base.cmd(qmake_app, build_params)
        base.correct_makefile_after_qmake(platform, makefile)
        if ("1" == config.option("clean")):
            base.cmd_and_return_cwd("make", clean_params, True)
            base.cmd_and_return_cwd("make", distclean_params, True)
            # Regenerate the Makefile after distclean removed it.
            base.cmd(qmake_app, build_params)
            base.correct_makefile_after_qmake(platform, makefile)
        base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num(), is_no_errors)
    else:
        config_params_array = base.qt_config_as_param(config_param)
        config_params_string = ""
        for item in config_params_array:
            config_params_string += (" \"" + item + "\"")
        qmake_addon_string = " ".join(qmake_addon)
        if ("" != qmake_addon_string):
            qmake_addon_string = " " + qmake_addon_string
        qmake_bat = []
        qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
        # NOTE(review): qmake_addon_string is recomputed here, discarding the
        # value built just above — likely diff residue; confirm which is live.
        qmake_addon_string = ""
        if ("" != config.option("qmake_addon")):
            qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
        qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
        if ("1" == config.option("clean")):
            qmake_bat.append("call nmake " + " ".join(clean_params))
            qmake_bat.append("call nmake " + " ".join(distclean_params))
            qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
        if ("0" != config.option("multiprocess")):
            qmake_bat.append("set CL=/MP")
        qmake_bat.append("call nmake -f " + makefile)
        base.run_as_bat(qmake_bat, is_no_errors)
    if (base.is_file(stash_file)):
        base.delete_file(stash_file)
    os.chdir(old_cur)
    # Restore the pre-call environment exactly.
    os.environ.clear()
    os.environ.update(old_env)
    return
def make_all_platforms(project, qmake_config_addon=""):
    """Invoke make() for each requested platform that is actually configured."""
    requested = config.option("platform").split()
    for platform in requested:
        if not platform in config.platforms:
            continue
        print("------------------------------------------")
        print("BUILD_PLATFORM: " + platform)
        print("------------------------------------------")
        make(platform, project, qmake_config_addon)
    return

View File

@ -1,9 +1,11 @@
#!/usr/bin/env python
import os
import shutil
import re
import argparse
def readFile(path):
    """Return the full text of *path*; undecodable bytes become U+FFFD.

    Reconstructed from a diff interleave: keeps the newer revision's
    errors='replace' so mixed-encoding sources do not abort generation.
    """
    with open(path, "r", errors='replace') as file:
        filedata = file.read()
    return filedata
@ -46,12 +48,12 @@ class EditorApi(object):
if -1 != retParam.find("[]"):
isArray = True
retParam = retParam.replace("[]", "")
retType = retParam.replace("|", " ").split(" ")[0]
retType = retParam.replace("|", " ").replace(".", " ").split(" ")[0]
retTypeLower = retType.lower()
retValue = ""
if -1 != retType.find("\""):
retValue = "\"\""
elif "bool" == retTypeLower:
elif "boolean" == retTypeLower or "bool" == retTypeLower:
retValue = "true"
elif "string" == retTypeLower:
retValue = "\"\""
@ -61,6 +63,12 @@ class EditorApi(object):
retValue = "undefined"
elif "null" == retTypeLower:
retValue = "null"
elif "array" == retTypeLower:
retValue = "[]"
elif "base64img" == retTypeLower:
retValue = "base64img"
elif "error" == retTypeLower:
retValue = "undefined"
else:
retValue = "new " + retType + "()"
if isArray:
@ -72,30 +80,42 @@ class EditorApi(object):
rec = rec.replace("\t", "")
rec = rec.replace('\n ', '\n')
indexEndDecoration = rec.find("*/")
indexOfStartPropName = rec.find('Object.defineProperty(')
if indexOfStartPropName != -1:
propName = re.search(r'"([^\"]*)"', rec[indexOfStartPropName:])[0]
else:
propName = None
decoration = "/**" + rec[0:indexEndDecoration + 2]
decoration = decoration.replace("Api\n", "ApiInterface\n")
decoration = decoration.replace("Api ", "ApiInterface ")
decoration = decoration.replace("{Api}", "{ApiInterface}")
decoration = decoration.replace("@return ", "@returns ")
decoration = decoration.replace("@returns {?", "@returns {")
decoration = decoration.replace("?}", "}")
if -1 != decoration.find("@name ApiInterface"):
self.append_record(decoration, "var ApiInterface = function() {};\nvar Api = new ApiInterface();\n", True)
return
code = rec[indexEndDecoration + 2:]
code = code.strip("\t\n\r ")
code = code.replace("=\n", "= ").strip("\t\n\r ")
lines = code.split("\n")
codeCorrect = ""
sFuncName = ""
sMethodName = re.search(r'.prototype.(.*)=', code)
is_found_function = False
addon_for_func = "{}"
if -1 != decoration.find("@return"):
addon_for_func = "{ return null; }"
for line in lines:
line = line.strip("\t\n\r ")
line = line.replace("{", "")
line = line.replace("}", "")
lineWithoutSpaces = line.replace(" ", "")
if not is_found_function and 0 == line.find("function "):
if -1 == decoration.find("@constructor"):
return
codeCorrect += (line + addon_for_func + "\n")
is_found_function = True
if not is_found_function and -1 != line.find(".prototype."):
@ -107,6 +127,20 @@ class EditorApi(object):
codeCorrect += (line + "\n")
codeCorrect = codeCorrect.replace("Api.prototype", "ApiInterface.prototype")
self.append_record(decoration, codeCorrect)
className = codeCorrect[0:codeCorrect.find('.')]
# если свойство определено сразу под методом (без декорации)
if propName is not None and sMethodName is not None:
prop_define = f'{className}.prototype.{propName[1:-1]} = {className}.prototype.{sMethodName.group(1)}();\n'
self.append_record(decoration, prop_define)
#иначе
elif propName is not None:
className = re.search(r'.defineProperty\((.*).prototype', code).group(1).strip()
returnValue = 'undefined' if decoration.find('@return') == -1 else self.getReturnValue(decoration)
if (returnValue != 'undefined'):
returnValue = re.search(r'{ return (.*); }', returnValue).group(1).strip()
prop_define = f'{className}.prototype.{propName[1:-1]} = {returnValue};\n'
self.append_record(decoration, prop_define)
return
def append_record(self, decoration, code, init=False):
@ -147,7 +181,7 @@ class EditorApi(object):
def generate(self):
for file in self.files:
file_content = readFile(file)
file_content = readFile(f'{sdkjs_dir}/{file}')
arrRecords = file_content.split("/**")
arrRecords = arrRecords[1:-1]
for record in arrRecords:
@ -155,8 +189,8 @@ class EditorApi(object):
self.numfile += 1
correctContent = ''.join(self.records)
correctContent += "\n"
os.mkdir('deploy/api_builder/' + self.folder)
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
os.mkdir(args.destination + self.folder)
writeFile(args.destination + self.folder + "/api.js", correctContent)
return
def convert_to_interface(arrFiles, sEditorType):
@ -165,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType):
editor.generate()
return
old_cur = os.getcwd()
os.chdir("../../../sdkjs")
if True == os.path.isdir('deploy/api_builder'):
shutil.rmtree('deploy/api_builder', ignore_errors=True)
os.mkdir('deploy/api_builder')
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)
# Location of the sdkjs sources relative to this script.
sdkjs_dir = "../../../sdkjs"

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # the argument is optional
        # Fix: the original default mixed backslashes into the path
        # ("...github.io\sdkjs-plugins\content\macros\libs/"), which are
        # literal '\' characters and break on non-Windows platforms.
        default="../../../onlyoffice.github.io/sdkjs-plugins/content/macros/libs/"
    )
    args = parser.parse_args()
    old_cur = os.getcwd()
    # Recreate the destination from scratch.
    # NOTE(review): os.mkdir requires the parent directory to exist already.
    if True == os.path.isdir(args.destination):
        shutil.rmtree(args.destination, ignore_errors=True)
    os.mkdir(args.destination)
    convert_to_interface(["word/apiBuilder.js"], "word")
    convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
    convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
    os.chdir(old_cur)

View File

@ -0,0 +1,80 @@
# Documentation Generation Guide
This guide explains how to generate documentation for the ONLYOFFICE Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors.
## Requirements
```bash
Node.js v22 and above
Python v3.10 and above
```
## Installation
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
cd build_tools/scripts/sdkjs_common/jsdoc
npm install
```
## Scripts Overview
### `generate_docs_json.py`
This script generates JSON documentation based on the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api`.
### `generate_docs_plugins_json.py`
This script generates JSON documentation based on the `api_plugins.js` files.
- **Usage**:
```bash
python generate_docs_plugins_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `../../../../office-js-api-declarations/office-js-api-plugins`.
### `generate_docs_md.py`
This script generates Markdown documentation from the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `../../../../office-js-api/`.
## Example
To generate JSON documentation with a custom output path:
```bash
python generate_docs_json.py /path/to/save/json
```
To generate plugins JSON documentation with a custom output path:
```bash
python generate_docs_plugins_json.py /path/to/save/json
```
To generate Markdown documentation and specify a custom output path:
```bash
python generate_docs_md.py /path/to/save/markdown
```
## Notes
- Make sure to have all necessary permissions to run these scripts and write to the specified directories.
- The output directories will be created if they do not exist.

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,216 @@
// JSDoc plugin: after parsing completes, strip the doclet list down to the
// methods, classes and typedefs that belong to the editor named in the
// EDITOR environment variable, keeping only the fields the downstream
// documentation generators consume.
exports.handlers = {
    processingComplete: function(e) {
        // array for filtered doclets
        let filteredDoclets = [];
        // Strip the '<anonymous>~' prefix and any quotes JSDoc adds to names.
        const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
        const classesDocletsMap = {}; // doclets for classes write at the end
        let passedClasses = []; // passed classes for current editor
        // Remove duplicate doclets: for each longname keep only the latest one.
        const latestDoclets = {};
        e.doclets.forEach(doclet => {
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;
            if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
                latestDoclets[doclet.longname] = doclet;
            }
        });
        e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));
        // check available classes for current editor
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAdd =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' &&
                (!isMethod || hasTypeofEditorsTag);
            if (shouldAdd) {
                // A class is "passed" when at least one surviving member references it.
                if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
                    passedClasses.push(cleanName(doclet.memberof));
                }
            }
            else if (doclet.kind == 'class') {
                classesDocletsMap[cleanName(doclet.name)] = doclet;
            }
        }
        // remove classes unavailable in the current editor
        passedClasses = passedClasses.filter(className => {
            const doclet = classesDocletsMap[className];
            if (!doclet) {
                return true;
            }
            const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
            // class is passed if there is no editor tag or the current editor is among the tags
            const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
            return isPassed;
        });
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;
            if (shouldAddMethod) {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.memberof))) {
                    continue;
                }
                // We leave only the necessary fields
                doclet.memberof = cleanName(doclet.memberof);
                doclet.longname = cleanName(doclet.longname);
                doclet.name = cleanName(doclet.name);
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    memberof: cleanName(doclet.memberof),
                    params: doclet.params ? doclet.params.map(param => ({
                        type: param.type ? {
                            names: param.type.names,
                            parsedType: param.type.parsedType
                        } : param.type,
                        name: param.name,
                        description: param.description,
                        optional: param.optional,
                        defaultvalue: param.defaultvalue
                    })) : doclet.params,
                    returns: doclet.returns ? doclet.returns.map(returnObj => ({
                        type: {
                            names: returnObj.type.names,
                            parsedType: returnObj.type.parsedType
                        }
                    })) : doclet.returns,
                    name: doclet.name,
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: doclet.scope,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    see: doclet.see
                };
                // Add the filtered doclet to the array
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'class') {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.name))) {
                    continue;
                }
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    augments: doclet.augments || undefined,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see || undefined
                };
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'typedef') {
                // Typedefs are kept unconditionally (no editor filtering).
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type
                };
                filteredDoclets.push(filteredDoclet);
            }
        }
        // Replace doclets with the filtered array (in place, same array object)
        e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
    }
};

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/cell/api_plugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../sdkjs/common/apiBase_plugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,85 @@
exports.handlers = {
processingComplete: function(e) {
const filteredDoclets = [];
function checkNullProps(oDoclet) {
for (let key of Object.keys(oDoclet)) {
if (oDoclet[key] == null) {
delete oDoclet[key];
}
if (typeof(oDoclet[key]) == "object") {
checkNullProps(oDoclet[key]);
}
}
}
for (let i = 0; i < e.doclets.length; i++) {
const doclet = e.doclets[i];
if (true == doclet.undocumented || doclet.kind == 'package') {
continue;
}
const filteredDoclet = {
comment: doclet.comment,
meta: doclet.meta ? {
lineno: doclet.meta.lineno,
columnno: doclet.meta.columnno
} : doclet.meta,
kind: doclet.kind,
since: doclet.since,
name: doclet.name,
type: doclet.type ? {
names: doclet.type.names,
parsedType: doclet.type.parsedType
} : doclet.type,
description: doclet.description,
memberof: doclet.memberof,
properties: doclet.properties ? doclet.properties.map(property => ({
type: property.type ? {
names: property.type.names,
parsedType: property.type.parsedType
} : property.type,
name: property.name,
description: property.description,
optional: property.optional,
defaultvalue: property.defaultvalue
})) : doclet.properties,
longname: doclet.longname,
scope: doclet.scope,
alias: doclet.alias,
params: doclet.params ? doclet.params.map(param => ({
type: param.type ? {
names: param.type.names,
parsedType: param.type.parsedType
} : param.type,
name: param.name,
description: param.description,
optional: param.optional,
defaultvalue: param.defaultvalue
})) : doclet.params,
returns: doclet.returns ? doclet.returns.map(returnObj => ({
type: {
names: returnObj.type.names,
parsedType: returnObj.type.parsedType
}
})) : doclet.returns,
see: doclet.see
};
checkNullProps(filteredDoclet)
filteredDoclets.push(filteredDoclet);
}
e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
}
};

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs-forms/apiPlugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/slide/api_plugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,16 @@
{
"source": {
"include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -0,0 +1,110 @@
import os
import subprocess
import json
import argparse
import re
import platform
# Repository root, relative to this script's working directory.
root = '../../../..'

# JSDoc configuration files, one per editor.
configs = [
    "./config/builder/word.json",
    "./config/builder/cell.json",
    "./config/builder/slide.json",
    "./config/builder/forms.json"
]

# Editor name -> value of the EDITOR environment variable consumed during
# JSDoc generation (presumably editor product codes, e.g. CDE = document
# editor -- confirm against the doclet-filter plugin).
editors_maps = {
    "word": "CDE",
    "cell": "CSE",
    "slide": "CPE",
    "forms": "CFE"
}
def generate(output_dir, md=False):
    """
    Generate JSON doclet dumps for every builder config and inline the
    example files referenced via @see.

    output_dir -- folder receiving one <editor>.json per config.
    md         -- True when the output feeds the Markdown generator: the
                  example goes into doclet['example']; otherwise a runnable
                  "Try it" block is appended to the description.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Generate JSON documentation.
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # FIX: pass EDITOR through subprocess's env= instead of a shell
        # `set`/`export` prefix. On cmd.exe, `set EDITOR=CDE && ...` stores
        # the trailing space in the value; env= is correct on every platform.
        env = os.environ.copy()
        env["EDITOR"] = editors_maps[editor_name]
        command = f"npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True, env=env)
    # Append examples to JSON documentation.
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Read the JSON file.
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Modify JSON data.
        for doclet in data:
            if doclet.get('see') is not None:
                # @see holds an example path relative to the repo root;
                # {Editor} is substituted with the editor name (forms
                # examples live under Word).
                if editor_name == 'forms':
                    doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
                else:
                    doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())
                file_path = f'{root}/' + doclet['see'][0]
                if os.path.exists(file_path):
                    with open(file_path, 'r', encoding='utf-8') as see_file:
                        example_content = see_file.read()
                    # Extract the first line as a comment if it exists.
                    lines = example_content.split('\n')
                    if lines[0].startswith('//'):
                        comment = lines[0] + '\n'
                        code_content = '\n'.join(lines[1:])
                    else:
                        comment = ''
                        code_content = example_content
                    if md:
                        doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
                    else:
                        document_type = editor_name
                        if "forms" == document_type:
                            document_type = "pdf"  # forms examples run in the PDF editor
                        # Guard against a missing/None description: the JSDoc
                        # filter plugin strips null fields entirely.
                        doclet['description'] = (doclet.get('description') or '') + \
                            f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'
        # Write the modified JSON file back.
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)
    print("Documentation generation for builder completed.")
def remove_builder_lines(text):
    """Drop every line whose stripped content starts with 'builder.'."""
    kept = (line for line in text.splitlines()
            if not line.strip().startswith("builder."))
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JS comment delimiters while preserving the commented text itself."""
    # Drop a leading '//' (plus at most one following space) on every line.
    line_marker = re.compile(r'^\s*//\s?', re.MULTILINE)
    # Drop the '/*' and '*/' delimiters, keeping the enclosed text.
    block_marker = re.compile(r'/\*\s*|\s*\*/', re.DOTALL)
    return block_marker.sub('', line_marker.sub('', text)).strip()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate documentation")
parser.add_argument(
"destination",
type=str,
help="Destination directory for the generated documentation",
nargs='?', # Indicates the argument is optional
default=f"{root}/office-js-api-declarations/office-js-api"
)
args = parser.parse_args()
generate(args.destination)

View File

@ -0,0 +1,399 @@
import os
import json
import re
import shutil
import argparse
import generate_docs_json
# Editors whose JSON doclet dumps are converted to Markdown.
editors = [
    "word",
    "cell",
    "slide",
    "forms"
]
# Relative paths of generated pages that have no example; reported to CI by
# the __main__ block via START/END_MISSING_EXAMPLES markers.
missing_examples = []
def load_json(file_path):
    """Parse and return the JSON document stored at *file_path* (UTF-8)."""
    with open(file_path, encoding='utf-8') as handle:
        return json.load(handle)
def write_markdown_file(file_path, content):
    """Write *content* to *file_path* as UTF-8 text, overwriting any existing file."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(content)
def remove_js_comments(text):
    """Delete JavaScript comments (// line comments and /* */ blocks) from *text*."""
    without_line = re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE)
    without_block = re.sub(r'/\*.*?\*/', '', without_line, flags=re.DOTALL)
    return without_block.strip()
def correct_description(string):
    """
    Normalize a doclet description for Markdown output:
    - <b>/</b> tags become ** (bold)
    - <note>...</note> becomes an inline 💡 note
    - None becomes a default placeholder string
    """
    if string is None:
        return 'No description provided.'
    bolded = string.replace('<b>', '**').replace('</b>', '**')
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', bolded, flags=re.DOTALL)
def correct_default_value(value, enumerations, classes):
    """
    Render a parameter's default value for the Markdown parameters table.

    Booleans become the JavaScript literals "true"/"false"; anything else is
    stringified and passed through the type-linking helper so enum/class
    defaults become links.  Returns '' when there is no default.
    """
    if value is None:
        return ''
    # FIX: use isinstance() so numeric defaults are not mistaken for
    # booleans -- in Python, 1 == True and 0 == False, so the previous
    # `value == True` rendered a default of 1 as "true".
    if isinstance(value, bool):
        value = "true" if value else "false"
    else:
        value = str(value)
    return generate_data_types_markdown([value], enumerations, classes)
def remove_line_breaks(string):
    """Collapse any run of CR/LF characters into a single space."""
    return ' '.join(re.split(r'[\r\n]+', string))
# Convert Array.<T> => T[] (including nested arrays).
def convert_jsdoc_array_to_ts(type_str: str) -> str:
    """
    Rewrite JSDoc 'Array.<T>' notation as TypeScript 'T[]', handling
    nesting: 'Array.<Array.<string>>' -> 'string[][]'.
    """
    pattern = re.compile(r'Array\.<([^>]+)>')
    match = pattern.search(type_str)
    while match is not None:
        # Recursively normalize the element type before re-splicing it.
        inner = convert_jsdoc_array_to_ts(match.group(1).strip())
        type_str = f"{type_str[:match.start()]}{inner}[]{type_str[match.end():]}"
        match = pattern.search(type_str)
    return type_str
def escape_text_outside_code_blocks(markdown: str) -> str:
    """
    Escape MDX-sensitive characters (<, >, {, }) everywhere except inside
    fenced ``` code blocks.
    """
    mdx_escapes = str.maketrans({'<': '&lt;', '>': '&gt;', '{': '&#123;', '}': '&#125;'})
    # re.split with a capturing group alternates prose, fence, prose, fence...
    segments = re.split(r'(```.*?```)', markdown, flags=re.DOTALL)
    return "".join(
        seg if idx % 2 else seg.translate(mdx_escapes)
        for idx, seg in enumerate(segments)
    )
def get_base_type(ts_type: str) -> str:
    """Return the element type of a TS array type: 'Drawing[][]' -> 'Drawing'."""
    return re.sub(r'(?:\[\])+$', '', ts_type)
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """
    Build the Markdown rendering of a list of JSDoc types:

    1) 'Array.<T>' forms become 'T[]'.
    2) A type whose base name matches a known enumeration (checked first)
       or class becomes a relative Markdown link.
    3) Variants are joined with ' | '.
    4) Leftover generic angle brackets (e.g. Object.<string, number>) are
       HTML-escaped for MDX.
    """
    enum_names = {enum['name'] for enum in enumerations}

    def linkify(ts_type):
        base = get_base_type(ts_type)
        if base in enum_names:
            return f"[{ts_type}]({root}Enumeration/{base}.md)"
        if base in classes:
            return f"[{ts_type}]({root}{base}/{base}.md)"
        return ts_type

    rendered = ' | '.join(linkify(convert_jsdoc_array_to_ts(t)) for t in types)
    # Escape generics that survived conversion.
    return re.sub(r'<([^<>]+)>',
                  lambda m: f"&lt;{m.group(1).strip()}&gt;",
                  rendered)
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Render a class landing page: intro, properties table, method index."""
    parts = [
        f"# {class_name}\n\nRepresents the {class_name} class.\n\n",
        generate_properties_markdown(properties, enumerations, classes),
        "## Methods\n\n",
    ]
    parts.extend(f"- [{m['name']}](./Methods/{m['name']}.md)\n" for m in methods)
    # MDX-escape everything outside fenced code blocks just before returning.
    return escape_text_outside_code_blocks("".join(parts))
def generate_method_markdown(method, enumerations, classes):
    """
    Render the Markdown page for a single method doclet: title, syntax,
    parameters table, return type, and optional example.
    """
    method_name = method['name']
    description = method.get('description', 'No description provided.')
    description = correct_description(description)
    params = method.get('params', [])
    returns = method.get('returns', [])
    example = method.get('example', '')
    memberof = method.get('memberof', '')
    content = f"# {method_name}\n\n{description}\n\n"
    # Syntax
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"
    # Parameters
    content += "## Parameters\n\n"
    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            # A param may have no 'type' at all -- guard before reading names.
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)
            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"
    # Returns
    content += "\n## Returns\n\n"
    if returns:
        # Only the first @returns entry is documented.
        return_type_list = returns[0].get('type', {}).get('names', [])
        return_type_md = generate_data_types_markdown(return_type_list, enumerations, classes)
        content += return_type_md
    else:
        content += "This method doesn't return any data."
    # Example
    if example:
        # The example may embed a descriptive comment before a ```js fence.
        if '```js' in example:
            comment, code = example.split('```js', 1)
            comment = remove_js_comments(comment)
            content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n"
        else:
            # If there's no triple-backtick structure, just show it as code
            cleaned_example = remove_js_comments(example)
            content += f"\n\n## Example\n\n```javascript\n{cleaned_example}\n```\n"
    return escape_text_outside_code_blocks(content)
def generate_properties_markdown(properties, enumerations, classes, root='../'):
    """Render a '## Properties' table, or '' when the doclet has none."""
    if properties is None:
        return ''
    rows = ["## Properties\n\n",
            "| Name | Type | Description |\n",
            "| ---- | ---- | ----------- |\n"]
    for prop in properties:
        type_names = prop['type']['names'] if prop.get('type') else []
        types_md = generate_data_types_markdown(type_names, enumerations, classes, root)
        description = remove_line_breaks(
            correct_description(prop.get('description', 'No description provided.')))
        rows.append(f"| {prop['name']} | {types_md} | {description} |\n")
    # MDX-escape everything outside fenced code blocks.
    return escape_text_outside_code_blocks("".join(rows))
def generate_enumeration_markdown(enumeration, enumerations, classes):
    """
    Render a Markdown page for a 'typedef' doclet.

    Returns None for union typedefs whose every variant is an internal Api*
    type (nothing worth documenting); otherwise returns the page content.
    """
    enum_name = enumeration['name']
    description = correct_description(enumeration.get('description', 'No description provided.'))
    example = enumeration.get('example', '')
    content = f"# {enum_name}\n\n{description}\n\n"
    # FIX: the doclet-filter plugin deletes null/undefined keys entirely, so
    # 'parsedType' and 'properties' may be missing -- use .get() to avoid
    # KeyError (the plugins-md variant of this function already does this).
    ptype = enumeration['type'].get('parsedType')
    if ptype is not None and ptype['type'] == 'TypeUnion':
        enum_empty = True  # stays True while no documentable variant is found
        content += "## Type\n\nEnumeration\n\n"
        content += "## Values\n\n"
        # Each top-level name in the union.
        for raw_t in enumeration['type']['names']:
            ts_t = convert_jsdoc_array_to_ts(raw_t)
            # Link variants that are themselves known enumerations/classes.
            if any(enum['name'] == raw_t for enum in enumerations):
                content += f"- [{ts_t}](../Enumeration/{raw_t}.md)\n"
                enum_empty = False
            elif raw_t in classes:
                content += f"- [{ts_t}](../{raw_t}/{raw_t}.md)\n"
                enum_empty = False
            elif ts_t.find('Api') == -1:
                # Plain values are listed as-is; internal Api* types are skipped.
                content += f"- {ts_t}\n"
                enum_empty = False
        if enum_empty:
            return None
    elif enumeration.get('properties') is not None:
        content += "## Type\n\nObject\n\n"
        content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
    else:
        content += "## Type\n\n"
        # Not a union and no properties: simply print the type(s).
        types = enumeration['type']['names']
        content += generate_data_types_markdown(types, enumerations, classes) + "\n\n"
    # Example
    if example:
        if '```js' in example:
            comment, code = example.split('```js', 1)
            content += f"\n\n## Example\n\n{remove_js_comments(comment)}\n\n```javascript\n{code.strip()}\n"
        else:
            # If there's no triple-backtick structure, show the whole text as code.
            content += f"\n\n## Example\n\n```javascript\n{remove_js_comments(example)}\n```\n"
    return escape_text_outside_code_blocks(content)
def process_doclets(data, output_dir, editor_name):
    """
    Write per-class and per-enumeration Markdown files for one editor under
    <output_dir>/<editor_name>/.

    Side effect: appends the relative path of every method/enumeration page
    that has no example to the module-level `missing_examples` list.
    """
    classes = {}        # class name -> list of its method doclets
    classes_props = {}  # class name -> the class doclet's properties (or None)
    enumerations = []   # 'typedef' doclets
    editor_dir = os.path.join(output_dir, editor_name)
    for doclet in data:
        if doclet['kind'] == 'class':
            class_name = doclet['name']
            classes[class_name] = []
            classes_props[class_name] = doclet.get('properties', None)
        elif doclet['kind'] == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                # A method doclet may appear before its class doclet.
                if class_name not in classes:
                    classes[class_name] = []
                classes[class_name].append(doclet)
        elif doclet['kind'] == 'typedef':
            enumerations.append(doclet)
    # Process classes
    # NOTE(review): if a method's class doclet never appears, classes_props
    # lacks that key and the lookup below raises KeyError -- confirm the
    # inputs always include the class doclet.
    for class_name, methods in classes.items():
        class_dir = os.path.join(editor_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)
        # Write class file
        class_content = generate_class_markdown(
            class_name,
            methods,
            classes_props[class_name],
            enumerations,
            classes
        )
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)
        # Write method files
        for method in methods:
            method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
            method_content = generate_method_markdown(method, enumerations, classes)
            write_markdown_file(method_file_path, method_content)
            if not method.get('example', ''):
                missing_examples.append(os.path.relpath(method_file_path, output_dir))
    # Process enumerations
    enum_dir = os.path.join(editor_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)
    for enum in enumerations:
        enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
        enum_content = generate_enumeration_markdown(enum, enumerations, classes)
        # None means a union of internal-only types -- no page is written.
        if enum_content is None:
            continue
        write_markdown_file(enum_file_path, enum_content)
        if not enum.get('example', ''):
            missing_examples.append(os.path.relpath(enum_file_path, output_dir))
def generate(output_dir):
    """
    Generate Markdown documentation for every editor.

    JSON doclets are produced into a temporary folder, converted to Markdown
    under <output_dir>/<Editor>/, then the temporary folder is removed.
    """
    print('Generating Markdown documentation...')
    # FIX: os.path.join instead of string concatenation, so the function also
    # works when output_dir is passed without a trailing slash.
    tmp_dir = os.path.join(output_dir, 'tmp_json')
    generate_docs_json.generate(tmp_dir, md=True)
    for editor_name in editors:
        input_file = os.path.join(tmp_dir, editor_name + ".json")
        editor_dir = os.path.join(output_dir, editor_name.title())
        # Start from a clean output folder; ignore_errors covers the first
        # run, when the folder does not exist yet (rmtree would otherwise
        # raise FileNotFoundError).
        shutil.rmtree(editor_dir, ignore_errors=True)
        os.makedirs(editor_dir)
        data = load_json(input_file)
        process_doclets(data, output_dir, editor_name.title())
    shutil.rmtree(tmp_dir)
    print('Done')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate documentation")
parser.add_argument(
"destination",
type=str,
help="Destination directory for the generated documentation",
nargs='?', # Indicates the argument is optional
default="../../../../office-js-api/" # Default value
)
args = parser.parse_args()
generate(args.destination)
print("START_MISSING_EXAMPLES")
print(",".join(missing_examples))
print("END_MISSING_EXAMPLES")

View File

@ -0,0 +1,111 @@
import os
import subprocess
import json
import argparse
import re
# JSDoc configuration files for the plugin API; the 'common' doclets are
# generated once and merged into every editor's output.
configs = [
    "./config/plugins/common.json",
    "./config/plugins/word.json",
    "./config/plugins/cell.json",
    "./config/plugins/slide.json",
    "./config/plugins/forms.json"
]
# Repository root, relative to this script's working directory.
root = '../../../..'
def generate(output_dir, md=False):
    """
    Generate plugin-API JSON doclet dumps, one <editor>.json per config.

    The 'common' doclets are generated first, kept in memory, appended to
    every editor's doclet list (the standalone common.json is deleted).
    Example files referenced via @see are inlined into doclet['examples'];
    when md is False a runnable "Try it" block is also appended to the
    description.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        command = f"npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)
    # Keep the common doclets serialized in memory and drop the file; a fresh
    # copy is deserialized per editor so in-place edits don't leak across files.
    common_doclets_file = os.path.join(output_dir, 'common.json')
    with open(common_doclets_file, 'r', encoding='utf-8') as f:
        common_doclets_json = json.dumps(json.load(f))
    os.remove(common_doclets_file)
    # Append examples to JSON documentation
    for config in configs:
        if (config.find('common') != -1):
            continue
        editor_name = config.split('/')[-1].replace('.json', '')
        example_folder_name = editor_name  # name of folder with examples
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
        start_common_doclet_idx = len(data)
        data += json.loads(common_doclets_json)
        # Modify JSON data
        for idx, doclet in enumerate(data):
            if idx == start_common_doclet_idx:
                # From this index on, doclets came from the shared common dump.
                example_folder_name = 'common'
            elif editor_name == 'forms':
                # Forms examples live in the 'word' folder.
                # NOTE(review): for 'forms', this resets the folder back to
                # 'word' on the doclet right after the common marker, so only
                # the first common doclet uses 'common' -- confirm intended.
                example_folder_name = 'word'
            if 'see' in doclet:
                if doclet['see'] is not None:
                    doclet['see'][0] = doclet['see'][0].replace('{Editor}', example_folder_name.title())
                    file_path = f'{root}/' + doclet['see'][0]
                    if os.path.exists(file_path):
                        with open(file_path, 'r', encoding='utf-8') as see_file:
                            example_content = see_file.read()
                        # Extract the first line as a comment if it exists
                        lines = example_content.split('\n')
                        if lines[0].startswith('//'):
                            comment = lines[0] + '\n'
                            code_content = '\n'.join(lines[1:])
                        else:
                            comment = ''
                            code_content = example_content
                        doclet['examples'] = [remove_js_comments(comment) + code_content]
                        if md == False:
                            document_type = editor_name
                            if "forms" == document_type:
                                document_type = "pdf"
                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'
        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)
    print("Documentation generation for builder completed.")
def remove_builder_lines(text):
    """Drop every line whose stripped content starts with 'builder.'."""
    kept = (line for line in text.splitlines()
            if not line.strip().startswith("builder."))
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JS comment delimiters while preserving the commented text itself."""
    # Drop a leading '//' (plus at most one following space) on every line.
    line_marker = re.compile(r'^\s*//\s?', re.MULTILINE)
    # Drop the '/*' and '*/' delimiters, keeping the enclosed text.
    block_marker = re.compile(r'/\*\s*|\s*\*/', re.DOTALL)
    return block_marker.sub('', line_marker.sub('', text)).strip()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate documentation")
parser.add_argument(
"destination",
type=str,
help="Destination directory for the generated documentation",
nargs='?', # Indicates the argument is optional
default=f"{root}/office-js-api-declarations/office-js-api-plugins"
)
args = parser.parse_args()
generate(args.destination)

View File

@ -0,0 +1,457 @@
import os
import json
import re
import shutil
import argparse
import generate_docs_plugins_json
# Editors whose plugin-API JSON dumps are converted to Markdown.
editors = [
    "word",
    "cell",
    "slide",
    "forms"
]
# Relative paths of generated pages that have no example (reported by the
# script's entry point).
missing_examples = []
def load_json(file_path):
    """Parse and return the JSON document stored at *file_path* (UTF-8)."""
    with open(file_path, encoding='utf-8') as handle:
        return json.load(handle)
def write_markdown_file(file_path, content):
    """Write *content* to *file_path* as UTF-8 text, overwriting any existing file."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(content)
def remove_js_comments(text):
    """Delete JavaScript comments (// line comments and /* */ blocks) from *text*."""
    without_line = re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE)
    without_block = re.sub(r'/\*.*?\*/', '', without_line, flags=re.DOTALL)
    return without_block.strip()
def correct_description(string):
    """
    Normalize a doclet description for Markdown output:
    - <b>/</b> tags become ** (bold)
    - <note>...</note> becomes an inline 💡 note
    - None becomes a default placeholder string
    """
    if string is None:
        return 'No description provided.'
    bolded = string.replace('<b>', '**').replace('</b>', '**')
    return re.sub(r'<note>(.*?)</note>', r'💡 \1', bolded, flags=re.DOTALL)
def correct_default_value(value, enumerations, classes):
    """
    Render a parameter's default value for the Markdown parameters table.

    Booleans become the JavaScript literals "true"/"false"; anything else is
    stringified and passed through the type-linking helper so enum/class
    defaults become links.  Returns '' when there is no default.
    """
    if value is None:
        return ''
    # FIX: use isinstance() so numeric defaults are not mistaken for
    # booleans -- in Python, 1 == True and 0 == False, so the previous
    # `value == True` rendered a default of 1 as "true".
    if isinstance(value, bool):
        value = "true" if value else "false"
    else:
        value = str(value)
    return generate_data_types_markdown([value], enumerations, classes)
def remove_line_breaks(string):
    """Collapse any run of CR/LF characters into a single space."""
    return ' '.join(re.split(r'[\r\n]+', string))
# Convert Array.<T> => T[] (including nested arrays).
def convert_jsdoc_array_to_ts(type_str: str) -> str:
    """
    Rewrite JSDoc 'Array.<T>' notation as TypeScript 'T[]', handling
    nesting: 'Array.<Array.<string>>' -> 'string[][]'.
    """
    pattern = re.compile(r'Array\.<([^>]+)>')
    match = pattern.search(type_str)
    while match is not None:
        # Recursively normalize the element type before re-splicing it.
        inner = convert_jsdoc_array_to_ts(match.group(1).strip())
        type_str = f"{type_str[:match.start()]}{inner}[]{type_str[match.end():]}"
        match = pattern.search(type_str)
    return type_str
def escape_text_outside_code_blocks(markdown: str) -> str:
    """
    Escape MDX-sensitive characters (<, >, {, }) everywhere except inside
    fenced ``` code blocks.
    """
    mdx_escapes = str.maketrans({'<': '&lt;', '>': '&gt;', '{': '&#123;', '}': '&#125;'})
    # re.split with a capturing group alternates prose, fence, prose, fence...
    segments = re.split(r'(```.*?```)', markdown, flags=re.DOTALL)
    return "".join(
        seg if idx % 2 else seg.translate(mdx_escapes)
        for idx, seg in enumerate(segments)
    )
def get_base_type(ts_type: str) -> str:
    """Return the element type of a TS array type: 'Drawing[][]' -> 'Drawing'."""
    return re.sub(r'(?:\[\])+$', '', ts_type)
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """
    Build the Markdown rendering of a list of JSDoc types:

    1) 'Array.<T>' forms become 'T[]'.
    2) A type whose base name matches a known enumeration (checked first)
       or class becomes a relative Markdown link.
    3) Variants are joined with ' | '.
    4) Leftover generic angle brackets (e.g. Object.<string, number>) are
       HTML-escaped for MDX.
    """
    enum_names = {enum['name'] for enum in enumerations}

    def linkify(ts_type):
        base = get_base_type(ts_type)
        if base in enum_names:
            return f"[{ts_type}]({root}Enumeration/{base}.md)"
        if base in classes:
            return f"[{ts_type}]({root}{base}/{base}.md)"
        return ts_type

    rendered = ' | '.join(linkify(convert_jsdoc_array_to_ts(t)) for t in types)
    # Escape generics that survived conversion.
    return re.sub(r'<([^<>]+)>',
                  lambda m: f"&lt;{m.group(1).strip()}&gt;",
                  rendered)
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Render a class landing page: intro, properties table, method index."""
    parts = [
        f"# {class_name}\n\nRepresents the {class_name} class.\n\n",
        generate_properties_markdown(properties, enumerations, classes),
        "## Methods\n\n",
    ]
    parts.extend(f"- [{m['name']}](./Methods/{m['name']}.md)\n" for m in methods)
    # MDX-escape everything outside fenced code blocks just before returning.
    return escape_text_outside_code_blocks("".join(parts))
def generate_method_markdown(method, enumerations, classes):
    """
    Generates Markdown for a method doclet, relying only on `method['examples']`
    (array of strings). Ignores any single `method['example']` field.
    """
    method_name = method['name']
    description = method.get('description', 'No description provided.')
    description = correct_description(description)
    params = method.get('params', [])
    returns = method.get('returns', [])
    memberof = method.get('memberof', '')
    # Use the 'examples' array only
    examples = method.get('examples', [])
    content = f"# {method_name}\n\n{description}\n\n"
    # Syntax
    param_list = ', '.join([param['name'] for param in params]) if params else ''
    content += f"## Syntax\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n"
    if memberof:
        content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n"
    # Parameters
    content += "## Parameters\n\n"
    if params:
        content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n"
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            # A param may have no 'type' at all -- guard before reading names.
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.')))
            param_required = "Required" if not param.get('optional') else "Optional"
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)
            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += "This method doesn't have any parameters.\n"
    # Returns
    content += "\n## Returns\n\n"
    if returns:
        # Only the first @returns entry is documented.
        return_type_list = returns[0].get('type', {}).get('names', [])
        return_type_md = generate_data_types_markdown(return_type_list, enumerations, classes)
        content += return_type_md
    else:
        content += "This method doesn't return any data."
    # Process examples array
    if examples:
        if len(examples) > 1:
            content += "\n\n## Examples\n\n"
        else:
            content += "\n\n## Example\n\n"
        for i, ex_line in enumerate(examples, start=1):
            # Remove JS comments
            cleaned_example = remove_js_comments(ex_line).strip()
            # Attempt splitting if the user used ```js
            if '```js' in cleaned_example:
                comment, code = cleaned_example.split('```js', 1)
                comment = comment.strip()
                code = code.strip()
                if len(examples) > 1:
                    content += f"**Example {i}:**\n\n{comment}\n\n"
                content += f"```javascript\n{code}\n```\n"
            else:
                if len(examples) > 1:
                    # NOTE(review): `comment` is only assigned in the ```js
                    # branch; on this path it may be undefined (NameError) or
                    # hold a previous iteration's value -- confirm intended.
                    content += f"**Example {i}:**\n\n{comment}\n\n"
                # No special fences, just show as code
                content += f"```javascript\n{cleaned_example}\n```\n"
    return escape_text_outside_code_blocks(content)
def generate_properties_markdown(properties, enumerations, classes, root='../'):
    """Render a '## Properties' table, or '' when the doclet has none."""
    if properties is None:
        return ''
    rows = ["## Properties\n\n",
            "| Name | Type | Description |\n",
            "| ---- | ---- | ----------- |\n"]
    for prop in properties:
        type_names = prop['type']['names'] if prop.get('type') else []
        types_md = generate_data_types_markdown(type_names, enumerations, classes, root)
        description = remove_line_breaks(
            correct_description(prop.get('description', 'No description provided.')))
        rows.append(f"| {prop['name']} | {types_md} | {description} |\n")
    # MDX-escape everything outside fenced code blocks.
    return escape_text_outside_code_blocks("".join(rows))
def generate_enumeration_markdown(enumeration, enumerations, classes):
    """
    Generate Markdown documentation for a 'typedef' doclet.

    Only the ``examples`` array of the doclet is used for code samples; a
    legacy singular ``example`` field is ignored.

    :param enumeration: the typedef doclet to render.
    :param enumerations: all typedef doclets, used for cross-linking.
    :param classes: known class names, used for cross-linking.
    :return: the rendered Markdown text.
    """
    enum_name = enumeration['name']
    description = enumeration.get('description', 'No description provided.')
    description = correct_description(description)
    # Only use the 'examples' array
    examples = enumeration.get('examples', [])
    content = f"# {enum_name}\n\n{description}\n\n"
    parsed_type = enumeration['type'].get('parsedType')
    if not parsed_type:
        # If parsedType is missing, just list 'type.names' if available.
        type_names = enumeration['type'].get('names', [])
        if type_names:
            content += "## Type\n\n"
            t_md = generate_data_types_markdown(type_names, enumerations, classes)
            content += t_md + "\n\n"
    else:
        ptype = parsed_type['type']
        # 1) Handle TypeUnion: render each alternative as a linked value.
        if ptype == 'TypeUnion':
            content += "## Type\n\nEnumeration\n\n"
            content += "## Values\n\n"
            for raw_t in enumeration['type']['names']:
                # Link to the enumeration or class page when the name is known.
                if any(enum['name'] == raw_t for enum in enumerations):
                    content += f"- [{raw_t}](../Enumeration/{raw_t}.md)\n"
                elif raw_t in classes:
                    content += f"- [{raw_t}](../{raw_t}/{raw_t}.md)\n"
                else:
                    content += f"- {raw_t}\n"
        # 2) Handle TypeApplication (e.g. Object.<string, string>)
        elif ptype == 'TypeApplication':
            content += "## Type\n\nObject\n\n"
            type_names = enumeration['type'].get('names', [])
            if type_names:
                t_md = generate_data_types_markdown(type_names, enumerations, classes)
                content += f"**Type:** {t_md}\n\n"
        # 3) If properties are present, treat it like an object.
        if enumeration.get('properties') is not None:
            content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
        # 4) If it's neither TypeUnion nor TypeApplication, just output the type names.
        if ptype not in ('TypeUnion', 'TypeApplication'):
            type_names = enumeration['type'].get('names', [])
            if type_names:
                content += "## Type\n\n"
                t_md = generate_data_types_markdown(type_names, enumerations, classes)
                content += t_md + "\n\n"
    # Process examples array
    if examples:
        if len(examples) > 1:
            content += "\n\n## Examples\n\n"
        else:
            content += "\n\n## Example\n\n"
        for i, ex_line in enumerate(examples, start=1):
            # Remove JS comments
            cleaned_example = remove_js_comments(ex_line).strip()
            # Split an optional leading comment from a ```js fenced code part.
            comment = ''
            code = cleaned_example
            if '```js' in cleaned_example:
                comment, code = cleaned_example.split('```js', 1)
                comment = comment.strip()
                code = code.strip()
            if len(examples) > 1:
                # BUGFIX: 'comment' was previously referenced here even when no
                # ```js fence existed, raising NameError on the first example
                # (or leaking the previous example's comment). It now defaults
                # to an empty string.
                content += f"**Example {i}:**\n\n{comment}\n\n"
            content += f"```javascript\n{code}\n```\n"
    return escape_text_outside_code_blocks(content)
def process_doclets(data, output_dir, editor_name):
    """Write Markdown files for every class, method and typedef in *data*.

    Output layout: <output_dir>/<editor_name>/<Class>/<Class>.md,
    <...>/<Class>/Methods/<method>.md and <...>/Enumeration/<name>.md.
    Pages lacking code examples are appended (as paths relative to
    *output_dir*) to the module-level ``missing_examples`` list.

    :param data: list of JSDoc doclet dicts.
    :param output_dir: root output directory.
    :param editor_name: editor subdirectory name (e.g. 'Word').
    """
    classes = {}
    classes_props = {}
    enumerations = []
    editor_dir = os.path.join(output_dir, editor_name)
    # First pass: bucket doclets by kind.
    for doclet in data:
        if doclet['kind'] == 'class':
            class_name = doclet['name']
            classes[class_name] = []
            classes_props[class_name] = doclet.get('properties', None)
        elif doclet['kind'] == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                # A method may appear before (or without) its class doclet.
                if class_name not in classes:
                    classes[class_name] = []
                classes[class_name].append(doclet)
        elif doclet['kind'] == 'typedef':
            enumerations.append(doclet)
    # Process classes
    for class_name, methods in classes.items():
        class_dir = os.path.join(editor_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)
        # Write class file.
        # BUGFIX: use .get() -- a class created implicitly from a method's
        # 'memberof' has no entry in classes_props and used to raise KeyError.
        class_content = generate_class_markdown(
            class_name,
            methods,
            classes_props.get(class_name),
            enumerations,
            classes
        )
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)
        # Write method files
        for method in methods:
            method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
            method_content = generate_method_markdown(method, enumerations, classes)
            write_markdown_file(method_file_path, method_content)
            if not method.get('examples', ''):
                missing_examples.append(os.path.relpath(method_file_path, output_dir))
    # Process enumerations
    enum_dir = os.path.join(editor_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)
    for enum in enumerations:
        enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
        enum_content = generate_enumeration_markdown(enum, enumerations, classes)
        if enum_content is None:
            continue
        write_markdown_file(enum_file_path, enum_content)
        if not enum.get('examples', ''):
            missing_examples.append(os.path.relpath(enum_file_path, output_dir))
def generate(output_dir):
    """Generate Markdown documentation for every editor into *output_dir*.

    Builds intermediate JSON into <output_dir>/tmp_json, renders one
    subdirectory per editor (recreated from scratch), then removes the
    temporary JSON directory.

    :param output_dir: destination directory; a trailing slash is tolerated.
    """
    print('Generating Markdown documentation...')
    # Normalize: drop a single trailing slash. endswith() also avoids the
    # IndexError the previous `output_dir[-1]` check raised on an empty string.
    if output_dir.endswith('/'):
        output_dir = output_dir[:-1]
    tmp_json_dir = output_dir + '/tmp_json'
    generate_docs_plugins_json.generate(tmp_json_dir, md=True)
    for editor_name in editors:
        input_file = os.path.join(tmp_json_dir, editor_name + ".json")
        # Recreate the editor's output directory from scratch.
        editor_dir = output_dir + f'/{editor_name.title()}'
        shutil.rmtree(editor_dir, ignore_errors=True)
        os.makedirs(editor_dir)
        data = load_json(input_file)
        process_doclets(data, output_dir, editor_name.title())
    shutil.rmtree(tmp_json_dir)
    print('Done')
if __name__ == "__main__":
    # CLI entry point: parse the (optional) destination directory, run the
    # generator, then emit the list of pages that still lack code examples.
    parser = argparse.ArgumentParser(description="Generate documentation")
    parser.add_argument(
        "destination",
        type=str,
        nargs='?',  # the argument is optional
        default="../../../../office-js-api/Plugins/",
        help="Destination directory for the generated documentation",
    )
    cli_args = parser.parse_args()
    generate(cli_args.destination)
    # Marker lines delimit the list -- presumably parsed by a downstream
    # script; confirm before changing them.
    print("START_MISSING_EXAMPLES")
    print(",".join(missing_examples))
    print("END_MISSING_EXAMPLES")

View File

@ -0,0 +1,39 @@
import subprocess
def fetch_branches():
    """Fetch all branches (no tags) from the remote into refs/remotes/origin."""
    refspec = '+refs/heads/*:refs/remotes/origin/*'
    subprocess.run(['git', 'fetch', '--no-tags', 'origin', refspec], check=True)
def get_branches():
    """Return the repository's remote-tracking branch names, whitespace-stripped."""
    completed = subprocess.run(['git', 'branch', '-r'], capture_output=True, text=True)
    return [entry.strip() for entry in completed.stdout.splitlines()]
def parse_version(version_str):
    """Parse a version like 'v1.2.3' into a tuple of ints.

    Returns (0, 0, 0) for strings with non-numeric components so that such
    branches sort lowest.
    """
    numeric_part = version_str.lstrip('v')
    try:
        return tuple(int(piece) for piece in numeric_part.split('.'))
    except ValueError:
        return (0, 0, 0)  # default for non-parsable versions
def get_max_version_branch(branches):
    """Find the release/hotfix branch with the highest version.

    :param branches: branch names such as 'origin/release/v8.3.0'.
    :return: the winning branch split on '/', e.g.
             ['origin', 'release', 'v8.3.0'], or None if no such branch exists.
    """
    max_branch = None
    max_version = (0, 0, 0)
    for branch in branches:
        parts = branch.split('/')
        # BUGFIX: require at least 'origin/<kind>/<version>' (3 parts). The
        # old `len(parts) >= 2` check let a bare 'origin/release' reach
        # parts[2] and raise IndexError.
        if len(parts) >= 3 and parts[1] in ('hotfix', 'release'):
            version = parse_version(parts[2])
            if version > max_version:
                max_version = version
                max_branch = parts
    return max_branch
if __name__ == "__main__":
    # Refresh remote-tracking refs (tags excluded), then resolve and print
    # the newest release/hotfix branch.
    fetch_branches()
    newest = get_max_version_branch(get_branches())
    if newest:
        # Drop the leading 'origin' component so only e.g. 'release/v8.3.0'
        # is printed.
        print('/'.join(newest[1:]))

View File

@ -0,0 +1,7 @@
{
"dependencies": {
"jsdoc-to-markdown": "7.1.1",
"dmd": "6.1.0",
"handlebars": "4.7.7"
}
}

177
scripts/sln.py Normal file
View File

@ -0,0 +1,177 @@
#!/usr/bin/env python
import sys
sys.path.append('scripts')
import config
import json
import os
is_log = False
def is_exist_in_array(projects, proj):
    """Return True when *proj* is present in *projects* (equality-based)."""
    # Kept as a named helper for readability at the many call sites.
    return proj in projects
def get_full_projects_list(json_data, list):
    """Recursively expand alias entries into concrete project records.

    Entries of *list* that are keys of *json_data* are aliases to other
    record lists and are expanded in place; anything else passes through.

    NOTE(review): the second parameter shadows the builtin ``list``; the name
    is kept for signature compatibility.
    """
    expanded = []
    for entry in list:
        if entry in json_data:
            expanded.extend(get_full_projects_list(json_data, json_data[entry]))
        else:
            expanded.append(entry)
    return expanded
def adjust_project_params(params):
    """Expand platform alias tokens in *params* into concrete platform names.

    Aliases ('win', 'win_xp', 'linux', 'mac', 'android') and their negated
    forms ('!win', ...) are expanded to every matching entry of
    ``config.platforms``. Expansions are appended after the original tokens;
    the alias tokens themselves are kept.

    :param params: list of raw parameter tokens from a project record.
    :return: a new list with the expansions appended.
    """
    # BUGFIX: the old code did `ret_params = params` and then `+=`, mutating
    # the caller's list in place; work on a copy instead. (Callers reassign
    # the return value, so behavior at the call sites is unchanged.)
    ret_params = list(params)
    # Bucket the configured platforms by alias.
    all_windows = []
    all_windows_xp = []
    all_linux = []
    all_mac = []
    all_android = []
    for platform in config.platforms:
        if platform.startswith("win"):
            all_windows.append(platform)
            if "xp" in platform:
                all_windows_xp.append(platform)
        elif platform.startswith("linux"):
            all_linux.append(platform)
        elif platform.startswith("mac"):
            all_mac.append(platform)
        elif platform.startswith("android"):
            all_android.append(platform)
    # Same expansion order as before: win, win_xp, linux, mac, android,
    # positive alias before its negated form. Assumes config.platforms holds
    # concrete names (e.g. 'win_64'), never a bare alias -- true for sln.json.
    alias_groups = [
        ("win", all_windows),
        ("win_xp", all_windows_xp),
        ("linux", all_linux),
        ("mac", all_mac),
        ("android", all_android),
    ]
    for alias, group in alias_groups:
        if alias in params:
            ret_params += group
        if ("!" + alias) in params:
            ret_params += ["!" + x for x in group]
    return ret_params
def get_projects(pro_json_path, platform):
    """Resolve the list of .pro project files to build for *platform*.

    Reads the solution-description JSON, expands module aliases, filters
    each record by its bracketed '[...]' platform/config constraints and
    returns paths of root dir + record.

    :param pro_json_path: path to the sln-style JSON file.
    :param platform: target platform name (e.g. 'win_64').
    :return: list of project file paths to build.
    """
    json_path = os.path.abspath(pro_json_path)
    # BUGFIX: close the file handle (the old `json.load(open(...))` leaked it).
    with open(json_path) as json_file:
        data = json.load(json_file)
    root_dir_json = "../"
    if "root" in data:
        root_dir_json = data["root"]
    root_dir = os.path.dirname(json_path)
    if root_dir[-1] not in ("/", "\\"):
        root_dir += "/"
    root_dir += root_dir_json
    result = []
    modules = config.option("module").split(" ")
    for module in modules:
        if module == "":
            continue
        if module not in data:
            continue
        # Expand aliases to other modules into concrete records.
        records_src = data[module]
        records = get_full_projects_list(data, records_src)
        if is_log:
            # FIX: this debug print used to run unconditionally; keep it
            # behind the is_log flag like the other diagnostics.
            print(records)
        for rec in records:
            params = []
            record = rec
            # A record may start with a '[p1,p2,...]' constraint header.
            if 0 == rec.find("["):
                pos = rec.find("]")
                if -1 == pos:
                    continue  # malformed header, skip the record
                record = rec[pos + 1:]
                params_tmp = rec[1:pos].split(",")
                for par in params_tmp:
                    if par != "":
                        params.append(par)
            # Expand platform aliases ('win' -> every win_* platform, ...).
            params = adjust_project_params(params)
            if is_exist_in_array(result, record):
                continue  # already scheduled via another module
            if is_log:
                print("params: " + ",".join(params))
                print("file: " + record)
            # Explicitly excluded for this platform.
            if is_exist_in_array(params, "!" + platform):
                continue
            platform_records = []
            platform_records += config.platforms
            platform_records += ["win", "win_xp", "linux", "mac", "android"]
            # If any platform token is listed => the needed one must be too.
            is_needed_platform_exist = False
            for pl in platform_records:
                if is_exist_in_array(params, pl):
                    is_needed_platform_exist = True
                    break
            # If any config token is listed => the needed config must be too.
            is_needed_config_exist = False
            for item in params:
                if 0 == item.find("!"):
                    continue
                if is_exist_in_array(platform_records, item):
                    continue
                is_needed_config_exist = True
                break
            if is_needed_platform_exist:
                if not is_exist_in_array(params, platform):
                    continue
            config_params = config.option("config").split(" ") + config.option("features").split(" ")
            config_params = [x for x in config_params if x]
            is_append = True
            for conf in config_params:
                if is_exist_in_array(params, "!" + conf):
                    is_append = False
                    break
                if is_needed_config_exist and not is_exist_in_array(params, conf):
                    is_append = False
                    break
            if is_append:
                result.append(root_dir + record)
    if is_log:
        print(result)
    return result
# Manual smoke test: run this module directly to list the projects that
# would be built for win_64 with verbose logging enabled.
if __name__ == '__main__':
    # Parse the build configuration (command-line / config files) first --
    # get_projects() reads config.option() and config.platforms.
    config.parse()
    is_log = True
    projects = get_projects("./../sln.json", "win_64")

106
sln.json Normal file
View File

@ -0,0 +1,106 @@
{
"root" : "../",
"spell" : [
"[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
],
"core" : [
"core/Common/3dParty/cryptopp/project/cryptopp.pro",
"core/Common/cfcpp/cfcpp.pro",
"core/UnicodeConverter/UnicodeConverter.pro",
"core/Common/kernel.pro",
"core/Common/Network/network.pro",
"core/DesktopEditor/graphics/pro/graphics.pro",
"core/PdfFile/PdfFile.pro",
"core/DjVuFile/DjVuFile.pro",
"core/XpsFile/XpsFile.pro",
"core/HtmlFile2/HtmlFile2.pro",
"core/Fb2File/Fb2File.pro",
"core/EpubFile/CEpubFile.pro",
"core/HwpFile/HWPFile.pro",
"core/Apple/IWork.pro",
"core/DocxRenderer/DocxRenderer.pro",
"core/DesktopEditor/doctrenderer/doctrenderer.pro",
"[!no_x2t]core/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro",
"[!no_x2t]core/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro",
"[!no_x2t]core/TxtFile/Projects/Linux/TxtXmlFormatLib.pro",
"[!no_x2t]core/RtfFile/Projects/Linux/RtfFormatLib.pro",
"[!no_x2t]core/OdfFile/Projects/Linux/OdfFormatLib.pro",
"[!no_x2t]core/OOXML/Projects/Linux/BinDocument/BinDocument.pro",
"[!no_x2t]core/X2tConverter/build/Qt/X2tConverter.pro",
"[win,linux,mac]core/DesktopEditor/AllFontsGen/AllFontsGen.pro",
"[win,linux,mac]core/DesktopEditor/allthemesgen/allthemesgen.pro",
"[win,linux,mac]core/DesktopEditor/doctrenderer/app_builder/docbuilder.pro",
"[win,linux,mac]core/DesktopEditor/pluginsmanager/pluginsmanager.pro",
"[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro",
"spell",
"[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro",
"[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro",
"[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro"
],
"builder" : [
"core",
"core/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder_func_lib.pro"
],
"server" : [
"core"
],
"multimedia" : [
"[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro"
],
"desktop" : [
"core",
"multimedia",
"core/DesktopEditor/xmlsec/src/ooxmlsignature.pro",
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro",
"desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro",
"[win,linux]desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro",
"[win,linux]desktop-apps/win-linux/ASCDocumentEditor.pro",
"[win]desktop-apps/win-linux/extras/projicons/ProjIcons.pro",
"[win,!win_xp]desktop-apps/win-linux/extras/update-daemon/UpdateDaemon.pro",
"[win,!win_xp]desktop-apps/win-linux/extras/online-installer/OnlineInstaller.pro"
],
"mobile" : [
"core"
],
"osign" : [
"[win,linux,mac]core/DesktopEditor/xmlsec/src/osign/lib/osign.pro"
]
}

View File

@ -1,10 +1,10 @@
## Overview
# Overview
**change_autor.py** is a tool for changing the author and the last-modified name in all documents in a folder.
**change_autor.py** is a tool for changing the author and the last-modified name in all documents in a folder.
## How to use
1. Place the files to be changed in a folder, e.g. **input**.
1. Place the files to be changed in a folder, e.g. **input**.
2. Create a folder in which the modified files will be stored, e.g. **output**.
3. Call the file *change_autor.py* as shown below.
@ -17,10 +17,11 @@ ________________________
## How to use
1. Place the files to be changed in a folder, e.g. **input**.
1. Place the files to be changed in a folder, e.g. **input**.
2. Create a folder in which the converted files will be stored, e.g. **output**.
3. Call the file *convert_directory.py* as shown below.
```bash
convert_directory.py path_to_builder_directory path_to_input_folder path_to_output_folder format_ext
convert_directory.py path_to_builder_directory
path_to_input_folder path_to_output_folder format_ext
```

View File

@ -7,6 +7,9 @@ import os
import glob
import shutil
sys.stdin.reconfigure(encoding='utf-8')
sys.stdout.reconfigure(encoding='utf-8')
params = sys.argv[1:]
if (3 > len(params)):
@ -20,8 +23,14 @@ directory_input = params[0].replace("\\", "/")
directory_output = params[1].replace("\\", "/")
author_name = params[2]
if not os.path.exists(directory_output):
os.mkdir(directory_output)
input_files = []
count = 1
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
print(count, file)
count += 1
input_files.append(file.replace("\\", "/"))
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
@ -30,7 +39,7 @@ def change_author_name(file_dist, output_file, author_name):
app = "7za" if ("mac" == base.host_platform()) else "7z"
base.cmd_exe(app, ["x", "-y", file_dist, "-o" + temp_dir, "docProps\\core.xml", "-r"])
with open(temp_dir + "/docProps/core.xml", 'r') as file:
with open(temp_dir + "/docProps/core.xml", 'r', encoding='utf-8') as file:
data = file.read()
creator_open = "<dc:creator>"
@ -67,7 +76,7 @@ def change_author_name(file_dist, output_file, author_name):
else:
data = data[:last_tag_pos] + lastModified_open + author_name + lastModified_close + data[last_tag_pos:]
with open(temp_dir + "/docProps/core.xml", 'w') as file:
with open(temp_dir + "/docProps/core.xml", 'w', encoding='utf-8') as file:
file.write(data)
shutil.copyfile(file_dist, output_file)
@ -80,7 +89,12 @@ for input_file in input_files:
base.delete_dir(temp_dir)
base.create_dir(temp_dir)
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + input_file.split(".")[-1]
change_author_name(input_file, output_file, author_name)
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]).replace(' ', '_') + u"." + input_file.split(".")[-1]
try:
change_author_name(input_file, output_file, author_name)
except:
print("Error in converting document: ", input_file)
continue
base.delete_dir(temp_dir)
output_cur += 1

View File

@ -125,9 +125,20 @@ def getFormatByFile(file_path):
def convertFile(directory_x2t, file_input, file_output, convert_params):
cur_path = os.getcwd()
# fonts directory -----------------------------------
directory_fonts = directory_x2t + "/sdkjs/common"
if not base.is_file(directory_fonts + "/AllFonts.js"):
directory_fonts_local = ""
if "windows" == base.host_platform():
directory_fonts_local = os.getenv("LOCALAPPDATA") + "/ONLYOFFICE/docbuilder"
else:
directory_fonts_local = os.path.expanduser('~') + "/.local/share/ONLYOFFICE/docbuilder"
if not base.is_file(directory_fonts + "/AllFonts.js") and not base.is_file(directory_fonts_local + "/AllFonts.js"):
base.cmd_in_dir(directory_x2t, "docbuilder", [], True)
if base.is_file(directory_fonts_local + "/AllFonts.js"):
directory_fonts = directory_fonts_local
# ---------------------------------------------------
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
if base.is_dir(temp_dir):

View File

@ -0,0 +1,35 @@
#!/usr/bin/env python
# Batch-convert every file under an input directory using the builder's
# 'test' tool, writing results to an output directory.
import sys
sys.path.append('../../scripts')
import base
import os
import glob
import convert_common

# CLI arguments: builder dir, sdkjs dir, editor type, input dir, output dir.
params = sys.argv[1:]
if (3 > len(params)):
    print("use: convert_directory.py path_to_builder_directory path_to_sdkjs_directory editor_type path_to_input_files_directory path_to_output_files_directory")
    exit(0)

cur_path = os.getcwd()
base.configure_common_apps()

# Normalize path separators so the paths work on every host platform.
directory_x2t = params[0].replace("\\", "/")
directory_sdkjs = params[1].replace("\\", "/")
editor_type = params[2].replace("\\", "/")
directory_input = params[3].replace("\\", "/")
directory_output = params[4].replace("\\", "/")

# Collect every file found recursively under the input directory.
input_files = [os.path.join(dirpath, f)
               for dirpath, dirnames, files in os.walk(directory_input)
               for f in files]

output_len = len(input_files)
output_cur = 1
for input_file in input_files:
    # Progress line; the name is encoded so non-ASCII file names print safely.
    print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
    output_file = os.path.join(directory_output, os.path.basename(input_file))
    # NOTE(review): 'test' is assumed to be a converter binary living in
    # directory_x2t -- confirm against base.cmd_in_dir's semantics.
    base.cmd_in_dir(directory_x2t, "test", [directory_sdkjs, editor_type, input_file, output_file], True)
    output_cur += 1

Some files were not shown because too many files have changed in this diff Show More