mirror of
https://github.com/ONLYOFFICE/build_tools.git
synced 2026-04-07 14:06:31 +08:00
Compare commits
1330 Commits
v7.2.0.162
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
| 55f811af62 | |||
| 057f0bbbf6 | |||
| 67060cc66e | |||
| c5f6c2e02b | |||
| 71b912e7e6 | |||
| 6e7fd50583 | |||
| cf39498098 | |||
| 1f4e593943 | |||
| 3dfd22c735 | |||
| f92fc0f617 | |||
| 0402a5a07a | |||
| dea91ca6f6 | |||
| 4be0e0cbe2 | |||
| 1e8825e15e | |||
| 9d17f87811 | |||
| 2fff3a7391 | |||
| c0bdb1d62b | |||
| 7c97a9b326 | |||
| b33d92a32e | |||
| d480266a5a | |||
| 98af1ed74b | |||
| 9428ce8b33 | |||
| c1dbdc39f1 | |||
| 5d99680cc4 | |||
| 930b11f19e | |||
| 62aa75d82c | |||
| f926970677 | |||
| 4f6908154c | |||
| 151c4e7d2f | |||
| 8adc9021e4 | |||
| 9b97de22df | |||
| 997734860b | |||
| 4473fd7cf9 | |||
| 923d839483 | |||
| 39b1c1e22c | |||
| 849d78fea0 | |||
| 49619cdb40 | |||
| 9bdb69dfa3 | |||
| ceda5ea658 | |||
| ec2d587993 | |||
| 10118f4c68 | |||
| 3cdc164d7f | |||
| b9c9811b9e | |||
| 70bbdfbd43 | |||
| 5dbf27a039 | |||
| 39fb488af8 | |||
| 4866786097 | |||
| 07b1eadc0a | |||
| 9db40b2505 | |||
| 42b57d6b40 | |||
| 506fbd056a | |||
| def11f3134 | |||
| 3285a3e3c5 | |||
| bdcdfa89e7 | |||
| 1835e3ad28 | |||
| 1b33175880 | |||
| 4629471d5b | |||
| a4b29c6acc | |||
| 621b34ffde | |||
| 29ceaa34bf | |||
| 50addf9d18 | |||
| 7269dda75e | |||
| 3f54aec7f8 | |||
| 2d059101ee | |||
| 4dda7dfa7a | |||
| b6d3392571 | |||
| 4001a636db | |||
| cf4c2fc02f | |||
| f94a95df22 | |||
| 7cbaa00356 | |||
| 71cd913944 | |||
| 76fd5a0d37 | |||
| db1b2c01fc | |||
| 9cc6e0b749 | |||
| 8912b82d1f | |||
| d6e215595f | |||
| 6f59b470a3 | |||
| 9241aa8a9b | |||
| 87a7c2922b | |||
| 211348776c | |||
| b2f6b6e2c3 | |||
| 4ea37abdf3 | |||
| ee324a764e | |||
| cee13fbcbe | |||
| 453786b595 | |||
| 9eb7651e84 | |||
| 7b37cc9f83 | |||
| c2a38839dd | |||
| be9f90d220 | |||
| 91937f3d77 | |||
| e70acf9822 | |||
| 243a14ddb9 | |||
| fd7cf76aea | |||
| e48534d14c | |||
| 055ff85f68 | |||
| 12a1765ca0 | |||
| 763dea4889 | |||
| 38a3863213 | |||
| c860de3721 | |||
| 3ce6a41dce | |||
| c69158af8f | |||
| b47b412529 | |||
| 355ae98932 | |||
| 4d812fa6d2 | |||
| ef8153c053 | |||
| 430a9878e5 | |||
| 561c648069 | |||
| f99bcd2ed0 | |||
| df1c566005 | |||
| d548d731ae | |||
| 4e1cfb10b4 | |||
| 4d767c127b | |||
| 71d9e34164 | |||
| d3a2f05945 | |||
| 97b4421e3c | |||
| 73576ff817 | |||
| b4ba33bb69 | |||
| e04df1ba9a | |||
| dd148a105e | |||
| acf75749c2 | |||
| 354e4a6a04 | |||
| b85fb9c07c | |||
| 40b95455a0 | |||
| eef1d728b5 | |||
| cc24ee8e85 | |||
| 62ab1f9935 | |||
| dd26863a2b | |||
| c02a387a64 | |||
| 0fd6852fe8 | |||
| 0dbf009c6b | |||
| 44b6d2d64f | |||
| b2ce2bbcad | |||
| 133192df35 | |||
| c45cf77747 | |||
| 9f898d6873 | |||
| 61bf2d9413 | |||
| 0c8b3e5919 | |||
| 6fd500e55d | |||
| 10f7dcee17 | |||
| 2fcdef2e00 | |||
| 2ec27065f8 | |||
| 4f804ba0e6 | |||
| 7fb05a51f9 | |||
| 30e3202fd5 | |||
| b6b09d17f4 | |||
| 8a70f3d14a | |||
| aaa5096526 | |||
| 668aa88c6e | |||
| ec544ca9ab | |||
| 17ee00a04c | |||
| bc4daa9546 | |||
| 099003b250 | |||
| 4ca5e4bcb5 | |||
| ee4469885e | |||
| 1350955f67 | |||
| de36280085 | |||
| 0399ea1cb1 | |||
| 41ed9824ab | |||
| 842df5ffd0 | |||
| d0e3f36313 | |||
| 2378bb91fd | |||
| 18605dc68f | |||
| 5004a30908 | |||
| 33e41ed704 | |||
| 1f4c88a489 | |||
| 283977f014 | |||
| e71e0bee74 | |||
| fe9847f246 | |||
| f79bfd099b | |||
| 3ec9b4dba5 | |||
| 0c7348fc89 | |||
| 97b615c36a | |||
| 59f95f52ce | |||
| ba04810793 | |||
| aea03d8554 | |||
| 09247281ef | |||
| 7b31d85c7c | |||
| 3ad5b9d05d | |||
| 894f23b292 | |||
| 125d0112e4 | |||
| b46d6075e4 | |||
| c535c411da | |||
| 599db1f8be | |||
| 10ff21a974 | |||
| c60dc81694 | |||
| 6de1859441 | |||
| 70a50da160 | |||
| 3ca2efeb30 | |||
| 496594e77f | |||
| 8ebc7dba86 | |||
| 7da1a18753 | |||
| c16242f25f | |||
| 2f43d90ab9 | |||
| 6f242eee81 | |||
| f19a406ad2 | |||
| cfe69c6bcd | |||
| dcbeec9562 | |||
| 954e3b1ee8 | |||
| d5666b1152 | |||
| c4fa19a1c8 | |||
| 7d9444a727 | |||
| 3646a2560a | |||
| 2f877e0a4c | |||
| 434d6cc33c | |||
| 31386ad676 | |||
| 0d917471a7 | |||
| 4d91ac47ec | |||
| 44e346210b | |||
| e349f3dba2 | |||
| 2ffc1b9347 | |||
| 7a58da1af1 | |||
| f703663342 | |||
| af9db63711 | |||
| 7e9d9051aa | |||
| 73b41a5588 | |||
| 1ab43f0551 | |||
| 79c4fa5042 | |||
| 944caac250 | |||
| 7fe4ade155 | |||
| 3d51a5e648 | |||
| d6af99141d | |||
| 568e84275c | |||
| de5e5fe938 | |||
| 152b524197 | |||
| 142e6541c7 | |||
| 357db616f7 | |||
| 8fc50cc0f2 | |||
| 1feed69b71 | |||
| 6b03a77791 | |||
| 31b4e29e37 | |||
| 08d8a02166 | |||
| e69a5135da | |||
| e768f17744 | |||
| 4e9251e45b | |||
| 2ec2173cad | |||
| 72d6298bbc | |||
| a00302ad1d | |||
| 56c93f97af | |||
| a7432a6b4b | |||
| 75faf391b6 | |||
| 9cefa13362 | |||
| 8f9835a7bd | |||
| 41a4e81c5a | |||
| 83374aa635 | |||
| 4e6b45043a | |||
| 72124a2305 | |||
| b4b84dc462 | |||
| 6cd690d6b6 | |||
| b4f863f00f | |||
| 88d0c54040 | |||
| 46a18695fa | |||
| 05f2f636df | |||
| 506f558eed | |||
| 38cd4e6e77 | |||
| f86376fbc2 | |||
| 3943237a64 | |||
| 37d256acda | |||
| 9cad51e611 | |||
| 3571e02abf | |||
| 123af8265a | |||
| 4f55f8cc90 | |||
| 9a3642572c | |||
| b93773ec78 | |||
| d2eb9b20af | |||
| f2cb4e6a51 | |||
| 5cdb4dae39 | |||
| 9102284b80 | |||
| 1f009d0942 | |||
| 566367daa5 | |||
| c36a390292 | |||
| 4a947d2a3c | |||
| 9b104f358e | |||
| c8a72c53b8 | |||
| 6a3d29bd00 | |||
| 97c8a33e5d | |||
| 39ff7e7692 | |||
| 4f9040d73b | |||
| 9579394788 | |||
| ea8a7bf52e | |||
| a1af0a6804 | |||
| 65e6f05385 | |||
| 08145c0d79 | |||
| c1f60e27c8 | |||
| 5168e20918 | |||
| 0968ca2566 | |||
| e461da3a1e | |||
| c6176a95f4 | |||
| 224db5eb54 | |||
| a6c315302b | |||
| ae3bc78b29 | |||
| 4c48514402 | |||
| e4bc6492be | |||
| 23a526f3a1 | |||
| df87cafbdb | |||
| 595e2eaa86 | |||
| 559837ee13 | |||
| 9440fc3494 | |||
| e700a98963 | |||
| 9635b8c08f | |||
| 56e2042711 | |||
| 30167dbf76 | |||
| 5a403cb3ac | |||
| f22bda85e4 | |||
| 4f09833650 | |||
| d435dd496b | |||
| 252a5c306d | |||
| 9710a074f8 | |||
| e4b91f1b24 | |||
| 6b3f100e7e | |||
| 573612c1c5 | |||
| ba31642a46 | |||
| 00c37bc9dd | |||
| 878057972b | |||
| 1f7a7fc452 | |||
| a8ab257faf | |||
| fc9d060a78 | |||
| a640943c60 | |||
| 6c700fe492 | |||
| afb9b8b61a | |||
| 38c49f70e5 | |||
| bdc40e1572 | |||
| 73bf068cb0 | |||
| 6a4d3bff53 | |||
| dc3ceb6ef8 | |||
| a0cc2123bf | |||
| c71e5c5bde | |||
| 1f84a604d2 | |||
| 648dc2119e | |||
| 4f746cb484 | |||
| 853c760cc4 | |||
| 972bcc8064 | |||
| 2b6557f0ec | |||
| 053c0c2fe9 | |||
| caf79933d8 | |||
| debe284664 | |||
| d4231e0efa | |||
| de99e3f62e | |||
| 16858aa7c2 | |||
| 8beb8b3c84 | |||
| 8cf076aff8 | |||
| 55ddce5904 | |||
| aa5d06a1ec | |||
| 031a1119d6 | |||
| 316c3cec26 | |||
| 584513fb15 | |||
| 9203d68ed8 | |||
| 41abb6b09c | |||
| 7bb5e65810 | |||
| 49ede6a10b | |||
| ac394d8de6 | |||
| abde837a74 | |||
| 834fab5fc7 | |||
| d357abcfc9 | |||
| 119b5f6d33 | |||
| 8a70714eeb | |||
| 90903009f4 | |||
| 6f256be099 | |||
| 5568b7da2e | |||
| 0f89ba4247 | |||
| 4cefdc38fb | |||
| d1481021a7 | |||
| d7eaef6503 | |||
| 0c7b8a2b1c | |||
| 9b5b3eb77c | |||
| 9e31770bfa | |||
| 4d6b9f9463 | |||
| d2c79bb78d | |||
| e0aa6184d6 | |||
| 9c80b95dbe | |||
| bfd3bb009f | |||
| 7ef302fac1 | |||
| fece05de0b | |||
| 71a2981ae8 | |||
| de1d437576 | |||
| 2e179644b3 | |||
| c4551af253 | |||
| 28ca6676a5 | |||
| 31f679a050 | |||
| 64c32043cc | |||
| ea52e70a6d | |||
| 1d721e3e3e | |||
| 3e3b0127a6 | |||
| dcc9f8e669 | |||
| 67c454b469 | |||
| 1cc0528b11 | |||
| c3dce4bc91 | |||
| 990382512b | |||
| b6985ce27e | |||
| 65e36cd01a | |||
| caaebde240 | |||
| 7c130faac2 | |||
| 7ff5d2f40d | |||
| a353e89871 | |||
| c9151cd09d | |||
| 03f99c526d | |||
| 34a54bf88f | |||
| b40c0a0d74 | |||
| 519ea3fb6c | |||
| e4cc090bfd | |||
| c6138b3902 | |||
| 8ec240dcee | |||
| fa589c9523 | |||
| 5f2d8be5dc | |||
| 829228d28c | |||
| a7c9f3a0ce | |||
| e676ebcffd | |||
| 6fe22b14c6 | |||
| d50b171b54 | |||
| 19fc33b7f5 | |||
| d6cbfcbfe3 | |||
| 23db442c82 | |||
| e40f8c9a7e | |||
| 4d4d1612ce | |||
| ded3dfa63c | |||
| 6a2db3d59e | |||
| 48c8a635a8 | |||
| 1ac83e0ffd | |||
| ef6ecbbebd | |||
| f51b841320 | |||
| 7c0099c57e | |||
| 8f49dce1ed | |||
| a9bad0d5b7 | |||
| 702a83c010 | |||
| 227ecbde99 | |||
| ee52dbe5c4 | |||
| 0f0e0a0e52 | |||
| d288d6326c | |||
| b87e305c06 | |||
| 8516b163b4 | |||
| 4460d6ed13 | |||
| ab3165fdaf | |||
| 1539c187e3 | |||
| 78df8eb494 | |||
| eebbd513d3 | |||
| 2ab7616132 | |||
| 3be471b472 | |||
| a151375339 | |||
| ed2ab2d80b | |||
| 35f99ac3a0 | |||
| 836a0401ed | |||
| b8024df7d3 | |||
| 74c02f9d50 | |||
| 25a1e16824 | |||
| 7ee66bbafd | |||
| 2b07d1aa4d | |||
| c6acd6cdcd | |||
| cee122afa5 | |||
| 4c76406f8c | |||
| 6fd89057ec | |||
| ae8b77628e | |||
| 959d919d9e | |||
| bf7df0b45a | |||
| 3589ea0f60 | |||
| ad23ee2803 | |||
| bc59f739f5 | |||
| b8e42184f8 | |||
| 50c312513c | |||
| 427ae97dd2 | |||
| d80f1f1b0f | |||
| 520d779f04 | |||
| c4b938b7db | |||
| 9435cdc99b | |||
| cf90a5ce21 | |||
| 3f4d0cefa8 | |||
| 12ce537781 | |||
| 12d824fe2d | |||
| 8dcf0277ac | |||
| 6664051127 | |||
| 019f10ee86 | |||
| a7ee5d5679 | |||
| 7c31890fc0 | |||
| ad9258710b | |||
| 626dd37312 | |||
| b4d95ccbb9 | |||
| 0d0ae2b5e6 | |||
| d7b3b7f120 | |||
| 7a864171b3 | |||
| 8cfb8f3d84 | |||
| 2f39454d31 | |||
| de33755900 | |||
| d3d1080c89 | |||
| 539597f07c | |||
| 6e87116634 | |||
| 029b16ca68 | |||
| 40b11e192d | |||
| 72cc19f346 | |||
| efb22f741f | |||
| 2458673d3c | |||
| 3881a6659e | |||
| a567cc2222 | |||
| 6a9b2bac4a | |||
| 87542f4a56 | |||
| 32b47cd21e | |||
| bf75e1c062 | |||
| 97fccfa34b | |||
| 1ed32fe71c | |||
| 3ce8f251a1 | |||
| e2ad38f297 | |||
| 993303bfa4 | |||
| 50d9460f63 | |||
| 4b02b57c07 | |||
| 1fc9382ce9 | |||
| ea43e67fe8 | |||
| dd28a41e17 | |||
| b11a273d65 | |||
| d4ee25b004 | |||
| a2b7719100 | |||
| 1e6cde4d98 | |||
| 34f627d146 | |||
| 54accd4394 | |||
| 63557fba56 | |||
| 7a4be158c2 | |||
| 810e12bd22 | |||
| 066f7ad8c1 | |||
| e52a654731 | |||
| 370879f636 | |||
| 170a511654 | |||
| 679afe1bc4 | |||
| 8b5cfff24a | |||
| 27de97031e | |||
| 8ee874da14 | |||
| 11c783f088 | |||
| a3cb31291f | |||
| 6a43b86912 | |||
| 21bb535ee0 | |||
| 9ea948b825 | |||
| fe2fad9378 | |||
| d566ffd9fa | |||
| 370b23f38f | |||
| 253ee696be | |||
| e08c6f79bc | |||
| 4240319fef | |||
| e1aaa2415b | |||
| e71eb56630 | |||
| 38496f2971 | |||
| d1c7d8d9f6 | |||
| 36fdfd672f | |||
| 55c0f61189 | |||
| 053e317850 | |||
| 38296bf292 | |||
| f0ba4564cc | |||
| 21ec70214d | |||
| 6d1a8376ba | |||
| 0ca83fe152 | |||
| 2301c407a2 | |||
| d6096431bd | |||
| d7532d5b83 | |||
| c7d805f8df | |||
| d78ab30cdf | |||
| c123f77195 | |||
| a60bc78e23 | |||
| 78ee107e85 | |||
| 12c3310451 | |||
| d525d8f603 | |||
| 337d1095dc | |||
| fab40cb6b3 | |||
| f4cdc1aecd | |||
| f702e3245a | |||
| d890ba4f43 | |||
| d929ed411f | |||
| 55daa28d74 | |||
| 2bab12aad1 | |||
| 80fb376132 | |||
| 1d557f1065 | |||
| 30df3df8cf | |||
| 02b4655a16 | |||
| debf0158d4 | |||
| 0f730c1948 | |||
| fa7e324fe0 | |||
| e2313e6a3d | |||
| 2ce8c42323 | |||
| 684e65adaa | |||
| a8fc3fb2f1 | |||
| 68bcdb2f88 | |||
| af3627bccb | |||
| 4cbe032363 | |||
| 5e4b3cf0d2 | |||
| 593af1048b | |||
| ae00ecb773 | |||
| da83e42172 | |||
| 2895d53f8e | |||
| 10d1f22ec3 | |||
| 4ed1e64a61 | |||
| 6402936285 | |||
| e01e5c145a | |||
| 56f6d82c8f | |||
| 3e79cf0c12 | |||
| efc09657a8 | |||
| 64390c3e01 | |||
| 513edb802d | |||
| 52c35b8e3c | |||
| cf1c25031c | |||
| 7b9f18867a | |||
| 0985b4dbe8 | |||
| 772fb721ae | |||
| 1ef1c795c1 | |||
| 6d956566c5 | |||
| edec5bb25f | |||
| 3534f65f0e | |||
| 6fbea9c8a4 | |||
| 18bba5da3d | |||
| 952270e1ba | |||
| adc353cdcc | |||
| 0c180e6ee5 | |||
| fdd9c329b1 | |||
| 5b80459b37 | |||
| 1b646a6e00 | |||
| cf970efbec | |||
| 4020cdac69 | |||
| 2415c2ffe8 | |||
| d41502ea19 | |||
| f5d0ef4005 | |||
| c4a89ecf61 | |||
| 71eb25e561 | |||
| 486a6683fd | |||
| 2175d8d87c | |||
| f463bff49e | |||
| a817e2b046 | |||
| 3539e36bde | |||
| 6930a9ffe1 | |||
| e0a44502b1 | |||
| 19e1bd5586 | |||
| ea65ba02f1 | |||
| 8406e48009 | |||
| a8f1d11cbc | |||
| f245a4a9c6 | |||
| 597529a16d | |||
| 9b9dba05c2 | |||
| 2d0bbc824f | |||
| fa523c673f | |||
| da1a4ba393 | |||
| e9c9712e52 | |||
| 78561ca659 | |||
| 1ad87383e3 | |||
| c29ac1549f | |||
| f09eeb19e5 | |||
| 4b7b2c78a2 | |||
| 414af6bdb0 | |||
| df7288b275 | |||
| ce80953086 | |||
| d1344dab71 | |||
| 4f2ba4ae76 | |||
| 6bd525c3b4 | |||
| 341671a612 | |||
| 9161aa1556 | |||
| 70e9fbabce | |||
| a2c00deba2 | |||
| 9b4ef9d1d7 | |||
| 3baee0c14e | |||
| 0508bf43d1 | |||
| bd279d1ad7 | |||
| 4d55a66307 | |||
| 9481e01581 | |||
| fe91bf9620 | |||
| d812ba379b | |||
| e1cc7f3c83 | |||
| f50d5d2cd1 | |||
| b3987b0ad5 | |||
| 243946a189 | |||
| 63fbbc5603 | |||
| fcb857df69 | |||
| dabbc31c09 | |||
| 997bfa3dd5 | |||
| 50eca8aab5 | |||
| 6e4a2e4d5e | |||
| 40e9938885 | |||
| 5bc8ca2266 | |||
| 4cdbfbfb86 | |||
| 01575d1f2e | |||
| 8f75c75b80 | |||
| ebc084f9ea | |||
| 626efaf5cf | |||
| 096ce99588 | |||
| 9ce103b31b | |||
| 13cbd84b58 | |||
| a8912dff41 | |||
| 8b773614ba | |||
| d04f04f382 | |||
| 9a44dae4f9 | |||
| 07665dd93e | |||
| eeca17e78b | |||
| f91264bc94 | |||
| 0983e67f21 | |||
| 8e7db87554 | |||
| 9d000b2284 | |||
| e29fd0ca09 | |||
| dcfde5b5e7 | |||
| 871750d6ae | |||
| d6b5dc0830 | |||
| e99a3e8978 | |||
| 13db6d3155 | |||
| f8845d4fc5 | |||
| efcfb00239 | |||
| 1727313e54 | |||
| f6d55d07c1 | |||
| 331bbadaad | |||
| f012c604b8 | |||
| a8f6b0c599 | |||
| e46d73869c | |||
| 6bf413a008 | |||
| 10b7f63f9f | |||
| f2dff2d173 | |||
| 963c3bf212 | |||
| f7071569d9 | |||
| 4e5eadbf82 | |||
| 113e2e7821 | |||
| 21c8c699dd | |||
| db36b7dc40 | |||
| 38522989d3 | |||
| aa49605ac4 | |||
| 3af65bf276 | |||
| 0a51c3bdea | |||
| ba6c3a8f38 | |||
| 66e196b5ec | |||
| d4a49d7137 | |||
| 1cca8af54f | |||
| 7e925fd931 | |||
| 45448171d4 | |||
| 64ae3d9029 | |||
| edccac17f6 | |||
| 1d36cad17e | |||
| 08e6d5ba53 | |||
| 6505ee1b35 | |||
| 709612090a | |||
| 1af5c373e4 | |||
| 8181d187dd | |||
| 4b448e3305 | |||
| fd579511ae | |||
| e166237e5d | |||
| b934429e41 | |||
| d61c1da666 | |||
| 8f633771d9 | |||
| 684f478c54 | |||
| cb0099d746 | |||
| a72ead91dc | |||
| fd7c3c6cf3 | |||
| 5ef8abacfa | |||
| a01221ffc6 | |||
| cbd4ab2e15 | |||
| e70152b85b | |||
| 8a9c9a587e | |||
| 29c15d9acd | |||
| bf6773f666 | |||
| bba0ff87da | |||
| c9de5278ea | |||
| 6f5a791a1f | |||
| 1e7a720e74 | |||
| 10a7080928 | |||
| 7349c64253 | |||
| 88649507c7 | |||
| cc503473f9 | |||
| 10fcec1dd8 | |||
| 0679c0f6d7 | |||
| a1a69bdbab | |||
| da02b358e2 | |||
| 60dcea6ff4 | |||
| b5796d5e6c | |||
| 6338fd58c3 | |||
| 39b6841557 | |||
| f3a20e8e59 | |||
| 830df65573 | |||
| 2aeb9e1315 | |||
| 696c48c251 | |||
| dcf02e7e93 | |||
| 581091591b | |||
| 0e6f1a064d | |||
| 70975098e2 | |||
| 5b27f9843f | |||
| 71e29a6599 | |||
| 6fd43a4b18 | |||
| 11f207fbe2 | |||
| 6559d589dd | |||
| b7e9acc242 | |||
| bfd1cd0555 | |||
| 590dffdb78 | |||
| 0205dd6853 | |||
| cd03a42c1b | |||
| c1a8d181d2 | |||
| a17d5e04bb | |||
| e719ae24f0 | |||
| b4922e6899 | |||
| d8c2505fb8 | |||
| 02426e413f | |||
| bd05971ebb | |||
| 4e12692325 | |||
| f7ea69acc9 | |||
| 3640cea64d | |||
| f5ac8ac39d | |||
| f801e77208 | |||
| 2a8c5ea9eb | |||
| 181a42e344 | |||
| a0511ca3ac | |||
| 0b48f3a67f | |||
| 15727e83cc | |||
| 7d06432a76 | |||
| 761c47e26d | |||
| edc6a38dfb | |||
| 2b79e127c4 | |||
| 449875d5b8 | |||
| bbdb9e0107 | |||
| 0a613734f7 | |||
| ff2aa0434a | |||
| 2fa22ca2b3 | |||
| 25473c1b5c | |||
| 7c087e20b7 | |||
| 7250b59f19 | |||
| e54e7ad6ec | |||
| 4a2fd9fb72 | |||
| afd5f2b3be | |||
| d468b93e9f | |||
| 188ad0057f | |||
| bde91e3dbf | |||
| 3e9b233ecb | |||
| 1f6a3010b5 | |||
| fa15db70c9 | |||
| b74c359523 | |||
| 6d7e67820a | |||
| 3d884963a7 | |||
| 265cac6474 | |||
| 96ff18b45c | |||
| 8eb2d689fd | |||
| d2888db960 | |||
| 5d7de5a7ba | |||
| 4d3e9c39b1 | |||
| a2639afd7a | |||
| 6d8f89deba | |||
| b3a2493767 | |||
| 5d3cbbe194 | |||
| fd2e480e17 | |||
| 342556b763 | |||
| 1dd67ac7a9 | |||
| 2311c55319 | |||
| eb80d0d6c1 | |||
| c629596198 | |||
| 3fe86f753f | |||
| 031e5a74d7 | |||
| b26baed61e | |||
| a311f41f0c | |||
| 07c3fb05a8 | |||
| 0625ad2652 | |||
| ed3e4082a1 | |||
| b187130c34 | |||
| 3c56477f3a | |||
| 8c15ed7887 | |||
| 1f46c647f9 | |||
| 8791ddf547 | |||
| 04679efe76 | |||
| 9197d31552 | |||
| 47977e3b37 | |||
| e358689181 | |||
| 4cda4793bc | |||
| 7b470fa1f8 | |||
| 33b14d8848 | |||
| 01f6464a71 | |||
| c0c0755505 | |||
| 5bccf567fd | |||
| 670235480b | |||
| bfab104961 | |||
| 658ce63a04 | |||
| 208b602c97 | |||
| 49c65d9f64 | |||
| afdd241116 | |||
| 241748308a | |||
| fc0e0adbc7 | |||
| ce648a2649 | |||
| 92602510a5 | |||
| 2429745cc0 | |||
| 98f70179ef | |||
| bd167f6258 | |||
| 88423908f2 | |||
| 23a4c4b0b5 | |||
| 879b6b2810 | |||
| 3a3652e753 | |||
| e79079f4d4 | |||
| 3660eb62ec | |||
| c6d41ba35d | |||
| c9fb306823 | |||
| 6d9a9032b2 | |||
| fab8edef79 | |||
| df60f1c273 | |||
| fc0e3972a3 | |||
| 3ff6c327f9 | |||
| c22b067e30 | |||
| 6d6eff662d | |||
| 48ecc3915e | |||
| 8c87ead486 | |||
| 66cffd6722 | |||
| eadad135e2 | |||
| 9a44988707 | |||
| d9b2f92e64 | |||
| a8cb907b71 | |||
| 633c176e50 | |||
| 91e8c60036 | |||
| 9c1398814d | |||
| 672d1dc800 | |||
| fca666825a | |||
| dac76abd74 | |||
| e8ad53b990 | |||
| 002504fca3 | |||
| 16c36d346e | |||
| 18142fc257 | |||
| 72d3244dcb | |||
| 34ee246673 | |||
| 3ea1cff8de | |||
| a9de3f6f0e | |||
| a54bf745ae | |||
| 8ee547cad7 | |||
| 325a68877b | |||
| fd13759a79 | |||
| 283ac31f9b | |||
| ea253634d2 | |||
| 426c24ac52 | |||
| 15f7a39997 | |||
| 69107bb48c | |||
| 681e9deafd | |||
| 62911b8490 | |||
| 4e760a2a38 | |||
| 8a1a2b93c6 | |||
| 102458d9c8 | |||
| 2a75912ca4 | |||
| 81c6410394 | |||
| d8b759841e | |||
| d66d9a03ec | |||
| 1894c5c971 | |||
| 5012e4e9bd | |||
| b9ccd9849a | |||
| 898f961e2a | |||
| 88843a1f2d | |||
| 8ea37f2b03 | |||
| 162b5dcb00 | |||
| 2889258304 | |||
| 7770a41f08 | |||
| 0ba4a6a968 | |||
| c2d39b1357 | |||
| 415d47658b | |||
| a3e58605a5 | |||
| 7936c3d097 | |||
| d4da415e4d | |||
| 3197700bc0 | |||
| 285b99a5ac | |||
| fac40064ce | |||
| 4c4ef3ad64 | |||
| 73bfa8e069 | |||
| 2b9b254aaf | |||
| 69edb29412 | |||
| a1deadc40c | |||
| 495aa71860 | |||
| 2cf672ed17 | |||
| 95770429ef | |||
| 724b42f938 | |||
| 010f1f7a77 | |||
| 9e96f3e9bd | |||
| c68437cce8 | |||
| 3f0385d469 | |||
| 1d37344d01 | |||
| a5e412ee85 | |||
| a4b920b1ce | |||
| bdd1d765bf | |||
| 0f66ce9343 | |||
| 68de1c72e7 | |||
| 2062bd0b92 | |||
| 0ea1b6c527 | |||
| 0f1dcb88d4 | |||
| 12500bbd70 | |||
| a2a40d122e | |||
| 7bc15e05d6 | |||
| 84a8032233 | |||
| 67a4ab0dfe | |||
| eff25a9245 | |||
| b0c09da0bc | |||
| 5497cb527b | |||
| 9e6010f650 | |||
| 8d4ff54463 | |||
| a3f2ec8161 | |||
| a02f6b0276 | |||
| 570a433826 | |||
| e811ce765c | |||
| c17037ef65 | |||
| bd3682f4f1 | |||
| 28767c0f2d | |||
| f70431f7a5 | |||
| 26448858e8 | |||
| fc2d4a45ca | |||
| 478f4b86e7 | |||
| 5e8f7aa52d | |||
| c9b9cb5846 | |||
| 06a1b12069 | |||
| 815adb0856 | |||
| ef22f84ab1 | |||
| 596e7bf617 | |||
| a19609f5b1 | |||
| 2ad21f9cd0 | |||
| e91c9f06dd | |||
| d8e1cfe702 | |||
| 4949a8d464 | |||
| 121c624026 | |||
| 0ba5118b5e | |||
| 80d3824150 | |||
| 5bf74fda13 | |||
| a9cbfe93d0 | |||
| e4d30cb842 | |||
| 9b81677fbf | |||
| ce9762da73 | |||
| cb96902786 | |||
| 750d5efc0e | |||
| 929ebf6e0e | |||
| 3d446a5d30 | |||
| 978fe74291 | |||
| 04a5f4667a | |||
| 095bbc9d19 | |||
| 2478eb6873 | |||
| d1b490b59a | |||
| 01158bb16a | |||
| dd583cb908 | |||
| 702952740f | |||
| 99741d0805 | |||
| fcec89bf9c | |||
| d2aa0f521f | |||
| dab33a829b | |||
| dc08890d4c | |||
| 40c5192c1c | |||
| 06c0ef6dde | |||
| cb1ad31c86 | |||
| 8fbf7485dc | |||
| c03aa9cbfd | |||
| 063ffc6ece | |||
| b5881ef5b4 | |||
| 91582bc164 | |||
| c11b53bba9 | |||
| 362d82e32c | |||
| a024ff4899 | |||
| fb3dda807e | |||
| 98a4dbddfc | |||
| e2f0d4b643 | |||
| ad67f0de2c | |||
| 6df78fb8c0 | |||
| b75f9cc039 | |||
| 776b50945f | |||
| 6bdfc8a141 | |||
| 88fc4a26ed | |||
| 904fee3f53 | |||
| 784a9928cd | |||
| 493b79a18c | |||
| 503ae7679f | |||
| 873b1f3774 | |||
| e9e7e7a4c7 | |||
| e508edb680 | |||
| 334ad55548 | |||
| d14816d18e | |||
| 204f9fbe51 | |||
| 6d8b407872 | |||
| 0f5dab8095 | |||
| e1271a62cf | |||
| 549e182867 | |||
| 59f8e39092 | |||
| 5ff3c615e9 | |||
| 00300d80c2 | |||
| 7dd97a8490 | |||
| f39cfe8a22 | |||
| 3251c1125f | |||
| a7f0f1611f | |||
| cf1f0bdb63 | |||
| b7808a8fa6 | |||
| 7184016b62 | |||
| cb137bb28a | |||
| 5309911e2b | |||
| 101949ba1c | |||
| 1431d3a541 | |||
| c04ef86daf | |||
| f780bef0a9 | |||
| 014b74bb1d | |||
| 2578d22b93 | |||
| b91cbf1233 | |||
| 55955b7731 | |||
| 484c9dc910 | |||
| c235a78634 | |||
| 09bf6684d3 | |||
| ea0a80e4d1 | |||
| 52e706d212 | |||
| 55f8633cce | |||
| 11fa48d1b0 | |||
| a3d7c0bbcd | |||
| 32af7d10c6 | |||
| 1241e7e868 | |||
| 0a8601ca79 | |||
| 8fa8424f24 | |||
| c7465ba9ee | |||
| 604e627233 | |||
| 7c1f957275 | |||
| ad762c667b | |||
| 25148a4ccc | |||
| 803fa4781b | |||
| d1133a01a8 | |||
| 63cdb366ba | |||
| 585d1bfba9 | |||
| fb9d1e69a4 | |||
| 98f84e8740 | |||
| 75d975f91d | |||
| 0cebd3646f | |||
| 7633022d82 | |||
| 045747f625 | |||
| 8f146582a4 | |||
| 59bb27998f | |||
| d45cd9932b | |||
| 58b6a91f65 | |||
| 942875d1a1 | |||
| bcb38f8731 | |||
| ad53559b4f | |||
| 6b740baf73 | |||
| 1ada97c409 | |||
| 89caa5f87c | |||
| 1badc69477 | |||
| 6769ade9a9 | |||
| 0e783f0413 | |||
| 577ab77f1d | |||
| 7ee44be072 | |||
| fa7bbaf98b | |||
| 672fcfdb6d | |||
| fc01b4ad8a | |||
| ca7f0f5951 | |||
| f003ad3277 | |||
| dc6f59943f | |||
| ee51adb675 | |||
| 5406c24771 | |||
| d9c768c2d0 | |||
| d876c4d100 | |||
| 894aaa9fa9 | |||
| 010f22ea3b | |||
| 0a560c9594 | |||
| 7e53c18f5b | |||
| f0a3325ab8 | |||
| a18b226ea2 | |||
| 4112c88c1b | |||
| abda397c9f | |||
| ba0c7173c9 | |||
| 0c40287764 | |||
| 05902d88a7 | |||
| 228b00d5c7 | |||
| 6c2ce95b0e | |||
| fcb7ece378 | |||
| 65ef84179f | |||
| 0811018560 | |||
| c37f8153c0 | |||
| 1a70ce90f9 | |||
| cd011035ff | |||
| b7aa164ed8 | |||
| cecf304ace | |||
| ce60b83e65 | |||
| 892ddc8a79 | |||
| de237fb4af | |||
| d60fc52e74 | |||
| 1c8e702399 | |||
| c2dc35e857 | |||
| 2067e12bdf | |||
| 7764d4ba30 | |||
| 719a198e55 | |||
| 8eac35df75 | |||
| 975972885a | |||
| 056da4b782 | |||
| 0de3c26200 | |||
| f5539cf79f | |||
| a0bdca62b5 | |||
| ad996d39d2 | |||
| d393b9ea90 | |||
| 3ae37d764b | |||
| 6b15d7fca2 | |||
| d8167ea9dd | |||
| 6efb0cfccf | |||
| 19ac16ff62 | |||
| 1710df79f2 | |||
| 72cf0a5837 | |||
| 468f1788b8 | |||
| 36b5e1b5d7 | |||
| 03d371d9fc | |||
| 4b50455a22 | |||
| 5250de602c | |||
| ffb88cdf57 | |||
| 06773a22c9 | |||
| 6ddcbc7c18 | |||
| 1cdc9142df | |||
| 3bc88c4bf3 | |||
| c4b21c554f | |||
| 151c691af2 | |||
| 9f00f08c30 | |||
| 3e2c03d3a3 | |||
| cd1c420fae | |||
| c4d592be20 | |||
| 808e470b27 | |||
| f7bbe2d9f7 | |||
| 92760b2835 | |||
| 379718dbf9 | |||
| 787d690c41 | |||
| 32f124517a | |||
| f501a6ebac | |||
| 2f632a0f8d | |||
| 597b8a67e2 | |||
| f21689f8dd | |||
| 9bd3f170e5 | |||
| 34e9c614b8 | |||
| 960db59935 | |||
| d57efcf0fe | |||
| 306703e677 | |||
| 256edf489c | |||
| 655837f8cd | |||
| ef43e6a9a4 | |||
| d8ac434e7e | |||
| 6907fadce3 | |||
| 8fa222a9b9 | |||
| fc05ba6f4d | |||
| 3c6d7edea0 | |||
| 908f2efd43 | |||
| 0e90989998 | |||
| 329ba4a62d | |||
| dd9a8b9df5 | |||
| feac842b8a | |||
| 2916e4e625 | |||
| d758cd1e7d | |||
| b8bee2a9fe | |||
| 25b6af331e | |||
| 65e9994963 | |||
| cd8ced38f2 | |||
| f6e35f7250 | |||
| 29299704aa | |||
| ad83a772a1 | |||
| ba5a532da0 | |||
| ab838ae3ba | |||
| 4dedb18137 | |||
| 0c18cbc758 | |||
| c012a8045f | |||
| 536b64a63d | |||
| 6b6b91c083 | |||
| d4cd2d83d4 | |||
| 606b73d92f | |||
| 75543fe126 | |||
| 41e5f53c45 | |||
| 626efceaee | |||
| 9d0596089d | |||
| 9d17f14fbb | |||
| 1ad42f671a | |||
| 2c407117dd | |||
| b3ab757416 | |||
| 6667c03ff6 | |||
| 91b75fcae5 | |||
| 048a54716f | |||
| 694d562a80 | |||
| a12f5dba9f | |||
| 65571cfa06 | |||
| 7841606a41 | |||
| cf67d1cb77 | |||
| 255ecd64b2 | |||
| 96913b568f | |||
| 9c046cf10f | |||
| 68367474d0 | |||
| ab77f6d936 | |||
| 8dadf0dada | |||
| f074914f1b | |||
| 110981066e | |||
| 4fd5d6a814 | |||
| c68c365261 | |||
| 6195485cc7 | |||
| f13471428c | |||
| d2d7dc0717 | |||
| 55f1a05d17 | |||
| 173b81c288 | |||
| 1269d0234d | |||
| 62a8e2f72a | |||
| e50a0e84f2 | |||
| 0307890bf3 | |||
| 998daaa8d0 | |||
| 0b4faf9c80 | |||
| bafeadd809 | |||
| ecab59b715 | |||
| 0edb21a44b | |||
| 652fa57245 | |||
| 108f7bd8f7 | |||
| fce06d28a2 | |||
| 62169f91db | |||
| 2d2f1ec7d1 | |||
| 3a60d08eb3 | |||
| 04f8f175b9 | |||
| c687a4ae5b | |||
| c19c692ace | |||
| 8e71fa736b | |||
| e76fc53e85 | |||
| dc548da9eb | |||
| c618c0a6c3 | |||
| 6e4c75144a | |||
| c1f7e8f471 | |||
| fe098a7ee7 | |||
| aced6c5119 | |||
| abe9b200c9 | |||
| 8b542376c5 | |||
| b59df7faec | |||
| c9c516daf2 | |||
| 94cd21189e | |||
| 4e07941e7a | |||
| a2fcf85e3b | |||
| e830cb9141 | |||
| 9bf3985fb2 | |||
| 59ad11b0f4 | |||
| ca7d92703e | |||
| 75109ea476 | |||
| cd040fc148 | |||
| 6b62d86151 | |||
| 56c6ff289e | |||
| 30d331b16e | |||
| 7f41b96e07 | |||
| f8216e4f6a | |||
| 2e9a66c70c | |||
| 41d2dfce6a | |||
| 0a712e3a68 | |||
| 7455472856 | |||
| 0eca5a6fba | |||
| 8ab3f20eb8 | |||
| 634119f66a | |||
| 28718191d6 | |||
| 6b46c5d2b2 | |||
| 370fa31c11 | |||
| 29f5c6e111 | |||
| 0e4134b5f8 | |||
| 48cc6e7f5a | |||
| e921585baa | |||
| 7530a20cd8 | |||
| f3145e0d06 | |||
| 72a9c18b94 | |||
| 14522ee010 | |||
| d3d53b983a | |||
| 2a3b6d0ebb | |||
| 2bc9e29e4b | |||
| afbe93c5cb |
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -3,7 +3,7 @@ name: Bug Report
|
||||
about: Report an issue with build_tools you've discovered.
|
||||
---
|
||||
|
||||
**Describe your problem**:
|
||||
# Describe your problem:
|
||||
|
||||
*Be clear in your description of the problem.
|
||||
Open an issue with a descriptive title and a summary in complete sentences.*
|
||||
|
||||
29
.github/workflows/check.yml
vendored
29
.github/workflows/check.yml
vendored
@ -1,15 +1,24 @@
|
||||
name: check
|
||||
on: [push]
|
||||
name: Markdown Lint
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- '**'
|
||||
paths:
|
||||
- '*.md'
|
||||
- 'develop/*.md'
|
||||
- 'scripts/**.md'
|
||||
- '.markdownlint.jsonc'
|
||||
|
||||
jobs:
|
||||
markdownlint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Use Node.js 12
|
||||
uses: actions/setup-node@v1
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DavidAnson/markdownlint-cli2-action@v16
|
||||
with:
|
||||
node-version: 12
|
||||
- name: Check *.md files by `markdownlint`
|
||||
run: |
|
||||
npm install -g markdownlint-cli
|
||||
markdownlint *.md develop/*.md
|
||||
globs: |
|
||||
*.md
|
||||
develop/*.md
|
||||
scripts/**.md
|
||||
|
||||
88
.github/workflows/git-operations.yml
vendored
Normal file
88
.github/workflows/git-operations.yml
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
name: Git Operations
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
operation:
|
||||
description: 'Operation to perform'
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- create
|
||||
- remove
|
||||
default: 'create'
|
||||
|
||||
branch_name:
|
||||
description: 'Branch name to create or remove'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
base_branch:
|
||||
description: 'Base branch to work from (for create operation)'
|
||||
required: false
|
||||
type: string
|
||||
default: 'develop'
|
||||
|
||||
branding:
|
||||
description: 'Branding name'
|
||||
required: false
|
||||
type: string
|
||||
default: 'onlyoffice'
|
||||
|
||||
branding_url:
|
||||
description: 'Branding repository URL (relative to git host)'
|
||||
required: false
|
||||
type: string
|
||||
default: 'ONLYOFFICE/onlyoffice.git'
|
||||
|
||||
jobs:
|
||||
git-operations:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: ONLYOFFICE/build_tools
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
# Install any Python dependencies if requirements.txt exists
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global user.name "GitHub Actions Bot"
|
||||
git config --global user.email "actions@github.com"
|
||||
|
||||
- name: Run Git Operations
|
||||
run: |
|
||||
cd ONLYOFFICE/build_tools/scripts/develop
|
||||
python git_operations.py ${{ inputs.operation }} "${{ inputs.branch_name }}" \
|
||||
--base-branch="${{ inputs.base_branch }}" \
|
||||
--branding="${{ inputs.branding }}" \
|
||||
--branding-url="${{ inputs.branding_url }}" \
|
||||
--modules="${{ inputs.modules }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Operation Summary
|
||||
run: |
|
||||
echo "## Git Operations Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Operation**: ${{ inputs.operation }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Branch Name**: ${{ inputs.branch_name }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Base Branch**: ${{ inputs.base_branch }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Branding**: ${{ inputs.branding }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Branding URL**: ${{ inputs.branding_url }}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Modules**: ${{ inputs.modules }}" >> $GITHUB_STEP_SUMMARY
|
||||
if [ "${{ inputs.operation }}" = "remove" ] && [ "${{ inputs.force_remove }}" = "true" ]; then
|
||||
echo "- **Force Remove**: Yes" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
9
.github/workflows/update-version.yml
vendored
9
.github/workflows/update-version.yml
vendored
@ -3,16 +3,13 @@ name: Update hard-coded version
|
||||
on: workflow_dispatch
|
||||
|
||||
jobs:
|
||||
|
||||
update-version:
|
||||
if: >-
|
||||
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
|
||||
contains(github.ref, 'refs/heads/release/v') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ secrets.PUSH_TOKEN }}
|
||||
|
||||
@ -25,9 +22,9 @@ jobs:
|
||||
run: echo "${{ env.version }}" > version
|
||||
|
||||
- name: Commit & push changes
|
||||
uses: EndBug/add-and-commit@v8
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
author_name: github-actions[bot]
|
||||
author_email: github-actions[bot]@users.noreply.github.com
|
||||
message: Update hard-coded version to v${{ env.version }}
|
||||
message: Update hard-coded version to ${{ env.version }}
|
||||
add: version
|
||||
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@ -7,3 +7,11 @@ config
|
||||
*.*~
|
||||
**~
|
||||
*.DS_Store
|
||||
.idea
|
||||
scripts/license_checker/reports
|
||||
tests/puppeteer/node_modules
|
||||
tests/puppeteer/work_directory
|
||||
tests/puppeteer/package.json
|
||||
tests/puppeteer/package-lock.json
|
||||
scripts/sdkjs_common/jsdoc/node_modules
|
||||
scripts/sdkjs_common/jsdoc/package-lock.json
|
||||
|
||||
5
.markdownlint.jsonc
Normal file
5
.markdownlint.jsonc
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"line-length": {
|
||||
"code_block_line_length": 300
|
||||
}
|
||||
}
|
||||
44
Dockerfile
44
Dockerfile
@ -1,15 +1,45 @@
|
||||
FROM ubuntu:16.04
|
||||
FROM ubuntu:24.04
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||
|
||||
RUN echo 'keyboard-configuration keyboard-configuration/layoutcode string us' | debconf-set-selections && \
|
||||
echo 'keyboard-configuration keyboard-configuration/modelcode string pc105' | debconf-set-selections
|
||||
|
||||
RUN apt-get -y update && \
|
||||
apt-get -y install python \
|
||||
python3 \
|
||||
sudo
|
||||
RUN rm /usr/bin/python && ln -s /usr/bin/python2 /usr/bin/python
|
||||
apt-get -y install sudo \
|
||||
git \
|
||||
git-lfs \
|
||||
curl \
|
||||
wget \
|
||||
p7zip-full
|
||||
|
||||
ADD . /build_tools
|
||||
WORKDIR /build_tools
|
||||
|
||||
CMD cd tools/linux && \
|
||||
python3 ./automate.py
|
||||
# Install local Python
|
||||
RUN cd tools/linux && \
|
||||
./python.sh
|
||||
|
||||
# Fetch Qt binaries
|
||||
RUN cd tools/linux && \
|
||||
./python3/bin/python3 ./qt_binary_fetch.py amd64
|
||||
|
||||
# Install system dependencies
|
||||
RUN cd tools/linux && \
|
||||
./python3/bin/python3 ./deps.py
|
||||
|
||||
# Install CMake
|
||||
RUN cd tools/linux && \
|
||||
./cmake.sh
|
||||
|
||||
# Fetch sysroot
|
||||
RUN cd tools/linux/sysroot && \
|
||||
../python3/bin/python3 ./fetch.py amd64
|
||||
|
||||
ARG BRANCH=master
|
||||
ENV BRANCH=${BRANCH}
|
||||
|
||||
CMD ["sh", "-c", "./tools/linux/python3/bin/python3 ./configure.py --sysroot \"1\" --clean \"0\" --update-light \"1\" --branch \"${BRANCH}\" --update \"1\" --module \"desktop server builder\" --qt-dir \"$(pwd)/tools/linux/qt_build/Qt-5.9.9\" && ./tools/linux/python3/bin/python3 ./make.py"]
|
||||
385
README.md
385
README.md
@ -1,222 +1,218 @@
|
||||
# build_tools
|
||||
<h1>ONLYOFFICE Build Tools</h1>
|
||||
|
||||
## Overview
|
||||
Welcome to the ```build_tools``` repository! This powerful toolkit simplifies the process of compiling [ONLYOFFICE](https://github.com/ONLYOFFICE) products from source on Linux.
|
||||
|
||||
**build_tools** allow you to automatically get and install all the components
|
||||
necessary for the compilation process, all the dependencies required for the
|
||||
**ONLYOFFICE Document Server**, **Document Builder** and **Desktop Editors**
|
||||
correct work, as well as to get the latest version of
|
||||
**ONLYOFFICE products** source code and build all their components.
|
||||
It automatically fetches all the required dependencies and source code to build the latest versions of:
|
||||
|
||||
**Important!** We can only guarantee the correct work of the products built from
|
||||
the `master` branch.
|
||||
* [Docs (Document Server)](https://www.onlyoffice.com/docs?utm_source=github&utm_medium=cpc&utm_campaign=GitHubBuildTools)
|
||||
* [Desktop Editors](https://www.onlyoffice.com/desktop?utm_source=github&utm_medium=cpc&utm_campaign=GitHubBuildTools)
|
||||
* [Document Builder](https://www.onlyoffice.com/document-builder?utm_source=github&utm_medium=cpc&utm_campaign=GitHubBuildTools)
|
||||
|
||||
## How to use - Linux
|
||||
**A quick note:** For the most stable and reliable builds, we strongly recommend compiling from the ```master``` branch of this repository.
|
||||
|
||||
**Note**: The solution has been tested on **Ubuntu 16.04**.
|
||||
## **How do I use it on Linux? 🐧**
|
||||
|
||||
### Installing dependencies
|
||||
>This guide has been tested and verified on **Ubuntu 16.04**.
|
||||
|
||||
You might need to install **Python**, depending on your version of Ubuntu:
|
||||
### **Step 1: Install dependencies**
|
||||
|
||||
First, let's make sure you have **Python** installed, as it's needed to run the build scripts.
|
||||
|
||||
```bash
|
||||
sudo apt-get install -y python
|
||||
```
|
||||
|
||||
### Building ONLYOFFICE products source code
|
||||
### **Step 2: Build the source code**
|
||||
|
||||
1. Clone the build_tools repository:
|
||||
Now, you're ready to build the ONLYOFFICE products.
|
||||
|
||||
```bash
|
||||
git clone https://github.com/ONLYOFFICE/build_tools.git
|
||||
1. **Clone the build_tools repository:**
|
||||
|
||||
This command downloads the build tools to your machine using Git:
|
||||
```bash
|
||||
git clone https://github.com/ONLYOFFICE/build_tools.git
|
||||
```
|
||||
|
||||
2. **Navigate to the scripts directory:**
|
||||
```bash
|
||||
cd build_tools/tools/linux
|
||||
```
|
||||
3. **Run the automation script:**
|
||||
|
||||
This is where the magic happens! Running the script without any options will build all three products: Document Server, Document Builder, and Desktop Editors.
|
||||
|
||||
```bash
|
||||
./automate.py
|
||||
```
|
||||
You can also build ONLYOFFICE products separately. Just run the script with the parameter corresponding to the necessary product. For example, to build *Desktop Editors* and *Document Server*
|
||||
```bash
|
||||
./automate.py desktop server
|
||||
```
|
||||
|
||||
**Perfect!** Once the script finishes, you will find the compiled products in the ```./out``` directory.
|
||||
|
||||
## **Advanced options & different workflows 🚀**
|
||||
|
||||
### **How to use Docker**
|
||||
|
||||
If you prefer using Docker, you can build all products inside a container. This is a great way to keep your local system clean.
|
||||
|
||||
1. **Create an output directory:**
|
||||
|
||||
```bash
|
||||
mkdir out
|
||||
```
|
||||
|
||||
2. Go to the `build_tools/tools/linux` directory:
|
||||
2. **Build the Docker image:**
|
||||
|
||||
```bash
|
||||
cd build_tools/tools/linux
|
||||
```bash
|
||||
docker build --tag onlyoffice-document-editors-builder .
|
||||
```
|
||||
|
||||
3. **Run the container to start the build:**
|
||||
|
||||
This command mounts your local out directory into the container, so the final build files will appear on your machine.
|
||||
|
||||
```bash
|
||||
docker run -v $PWD/out:/build_tools/out onlyoffice-document-editors-builder
|
||||
```
|
||||
|
||||
3. Run the `automate.py` script:
|
||||
You've done it! The results will be in the ```./out``` directory you created.
|
||||
|
||||
```bash
|
||||
./automate.py
|
||||
```
|
||||
## **How to build and run the products separately ▶️**
|
||||
|
||||
If you run the script without any parameters this allows to build **ONLYOFFICE
|
||||
Document Server**, **Document Builder** and **Desktop Editors**.
|
||||
Don't need everything? You can save time by building only the products you need. Just add the product name as an argument to the script.
|
||||
|
||||
The result will be available in the `./out` directory.
|
||||
### Need just the [Document Builder](https://github.com/ONLYOFFICE/DocumentBuilder)❓
|
||||
* How to build
|
||||
|
||||
To build **ONLYOFFICE** products separately run the script with the parameter
|
||||
corresponding to the necessary product.
|
||||
```bash
|
||||
./automate.py builder
|
||||
```
|
||||
* How to run
|
||||
```bash
|
||||
cd ../../out/linux_64/onlyoffice/documentbuilder
|
||||
./docbuilder
|
||||
```
|
||||
|
||||
It’s also possible to build several products at once as shown in the example
|
||||
below.
|
||||
### Need just the [Desktop Editors](https://github.com/ONLYOFFICE/DesktopEditors)❓
|
||||
|
||||
**Example**: Building **Desktop Editors** and **Document Server**
|
||||
* How to build
|
||||
```bash
|
||||
./automate.py desktop
|
||||
```
|
||||
* How to run
|
||||
```bash
|
||||
cd ../../out/linux_64/onlyoffice/desktopeditors
|
||||
LD_LIBRARY_PATH=./ ./DesktopEditors
|
||||
```
|
||||
|
||||
### Need just the [Docs (Document Server)](https://github.com/ONLYOFFICE/DocumentServer)❓
|
||||
* How to build
|
||||
```bash
|
||||
./automate.py server
|
||||
```
|
||||
* How to run
|
||||
|
||||
Running the Document Server is a multi-step process because it relies on a few background services. Let's break it down step by step.
|
||||
|
||||
#### **Step 1. Set up dependencies**
|
||||
|
||||
The Document Server needs a few things to run correctly:
|
||||
|
||||
* **NGINX**: Acts as a web server to handle requests.
|
||||
* **PostgreSQL**: Used as the database to store information.
|
||||
* **RabbitMQ**: A message broker that helps different parts of the server communicate.
|
||||
|
||||
Here are the commands to install and configure them.
|
||||
|
||||
#### **Install and configure NGINX**
|
||||
|
||||
1. Install NGINX
|
||||
```bash
|
||||
sudo apt-get install nginx
|
||||
```
|
||||
2. Disable the default NGINX site
|
||||
```bash
|
||||
sudo rm -f /etc/nginx/sites-enabled/default
|
||||
```
|
||||
3. Set up the new website. To do that create the ```/etc/nginx/sites-available/onlyoffice-documentserver``` file with the following contents:
|
||||
|
||||
```bash
|
||||
./automate.py desktop server
|
||||
map $http_host $this_host {
|
||||
"" $host;
|
||||
default $http_host;
|
||||
}
|
||||
map $http_x_forwarded_proto $the_scheme {
|
||||
default $http_x_forwarded_proto;
|
||||
"" $scheme;
|
||||
}
|
||||
map $http_x_forwarded_host $the_host {
|
||||
default $http_x_forwarded_host;
|
||||
"" $this_host;
|
||||
}
|
||||
map $http_upgrade $proxy_connection {
|
||||
default upgrade;
|
||||
"" close;
|
||||
}
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $proxy_connection;
|
||||
proxy_set_header X-Forwarded-Host $the_host;
|
||||
proxy_set_header X-Forwarded-Proto $the_scheme;
|
||||
server {
|
||||
listen 0.0.0.0:80;
|
||||
listen [::]:80 default_server;
|
||||
server_tokens off;
|
||||
rewrite ^\/OfficeWeb(\/apps\/.*)$ /web-apps$1 redirect;
|
||||
location / {
|
||||
proxy_pass http://localhost:8000;
|
||||
proxy_http_version 1.1;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Using Docker
|
||||
|
||||
You can also build all **ONLYOFFICE products** at once using Docker.
|
||||
Build the `onlyoffice-document-editors-builder` Docker image using the
|
||||
provided `Dockerfile` and run the corresponding Docker container.
|
||||
|
||||
4. Enable the new site by creating a symbolic link
|
||||
```bash
|
||||
mkdir out
|
||||
docker build --tag onlyoffice-document-editors-builder .
|
||||
docker run -v $PWD/out:/build_tools/out onlyoffice-document-editors-builder
|
||||
sudo ln -s /etc/nginx/sites-available/onlyoffice-documentserver /etc/nginx/sites-enabled/onlyoffice-documentserver
|
||||
```
|
||||
|
||||
The result will be available in the `./out` directory.
|
||||
|
||||
### Building and running ONLYOFFICE products separately
|
||||
|
||||
#### Document Builder
|
||||
|
||||
##### Building Document Builder
|
||||
|
||||
5. Restart NGINX to apply the changes
|
||||
```bash
|
||||
./automate.py builder
|
||||
sudo nginx -s reload
|
||||
```
|
||||
#### **Install and configure PostgreSQL**
|
||||
|
||||
##### Running Document Builder
|
||||
|
||||
```bash
|
||||
cd ../../out/linux_64/onlyoffice/documentbuilder
|
||||
./docbuilder
|
||||
```
|
||||
|
||||
#### Desktop Editors
|
||||
|
||||
##### Building Desktop Editors
|
||||
|
||||
```bash
|
||||
./automate.py desktop
|
||||
```
|
||||
|
||||
##### Running Desktop Editors
|
||||
|
||||
```bash
|
||||
cd ../../out/linux_64/onlyoffice/desktopeditors
|
||||
LD_LIBRARY_PATH=./ ./DesktopEditors
|
||||
```
|
||||
|
||||
#### Document Server
|
||||
|
||||
##### Building Document Server
|
||||
|
||||
```bash
|
||||
./automate.py server
|
||||
```
|
||||
|
||||
##### Installing and configuring Document Server dependencies
|
||||
|
||||
**Document Server** uses **NGINX** as a web server and **PostgreSQL** as a database.
|
||||
**RabbitMQ** is also required for **Document Server** to work correctly.
|
||||
|
||||
###### Installing and configuring NGINX
|
||||
|
||||
1. Install NGINX:
|
||||
|
||||
```bash
|
||||
sudo apt-get install nginx
|
||||
```
|
||||
|
||||
2. Disable the default website:
|
||||
|
||||
```bash
|
||||
sudo rm -f /etc/nginx/sites-enabled/default
|
||||
```
|
||||
|
||||
3. Set up the new website. To do that create the `/etc/nginx/sites-available/onlyoffice-documentserver`
|
||||
file with the following contents:
|
||||
|
||||
```bash
|
||||
map $http_host $this_host {
|
||||
"" $host;
|
||||
default $http_host;
|
||||
}
|
||||
map $http_x_forwarded_proto $the_scheme {
|
||||
default $http_x_forwarded_proto;
|
||||
"" $scheme;
|
||||
}
|
||||
map $http_x_forwarded_host $the_host {
|
||||
default $http_x_forwarded_host;
|
||||
"" $this_host;
|
||||
}
|
||||
map $http_upgrade $proxy_connection {
|
||||
default upgrade;
|
||||
"" close;
|
||||
}
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $proxy_connection;
|
||||
proxy_set_header X-Forwarded-Host $the_host;
|
||||
proxy_set_header X-Forwarded-Proto $the_scheme;
|
||||
server {
|
||||
listen 0.0.0.0:80;
|
||||
listen [::]:80 default_server;
|
||||
server_tokens off;
|
||||
rewrite ^\/OfficeWeb(\/apps\/.*)$ /web-apps$1 redirect;
|
||||
location / {
|
||||
proxy_pass http://localhost:8000;
|
||||
proxy_http_version 1.1;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Add the symlink to the newly created website to the
|
||||
`/etc/nginx/sites-available` directory:
|
||||
|
||||
```bash
|
||||
sudo ln -s /etc/nginx/sites-available/onlyoffice-documentserver /etc/nginx/sites-enabled/onlyoffice-documentserver
|
||||
```
|
||||
|
||||
5. Restart NGINX to apply the changes:
|
||||
|
||||
```bash
|
||||
sudo nginx -s reload
|
||||
```
|
||||
|
||||
###### Installing and configuring PostgreSQL
|
||||
|
||||
1. Install PostgreSQL:
|
||||
|
||||
1. Install PostgreSQL
|
||||
```bash
|
||||
sudo apt-get install postgresql
|
||||
```
|
||||
|
||||
2. Create the PostgreSQL database and user:
|
||||
|
||||
**Note**: The created database must have **onlyoffice** both for user and password.
|
||||
2. Create a database and user.
|
||||
|
||||
**Note**: The user and password must both be **'onlyoffice'.**
|
||||
```bash
|
||||
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
|
||||
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
|
||||
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
|
||||
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
|
||||
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
|
||||
```
|
||||
|
||||
3. Configure the database:
|
||||
|
||||
3. Configure the database:
|
||||
```bash
|
||||
psql -hlocalhost -Uonlyoffice -d onlyoffice -f ../../out/linux_64/onlyoffice/documentserver/server/schema/postgresql/createdb.sql
|
||||
```
|
||||
|
||||
**Note**: Upon that, you will be asked to provide a password for the **onlyoffice**
|
||||
PostgreSQL user. Please enter the **onlyoffice** password.
|
||||
|
||||
###### Installing RabbitMQ
|
||||
Upon that, you will be asked to provide a password for the onlyoffice PostgreSQL user. Please enter the **onlyoffice** password.
|
||||
|
||||
#### **Install RabbitMQ**
|
||||
```bash
|
||||
sudo apt-get install rabbitmq-server
|
||||
```
|
||||
|
||||
###### Generate fonts data
|
||||
Now that you have all the dependencies installed, it's time to generate server files.
|
||||
#### **Step 2. Generate server files**
|
||||
|
||||
Before running the server, you need to generate font and theme data.
|
||||
|
||||
##### **Generate fonts data**
|
||||
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/
|
||||
@ -231,8 +227,7 @@ LD_LIBRARY_PATH=${PWD}/server/FileConverter/bin server/tools/allfontsgen \
|
||||
--use-system="true"
|
||||
```
|
||||
|
||||
###### Generate presentation themes
|
||||
|
||||
##### **Generate presentation themes**
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/
|
||||
LD_LIBRARY_PATH=${PWD}/server/FileConverter/bin server/tools/allthemesgen \
|
||||
@ -241,27 +236,39 @@ LD_LIBRARY_PATH=${PWD}/server/FileConverter/bin server/tools/allthemesgen \
|
||||
--output="${PWD}/sdkjs/common/Images"
|
||||
```
|
||||
|
||||
##### Running Document Server
|
||||
#### **Step 3. Run the Document Server services**
|
||||
|
||||
**Note**: All **Document Server** components run as foreground processes. Thus
|
||||
you need separate terminal consoles to run them or specific tools which will
|
||||
allow to run foreground processes in background mode.
|
||||
All Document Server components run as foreground processes. Thus you need separate terminal consoles to run them or specific tools which will allow to run foreground processes in background mode.
|
||||
|
||||
1. Start the **FileConverter** service:
|
||||
* **Start the FileConverter service:**
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/server/FileConverter
|
||||
LD_LIBRARY_PATH=$PWD/bin \
|
||||
NODE_ENV=development-linux \
|
||||
NODE_CONFIG_DIR=$PWD/../Common/config \
|
||||
./converter
|
||||
```
|
||||
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/server/FileConverter
|
||||
LD_LIBRARY_PATH=$PWD/bin \
|
||||
NODE_ENV=development-linux \
|
||||
NODE_CONFIG_DIR=$PWD/../Common/config \
|
||||
./converter
|
||||
```
|
||||
* **Start the DocService service:**
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/server/DocService
|
||||
NODE_ENV=development-linux \
|
||||
NODE_CONFIG_DIR=$PWD/../Common/config \
|
||||
./docservice
|
||||
```
|
||||
|
||||
2. Start the **DocService** service:
|
||||
## And it's a wrap! 🎉
|
||||
Congratulations! You have successfully used the ```build_tools``` to compile your desired ONLYOFFICE products from the latest source code.
|
||||
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/server/DocService
|
||||
NODE_ENV=development-linux \
|
||||
NODE_CONFIG_DIR=$PWD/../Common/config \
|
||||
./docservice
|
||||
```
|
||||
Everything is now set up. You can go ahead and run your brand-new, self-compiled ONLYOFFICE applications.
|
||||
|
||||
## Need help or have an idea? 💡
|
||||
|
||||
* **🐞 Found a bug?** Please report it by creating an [issue](https://github.com/ONLYOFFICE/build_tools/issues).
|
||||
* **❓ Have a question?** Ask our community and developers on the [ONLYOFFICE Forum](https://community.onlyoffice.com).
|
||||
* **💡 Want to suggest a feature?** Share your ideas on our [feedback platform](https://feedback.onlyoffice.com/forums/966080-your-voice-matters).
|
||||
* **🧑💻 Need help for developers?** Check our [API documentation](https://api.onlyoffice.com/?utm_source=github&utm_medium=cpc&utm_campaign=GitHubBuildTools).
|
||||
|
||||
---
|
||||
|
||||
<p align="center"> Made with ❤️ by the ONLYOFFICE Team </p>
|
||||
|
||||
119
build.pro
119
build.pro
@ -1,119 +0,0 @@
|
||||
TEMPLATE = subdirs
|
||||
|
||||
ROOT_DIR=$$PWD/..
|
||||
DEPLOY_DIR=$$PWD/deploy
|
||||
CORE_ROOT_DIR=$$ROOT_DIR/core
|
||||
|
||||
include($$PWD/common.pri)
|
||||
|
||||
CONFIG += ordered
|
||||
|
||||
core_windows {
|
||||
desktop:CONFIG += core_and_multimedia
|
||||
}
|
||||
core_linux {
|
||||
desktop:CONFIG += core_and_multimedia
|
||||
}
|
||||
core_mac {
|
||||
CONFIG += no_desktop_apps
|
||||
}
|
||||
core_ios {
|
||||
CONFIG += no_use_common_binary
|
||||
CONFIG += no_desktop_apps
|
||||
CONFIG += no_tests
|
||||
}
|
||||
core_android {
|
||||
CONFIG += no_use_common_binary
|
||||
CONFIG += no_desktop_apps
|
||||
CONFIG += no_tests
|
||||
}
|
||||
|
||||
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
|
||||
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
|
||||
cryptopp)
|
||||
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
|
||||
unicodeconverter)
|
||||
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
|
||||
kernel unicodeconverter)
|
||||
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
|
||||
kernel unicodeconverter)
|
||||
addSubProject(pdfwriter, $$CORE_ROOT_DIR/PdfWriter/PdfWriter.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
|
||||
kernel unicodeconverter graphics pdfwriter)
|
||||
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
|
||||
kernel unicodeconverter graphics pdfwriter)
|
||||
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
|
||||
kernel unicodeconverter graphics pdfwriter)
|
||||
addSubProject(pdfreader, $$CORE_ROOT_DIR/PdfReader/PdfReader.pro,\
|
||||
kernel unicodeconverter graphics pdfwriter htmlrenderer)
|
||||
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
|
||||
kernel unicodeconverter graphics network)
|
||||
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
|
||||
kernel unicodeconverter graphics htmlfile2)
|
||||
!no_x2t {
|
||||
addSubProject(docxformat, $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/DocxFormatLib.pro)
|
||||
addSubProject(pptxformat, $$CORE_ROOT_DIR/ASCOfficePPTXFile/PPTXLib/Linux/PPTXFormatLib/PPTXFormatLib.pro)
|
||||
addSubProject(docxfile, $$CORE_ROOT_DIR/ASCOfficeDocxFile2/Linux/ASCOfficeDocxFile2Lib.pro)
|
||||
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/ASCOfficeTxtFile/TxtXmlFormatLib/Linux/TxtXmlFormatLib.pro)
|
||||
addSubProject(rtfformat, $$CORE_ROOT_DIR/ASCOfficeRtfFile/RtfFormatLib/Linux/RtfFormatLib.pro)
|
||||
addSubProject(pptformat, $$CORE_ROOT_DIR/ASCOfficePPTFile/PPTFormatLib/Linux/PPTFormatLib.pro)
|
||||
addSubProject(docformat, $$CORE_ROOT_DIR/ASCOfficeDocFile/DocFormatLib/Linux/DocFormatLib.pro)
|
||||
addSubProject(odffilereader,$$CORE_ROOT_DIR/ASCOfficeOdfFile/linux/OdfFileReaderLib.pro)
|
||||
addSubProject(odffilewriter,$$CORE_ROOT_DIR/ASCOfficeOdfFileW/linux/OdfFileWriterLib.pro)
|
||||
addSubProject(xlsformat, $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/XlsFormatLib.pro)
|
||||
addSubProject(xlsbformat, $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/XlsbFormatLib.pro)
|
||||
addSubProject(vbaformat, $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/VbaFormatLib.pro)
|
||||
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
|
||||
docxformat pptxformat docxfile txtxmlformat rtfformat pptformat docformat odffilereader odffilewriter xlsformat xlsbformat fb2file epubfile docxrenderer)
|
||||
}
|
||||
!no_use_common_binary {
|
||||
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
|
||||
kernel unicodeconverter graphics doctrenderer)
|
||||
}
|
||||
!no_tests {
|
||||
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
|
||||
}
|
||||
|
||||
core_and_multimedia {
|
||||
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
}
|
||||
desktop {
|
||||
message(desktop)
|
||||
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
|
||||
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
|
||||
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdfwriter pdfreader djvufile xpsfile)
|
||||
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
|
||||
documentscore)
|
||||
!core_mac {
|
||||
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
|
||||
documentscore)
|
||||
}
|
||||
|
||||
!no_desktop_apps {
|
||||
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
|
||||
documentscore videoplayer)
|
||||
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
|
||||
documentscore videoplayer)
|
||||
}
|
||||
}
|
||||
|
||||
mobile {
|
||||
message(mobile)
|
||||
!desktop {
|
||||
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
|
||||
}
|
||||
}
|
||||
@ -14,19 +14,20 @@ parser.add_option("--clean", action="store", type="string", dest="clean", defaul
|
||||
parser.add_option("--module", action="store", type="string", dest="module", default="builder", help="defines what modules to build. You can specify several of them, e.g. --module 'core desktop builder server mobile'")
|
||||
parser.add_option("--develop", action="store", type="string", dest="develop", default="0", help="defines develop mode")
|
||||
parser.add_option("--beta", action="store", type="string", dest="beta", default="0", help="defines beta mode")
|
||||
parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]")
|
||||
parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'win_arm64', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]")
|
||||
parser.add_option("--config", action="store", type="string", dest="config", default="", help="provides ability to specify additional parameters for qmake")
|
||||
parser.add_option("--qt-dir", action="store", type="string", dest="qt-dir", default="", help="defines qmake directory path. qmake can be found in qt-dir/compiler/bin directory")
|
||||
parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp", default="", help="defines qmake directory path for Windows XP. qmake can be found in 'qt-dir/compiler/bin directory")
|
||||
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
|
||||
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
|
||||
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
|
||||
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
|
||||
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
|
||||
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
|
||||
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
|
||||
parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
|
||||
parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
|
||||
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="https", help="can be used only if update is set to true - 'https', 'ssh'")
|
||||
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="auto", help="can be used only if update is set to true - 'https', 'ssh'")
|
||||
parser.add_option("--branding", action="store", type="string", dest="branding", default="", help="provides branding path")
|
||||
parser.add_option("--branding-name", action="store", type="string", dest="branding-name", default="", help="provides branding name")
|
||||
parser.add_option("--branding-url", action="store", type="string", dest="branding-url", default="", help="provides branding url")
|
||||
@ -41,6 +42,8 @@ parser.add_option("--vs-version", action="store", type="string", dest="vs-versio
|
||||
parser.add_option("--vs-path", action="store", type="string", dest="vs-path", default="", help="path to vcvarsall")
|
||||
parser.add_option("--siteUrl", action="store", type="string", dest="siteUrl", default="127.0.0.1", help="site url")
|
||||
parser.add_option("--multiprocess", action="store", type="string", dest="multiprocess", default="1", help="provides ability to specify single process for make")
|
||||
parser.add_option("--sysroot", action="store", type="string", dest="sysroot", default="0", help="provides ability to use sysroot (ubuntu 16.04) to build c++ code. If value is \"1\", then the sysroot from tools/linux/sysroot will be used, and if it is not there, it will download it and unpack it. You can also set value as the path to the your own sysroot (rarely used). Only for linux")
|
||||
parser.add_option("--qemu-win-arm64-dir", action="store", type="string", dest="qemu-win-arm64-dir", default="", help="dir to qemu virtual machine for win_arm64 cross build. It should contains start.bat. More info in tools/win/qemu.")
|
||||
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
configOptions = vars(options)
|
||||
|
||||
4
defaults
4
defaults
@ -1,3 +1,3 @@
|
||||
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode"
|
||||
sdkjs-plugin-server="speech, zotero, mendeley"
|
||||
sdkjs-plugin="ai, photoeditor, ocr, translator, thesaurus, youtube, highlightcode"
|
||||
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition"
|
||||
sdkjs-addons="sdkjs-forms"
|
||||
|
||||
@ -1,11 +1,19 @@
|
||||
FROM onlyoffice/documentserver:latest
|
||||
RUN apt-get update -y && \
|
||||
apt-get install git -y \
|
||||
python -y \
|
||||
python3 -y \
|
||||
openjdk-11-jdk -y \
|
||||
bzip2 -y \
|
||||
npm -y && \
|
||||
npm install -g grunt-cli -y && \
|
||||
npm install -g grunt grunt-cli -y && \
|
||||
ln -s /usr/bin/python3 /usr/bin/python && \
|
||||
ln -s /usr/bin/pip3 /usr/bin/pip && \
|
||||
git clone --depth 1 https://github.com/ONLYOFFICE/build_tools.git var/www/onlyoffice/documentserver/build_tools && \
|
||||
sed -i '/documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}/d' /app/ds/run-document-server.sh && \
|
||||
#Set Up Debug Logging
|
||||
sed -i 's/WARN/ALL/g' /etc/onlyoffice/documentserver/log4js/production.json && \
|
||||
#Start test example
|
||||
if [ -s /etc/supervisor/conf.d/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /etc/supervisor/conf.d/ds-example.conf; fi && \
|
||||
if [ -s /app/ds/setup/config/supervisor/ds/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /app/ds/setup/config/supervisor/ds/ds-example.conf; fi && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ENTRYPOINT python /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver && /bin/sh -c /app/ds/run-document-server.sh
|
||||
ENTRYPOINT python3 /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver $@ && /bin/sh -c /app/ds/run-document-server.sh
|
||||
|
||||
@ -1,50 +1,231 @@
|
||||
# Docker
|
||||
|
||||
This directory containing instruction for developers,
|
||||
who want to change something in sdkjs or web-apps module,
|
||||
but don't want to compile pretty compilcated core product to make those changes.
|
||||
This directory contains instructions for developers,
|
||||
who want to change something in sdkjs, web-apps, or the server module,
|
||||
but don't want to compile the complicated core product to make those changes.
|
||||
|
||||
## Installing ONLYOFFICE Docs
|
||||
## System requirements
|
||||
|
||||
## How to use - Linux or macOS
|
||||
### Windows
|
||||
|
||||
**Note**: You need the latest Docker version installed.
|
||||
You need the latest
|
||||
[Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/)
|
||||
installed.
|
||||
|
||||
You might need to pull **onlyoffice/documentserver** image:
|
||||
**Note**: Docker Desktop does not start automatically after installation.
|
||||
You should manually start the **Docker Desktop** application.
|
||||
|
||||
**Note**: Do not prefix docker command with sudo.
|
||||
[This](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user)
|
||||
instruction show how to use docker without sudo.
|
||||
**Note**: If you have problems running Docker Desktop with the
|
||||
"Use WSL 2 instead of Hyper-V" installation option,
|
||||
try reinstalling it without this option.
|
||||
|
||||
```bash
|
||||
docker pull onlyoffice/documentserver
|
||||
```
|
||||
### Linux or macOS
|
||||
|
||||
### Create develop image
|
||||
You need the latest
|
||||
[Docker](https://docs.docker.com/engine/install/)
|
||||
version installed.
|
||||
|
||||
To create a image with the ability to include external non-minified sdkjs code,
|
||||
use the following command:
|
||||
## Create Development Docker Image
|
||||
|
||||
To create an image with the ability to include external non-minified sdkjs code,
|
||||
use the following commands:
|
||||
|
||||
### Clone development environment to the working directory
|
||||
|
||||
```bash
|
||||
git clone https://github.com/ONLYOFFICE/build_tools.git
|
||||
```
|
||||
|
||||
### Build Docker Image
|
||||
|
||||
**Note**: Do not prefix the docker command with sudo.
|
||||
[These instructions](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user)
|
||||
show how to use docker without sudo.
|
||||
|
||||
```bash
|
||||
cd build_tools/develop
|
||||
docker build -t documentserver-develop .
|
||||
docker pull onlyoffice/documentserver
|
||||
docker build --no-cache -t documentserver-develop .
|
||||
```
|
||||
|
||||
**Note**: The dot at the end is required.
|
||||
|
||||
### Connecting external folders
|
||||
**Note**: Sometimes the build may fail due to network errors. Just restart it.
|
||||
|
||||
To connect external folders to the container,
|
||||
you need to pass the "-v" parameter
|
||||
along with the relative paths to the required folders.
|
||||
The folders `sdkjs` and `web-apps` are required for proper development workflow
|
||||
## Clone development modules
|
||||
|
||||
Clone development modules to the working directory.
|
||||
|
||||
* `sdkjs` repo is located [here](https://github.com/ONLYOFFICE/sdkjs/)
|
||||
* `web-apps` repo is located [here](https://github.com/ONLYOFFICE/web-apps/)
|
||||
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
|
||||
|
||||
```bash
|
||||
docker run -i -t -d -p 80:80 --restart=always \
|
||||
-v /host-dir/sdkjs:/var/www/onlyoffice/documentserver/sdkjs \
|
||||
-v /host-dir/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
|
||||
git clone https://github.com/ONLYOFFICE/sdkjs.git
|
||||
git clone https://github.com/ONLYOFFICE/web-apps.git
|
||||
git clone https://github.com/ONLYOFFICE/server.git
|
||||
```
|
||||
|
||||
## Start server with external folders
|
||||
|
||||
To mount external folders to the container,
|
||||
you need to pass the "-v" parameter
|
||||
along with the relative paths to the required folders.
|
||||
The folders `sdkjs` and `web-apps` are required for proper development workflow.
|
||||
The folder `server` is optional.
|
||||
|
||||
**Note**: Run the command with the current working directory
|
||||
containing `sdkjs`, `web-apps`...
|
||||
|
||||
**Note**: ONLYOFFICE server uses port 80.
|
||||
Look for another application using port 80 and stop it.
|
||||
|
||||
**Note**: Starting the server with `sdkjs` and `web-apps` takes 15 minutes,
|
||||
or 20 minutes with `server`.
|
||||
|
||||
### docker run on Windows (PowerShell)
|
||||
|
||||
**Note**: Run PowerShell as administrator to fix EACCES error when installing
|
||||
node_modules.
|
||||
|
||||
Run with `sdkjs` and `web-apps`
|
||||
|
||||
```powershell
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
|
||||
```
|
||||
|
||||
Or run with `sdkjs`, `web-apps`, and `server`
|
||||
|
||||
```powershell
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $pwd/server:/var/www/onlyoffice/documentserver/server documentserver-develop
|
||||
```
|
||||
|
||||
**Note**: If using Git Bash instead of PowerShell, you may need to quote the paths:
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v "$(pwd)/sdkjs":/var/www/onlyoffice/documentserver/sdkjs -v "$(pwd)/web-apps":/var/www/onlyoffice/documentserver/web-apps documentserver-develop
|
||||
```
|
||||
|
||||
### docker run on Linux or macOS
|
||||
|
||||
Run with `sdkjs` and `web-apps`
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
|
||||
```
|
||||
|
||||
Or run with `sdkjs`, `web-apps`, and `server`
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $(pwd)/server:/var/www/onlyoffice/documentserver/server documentserver-develop
|
||||
```
|
||||
|
||||
## Open editor
|
||||
|
||||
After the server starts successfully, you will see Docker log messages like this.
|
||||
|
||||
```text
|
||||
[Date] [WARN] [localhost] [docId] [userId] nodeJS
|
||||
```
|
||||
|
||||
To try the document editor, open a browser tab and type
|
||||
[http://localhost/example](http://localhost/example) into the URL bar.
|
||||
|
||||
**Note**: Disable **ad blockers** for the localhost page.
|
||||
They may block some scripts (like Analytics.js).
|
||||
|
||||
## Modify sources
|
||||
|
||||
### To change something in `sdkjs`, do the following steps
|
||||
|
||||
1) Edit the source file. Let's insert an image URL into each open document.
|
||||
The following command inserts (in case of problems, you can replace the URL)
|
||||
`this.AddImageUrl(['http://localhost/example/images/logo.png']);`
|
||||
after event
|
||||
`this.sendEvent('asc_onDocumentContentReady');`
|
||||
in file
|
||||
`sdkjs/common/apiBase.js`
|
||||
|
||||
**Windows (PowerShell):**
|
||||
```powershell
|
||||
(Get-Content sdkjs/common/apiBase.js) -replace "this\.sendEvent\('asc_onDocumentContentReady'\);", "this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);" | Set-Content sdkjs/common/apiBase.js
|
||||
```
|
||||
|
||||
**Linux:**
|
||||
```bash
|
||||
sed -i "s,this.sendEvent('asc_onDocumentContentReady');,this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);," sdkjs/common/apiBase.js
|
||||
```
|
||||
|
||||
**macOS:**
|
||||
```bash
|
||||
sed -i '' "s,this.sendEvent('asc_onDocumentContentReady');,this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);," sdkjs/common/apiBase.js
|
||||
```
|
||||
|
||||
2) Clear the browser cache or hard reload the page (`Ctrl + Shift + R` or `Cmd + Shift + R` on macOS)
|
||||
|
||||
3) Open a new file in the browser
|
||||
|
||||
### To change something in `server`, do the following steps
|
||||
|
||||
1) Edit the source file. Let's send a `"Hello World!"`
|
||||
chat message every time a document is opened.
|
||||
The following command inserts
|
||||
`yield* onMessage(ctx, conn, {"message": "Hello World!"});`
|
||||
in function
|
||||
`sendAuthInfo`
|
||||
in file
|
||||
`server/DocService/sources/DocsCoServer.js`
|
||||
|
||||
**Windows (PowerShell):**
|
||||
```powershell
|
||||
(Get-Content server/DocService/sources/DocsCoServer.js) -replace 'opt_hasForgotten, opt_openedAt\) \{', 'opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});' | Set-Content server/DocService/sources/DocsCoServer.js
|
||||
```
|
||||
|
||||
**Linux:**
|
||||
```bash
|
||||
sed -i 's#opt_hasForgotten, opt_openedAt) {#opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});#' server/DocService/sources/DocsCoServer.js
|
||||
```
|
||||
|
||||
**macOS:**
|
||||
```bash
|
||||
sed -i '' 's#opt_hasForgotten, opt_openedAt) {#opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});#' server/DocService/sources/DocsCoServer.js
|
||||
```
|
||||
|
||||
2) Restart the document server process
|
||||
|
||||
**Note**: Look for `CONTAINER_ID` in the result of `docker ps`.
|
||||
|
||||
```bash
|
||||
docker exec -it CONTAINER_ID supervisorctl restart all
|
||||
```
|
||||
|
||||
3) Open a new file in the browser
|
||||
|
||||
## Start server with additional functionality (addons)
|
||||
|
||||
To get additional functionality and branding, you need to connect a branding folder,
|
||||
additional addon folders, and pass command line arguments.
|
||||
|
||||
For example, run with `onlyoffice` branding and
|
||||
addons: `sdkjs-forms`, `sdkjs-ooxml`, `web-apps-mobile`.
|
||||
|
||||
### docker run on Windows (PowerShell) with branding
|
||||
|
||||
**Note**: Run PowerShell as administrator to fix EACCES error when installing
|
||||
node_modules.
|
||||
|
||||
```powershell
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true `
|
||||
-v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps `
|
||||
-v $pwd/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $pwd/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $pwd/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $pwd/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile `
|
||||
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
|
||||
```
|
||||
|
||||
### docker run on Linux or macOS with branding
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true \
|
||||
-v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps \
|
||||
-v $(pwd)/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $(pwd)/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $(pwd)/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $(pwd)/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile \
|
||||
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
|
||||
```
|
||||
|
||||
@ -1,12 +1,57 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
sys.path.append(sys.argv[1] + '/build_tools/scripts')
|
||||
sys.path.append(sys.argv[1] + '/build_tools/scripts/develop')
|
||||
import build_js
|
||||
import run_server
|
||||
import config
|
||||
import base
|
||||
|
||||
base.cmd_in_dir(sys.argv[1] + '/build_tools/', 'python', ['configure.py'])
|
||||
config.parse()
|
||||
git_dir = sys.argv[1]
|
||||
|
||||
build_js.build_js_develop(sys.argv[1])
|
||||
base.print_info('argv :'+' '.join(sys.argv))
|
||||
base.cmd_in_dir(git_dir + '/build_tools/', 'python3', ['configure.py', '--develop', '1'] + sys.argv[2:])
|
||||
|
||||
config.parse()
|
||||
config.parse_defaults()
|
||||
|
||||
if base.is_exist(git_dir + "/server/FileConverter/bin/fonts.log"):
|
||||
base.print_info('remove font cache to regenerate fonts in external sdkjs volume')
|
||||
base.delete_file(git_dir + "/server/FileConverter/bin/fonts.log")
|
||||
|
||||
# external server volume
|
||||
if base.is_exist(sys.argv[1] + '/server/DocService/package.json'):
|
||||
base.print_info('replace supervisor cfg to run docservice and converter from source')
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
|
||||
base.print_info('run_server.run_docker_server')
|
||||
run_server.run_docker_server()
|
||||
else:
|
||||
#Fix theme generation for external sdkjs volume
|
||||
if base.is_exist(git_dir + "/server/FileConverter/bin/DoctRenderer.config"):
|
||||
base.print_info('replace DoctRenderer.config for external sdkjs volume')
|
||||
base.generate_doctrenderer_config(git_dir + "/server/FileConverter/bin/DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
|
||||
|
||||
addons = {}
|
||||
addons.update(base.get_sdkjs_addons())
|
||||
addons.update(base.get_web_apps_addons())
|
||||
staticContent = ""
|
||||
for addon in addons:
|
||||
if (addon):
|
||||
staticContent += '"/' + addon + '": {"path": "/var/www/onlyoffice/documentserver/' + addon + '","options": {"maxAge": "7d"}},'
|
||||
|
||||
if staticContent:
|
||||
base.print_info('replace production-linux.json for addons'+staticContent)
|
||||
base.replaceInFileRE("/etc/onlyoffice/documentserver/production-linux.json", '"static_content": {.*', '"static_content": {' + staticContent)
|
||||
|
||||
base.print_info('replace supervisor cfg to run docservice and converter from pkg')
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
|
||||
base.print_info('run_server.run_docker_sdk_web_apps: ' + git_dir)
|
||||
run_server.run_docker_sdk_web_apps(git_dir)
|
||||
|
||||
66
linux-start.sh
Normal file
66
linux-start.sh
Normal file
@ -0,0 +1,66 @@
|
||||
sudo apt-get install git curl wget p7zip-full
|
||||
|
||||
sudo apt-get install git-lfs
|
||||
# for old system (ubuntu 16)
|
||||
#curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo bash
|
||||
#sudo apt-get install git-lfs
|
||||
|
||||
# save login
|
||||
git config --global credential.helper store
|
||||
|
||||
# clone build_tools
|
||||
git clone https://git.onlyoffice.com/ONLYOFFICE/build_tools.git
|
||||
|
||||
# deps =========================================
|
||||
|
||||
cd ./build_tools/tools/linux
|
||||
|
||||
# python 3.10
|
||||
./python.sh
|
||||
|
||||
# qt
|
||||
#./python3/bin/python3 ./qt_binary_fetch.py amd64
|
||||
#./python3/bin/python3 ./qt_binary_fetch.py arm64
|
||||
./python3/bin/python3 ./qt_binary_fetch.py all
|
||||
|
||||
# deps
|
||||
./python3/bin/python3 ./deps.py
|
||||
|
||||
# cmake 3.30
|
||||
sudo ./cmake.sh
|
||||
|
||||
cd ../../
|
||||
|
||||
# ==============================================
|
||||
|
||||
# sysroots (IF NEEDED) =========================
|
||||
|
||||
cd ./build_tools/tools/linux/sysroot
|
||||
#./python3/bin/python3 ./fetch.py amd64
|
||||
#./python3/bin/python3 ./fetch.py arm64
|
||||
./../python3/bin/python3 ./fetch.py all
|
||||
cd ../../../
|
||||
|
||||
# ==============================================
|
||||
|
||||
|
||||
# configure ====================================
|
||||
|
||||
./tools/linux/python3/bin/python3 ./configure.py --clean "0" --update-light "1" --update "1" --branch "hotfix/v9.2.1" --module "desktop" --qt-dir "$(pwd)/tools/linux/qt_build/Qt-5.9.9"
|
||||
|
||||
# with sysroot: sysroot "1"
|
||||
|
||||
# ==============================================
|
||||
|
||||
# cross build linux_arm64
|
||||
sudo apt install qemu-user qemu-user-static binfmt-support
|
||||
sudo update-binfmts --enable qemu-aarch64
|
||||
|
||||
# 1) without sysroot
|
||||
#sudo apt install gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
|
||||
#sudo dpkg --add-architecture arm64
|
||||
#sudo apt update
|
||||
#... install all dev packages ...
|
||||
|
||||
# 2) official supported: with sysroot
|
||||
./tools/linux/python3/bin/python3 ./configure.py sysroot "1" #...
|
||||
51
make.py
51
make.py
@ -1,22 +1,34 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
sys.path.append('scripts/develop')
|
||||
sys.path.append('scripts/develop/vendor')
|
||||
sys.path.append('scripts/core_common')
|
||||
sys.path.append('scripts/core_common/modules')
|
||||
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
|
||||
sys.path.append(__dir__name__ + '/scripts')
|
||||
sys.path.append(__dir__name__ + '/scripts/develop')
|
||||
sys.path.append(__dir__name__ + '/scripts/develop/vendor')
|
||||
sys.path.append(__dir__name__ + '/scripts/core_common')
|
||||
sys.path.append(__dir__name__ + '/scripts/core_common/modules')
|
||||
sys.path.append(__dir__name__ + '/scripts/core_common/modules/android')
|
||||
import config
|
||||
import base
|
||||
import build
|
||||
import build_sln
|
||||
import build_js
|
||||
import build_server
|
||||
import deploy
|
||||
import make_common
|
||||
import develop
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(description="options")
|
||||
parser.add_argument("--build-only-branding", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
if (args.build_only_branding):
|
||||
base.set_env("OO_BUILD_ONLY_BRANDING", "1")
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
base.check_python()
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
@ -33,7 +45,7 @@ if ("1" != base.get_env("OO_RUNNING_BRANDING")) and ("" != config.option("brandi
|
||||
base.cmd("git", ["clone", config.option("branding-url"), branding_dir])
|
||||
|
||||
base.cmd_in_dir(branding_dir, "git", ["fetch"], True)
|
||||
|
||||
|
||||
if not is_exist or ("1" != config.option("update-light")):
|
||||
base.cmd_in_dir(branding_dir, "git", ["checkout", "-f", config.option("branch")], True)
|
||||
|
||||
@ -59,40 +71,29 @@ if ("1" == config.option("update")):
|
||||
base.configure_common_apps()
|
||||
|
||||
# developing...
|
||||
develop.make();
|
||||
develop.make()
|
||||
|
||||
# check only js builds
|
||||
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
|
||||
build_js.make()
|
||||
exit(0)
|
||||
|
||||
#base.check_tools()
|
||||
|
||||
# core 3rdParty
|
||||
make_common.make()
|
||||
|
||||
# build updmodule for desktop (only for windows version)
|
||||
if config.check_option("module", "desktop"):
|
||||
config.extend_option("qmake_addon", "URL_WEBAPPS_HELP=https://download.onlyoffice.com/install/desktop/editors/help/v" + base.get_env('PRODUCT_VERSION') + "-1/apps")
|
||||
config.extend_option("qmake_addon", "URL_WEBAPPS_HELP=https://download.onlyoffice.com/install/desktop/editors/help/v" + base.get_env('PRODUCT_VERSION') + "/apps")
|
||||
|
||||
if "windows" == base.host_platform():
|
||||
config.extend_option("config", "updmodule")
|
||||
config.extend_option("qmake_addon", "LINK=https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.xml")
|
||||
|
||||
if not base.is_file(base_dir + "/tools/WinSparkle-0.7.0.zip"):
|
||||
base.cmd("curl.exe", ["https://d2ettrnqo7v976.cloudfront.net/winsparkle/WinSparkle-0.7.0.zip", "--output", base_dir + "/tools/WinSparkle-0.7.0.zip"])
|
||||
|
||||
if not base.is_dir(base_dir + "/tools/WinSparkle-0.7.0"):
|
||||
base.cmd("7z.exe", ["x", base_dir + "/tools/WinSparkle-0.7.0.zip", "-otools"])
|
||||
|
||||
base.create_dir(base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle")
|
||||
#base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/include", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/include")
|
||||
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_32")
|
||||
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/x64/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_64")
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
base.set_env("VIDEO_PLAYER_VLC_DIR", base_dir + "/../desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc")
|
||||
base.set_env("DESKTOP_URL_UPDATES_MAIN_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.json")
|
||||
base.set_env("DESKTOP_URL_UPDATES_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcastdev.json")
|
||||
|
||||
# build
|
||||
build.make()
|
||||
build_sln.make()
|
||||
|
||||
# js
|
||||
build_js.make()
|
||||
|
||||
107
make_package.py
107
make_package.py
@ -2,17 +2,108 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
sys.path.append("scripts")
|
||||
import argparse
|
||||
import package_common as common
|
||||
import package_utils as utils
|
||||
|
||||
# config
|
||||
utils.parse()
|
||||
# parse
|
||||
parser = argparse.ArgumentParser(description="Build packages.")
|
||||
parser.add_argument("-P", "--platform", dest="platform", type=str,
|
||||
action="store", help="Defines platform", required=True)
|
||||
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
|
||||
action="store", help="Defines targets", required=True)
|
||||
parser.add_argument("-V", "--version", dest="version", type=str,
|
||||
action="store", help="Defines version")
|
||||
parser.add_argument("-B", "--build", dest="build", type=str,
|
||||
action="store", help="Defines build")
|
||||
parser.add_argument("-H", "--branch", dest="branch", type=str,
|
||||
action="store", help="Defines branch")
|
||||
parser.add_argument("-R", "--branding", dest="branding", type=str,
|
||||
action="store", help="Provides branding path")
|
||||
args = parser.parse_args()
|
||||
|
||||
# vars
|
||||
common.os_family = utils.host_platform()
|
||||
common.platform = args.platform
|
||||
common.prefix = common.platformPrefixes[common.platform] if common.platform in common.platformPrefixes else ""
|
||||
common.targets = args.targets
|
||||
common.clean = "clean" in args.targets
|
||||
common.sign = "sign" in args.targets
|
||||
common.deploy = "deploy" in args.targets
|
||||
if args.version: common.version = args.version
|
||||
else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0")
|
||||
utils.set_env("PRODUCT_VERSION", common.version)
|
||||
utils.set_env("BUILD_VERSION", common.version)
|
||||
if args.build: common.build = args.build
|
||||
else: common.build = utils.get_env("BUILD_NUMBER", "0")
|
||||
utils.set_env("BUILD_NUMBER", common.build)
|
||||
if args.branch: common.branch = args.branch
|
||||
else: common.branch = utils.get_env("BRANCH_NAME", "null")
|
||||
utils.set_env("BRANCH_NAME", common.branch)
|
||||
common.branding = args.branding
|
||||
common.timestamp = utils.get_timestamp()
|
||||
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
|
||||
common.branding_dir = utils.get_abspath(common.workspace_dir + "/" + args.branding) if args.branding else common.workspace_dir
|
||||
common.summary = []
|
||||
utils.log("os_family: " + common.os_family)
|
||||
utils.log("platform: " + str(common.platform))
|
||||
utils.log("prefix: " + str(common.prefix))
|
||||
utils.log("targets: " + str(common.targets))
|
||||
utils.log("clean: " + str(common.clean))
|
||||
utils.log("sign: " + str(common.sign))
|
||||
utils.log("deploy: " + str(common.deploy))
|
||||
utils.log("version: " + common.version)
|
||||
utils.log("build: " + common.build)
|
||||
utils.log("branding: " + str(common.branding))
|
||||
utils.log("timestamp: " + common.timestamp)
|
||||
utils.log("workspace_dir: " + common.workspace_dir)
|
||||
utils.log("branding_dir: " + common.branding_dir)
|
||||
|
||||
# branding
|
||||
if utils.branding is not None:
|
||||
branding_path = utils.get_path('..', utils.branding)
|
||||
sys.path.insert(-1, utils.get_path(branding_path, 'build_tools/scripts'))
|
||||
if common.branding is not None:
|
||||
sys.path.insert(-1, \
|
||||
utils.get_path("../" + common.branding + "/build_tools/scripts"))
|
||||
|
||||
import package_core
|
||||
import package_desktop
|
||||
import package_server
|
||||
import package_builder
|
||||
import package_mobile
|
||||
|
||||
# build
|
||||
import package
|
||||
package.make(utils.product)
|
||||
utils.set_cwd(common.workspace_dir, verbose=True)
|
||||
if "core" in common.targets:
|
||||
package_core.make()
|
||||
if "closuremaps_sdkjs_opensource" in common.targets:
|
||||
package_core.deploy_closuremaps_sdkjs("opensource")
|
||||
if "closuremaps_sdkjs_commercial" in common.targets:
|
||||
package_core.deploy_closuremaps_sdkjs("commercial")
|
||||
if "closuremaps_webapps" in common.targets:
|
||||
package_core.deploy_closuremaps_webapps("opensource")
|
||||
if "desktop" in common.targets:
|
||||
package_desktop.make()
|
||||
if "builder" in common.targets:
|
||||
package_builder.make()
|
||||
if "server_community" in common.targets:
|
||||
package_server.make("community")
|
||||
if "server_enterprise" in common.targets:
|
||||
package_server.make("enterprise")
|
||||
if "server_developer" in common.targets:
|
||||
package_server.make("developer")
|
||||
if "server_prerequisites" in common.targets:
|
||||
package_server.make("prerequisites")
|
||||
if "mobile" in common.targets:
|
||||
package_mobile.make()
|
||||
|
||||
# summary
|
||||
utils.log_h1("Build summary")
|
||||
exitcode = 0
|
||||
for i in common.summary:
|
||||
if list(i.values())[0]:
|
||||
utils.log("[ OK ] " + list(i.keys())[0])
|
||||
else:
|
||||
utils.log("[FAILED] " + list(i.keys())[0])
|
||||
exitcode = 1
|
||||
|
||||
exit(exitcode)
|
||||
|
||||
1159
scripts/base.py
1159
scripts/base.py
File diff suppressed because it is too large
Load Diff
113
scripts/build.py
113
scripts/build.py
@ -1,113 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import multiprocessing
|
||||
|
||||
def make_pro_file(makefiles_dir, pro_file):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
old_env = dict(os.environ)
|
||||
|
||||
# if you need change output libraries path - set the env variable
|
||||
# base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
|
||||
|
||||
isAndroid = False if (-1 == platform.find("android")) else True
|
||||
if isAndroid:
|
||||
toolchain_platform = "linux-x86_64"
|
||||
if ("mac" == base.host_platform()):
|
||||
toolchain_platform = "darwin-x86_64"
|
||||
base.set_env("ANDROID_NDK_HOST", toolchain_platform)
|
||||
old_path = base.get_env("PATH")
|
||||
new_path = base.qt_setup(platform) + "/bin:"
|
||||
new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
|
||||
new_path += old_path
|
||||
base.set_env("PATH", new_path)
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
|
||||
|
||||
if (-1 != platform.find("ios")):
|
||||
base.hack_xcode_ios()
|
||||
|
||||
# makefile suffix
|
||||
file_suff = platform
|
||||
if (config.check_option("config", "debug")):
|
||||
file_suff += "_debug_"
|
||||
file_suff += config.option("branding")
|
||||
|
||||
# setup qt
|
||||
qt_dir = base.qt_setup(platform)
|
||||
base.set_env("OS_DEPLOY", platform)
|
||||
|
||||
# qmake CONFIG+=...
|
||||
config_param = base.qt_config(platform)
|
||||
|
||||
# qmake ADDON
|
||||
qmake_addon = []
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon = config.option("qmake_addon").split()
|
||||
|
||||
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
|
||||
print("THIS PLATFORM IS NOT SUPPORTED")
|
||||
continue
|
||||
|
||||
# non windows platform
|
||||
if not base.is_windows():
|
||||
if base.is_file(makefiles_dir + "/build.makefile_" + file_suff):
|
||||
base.delete_file(makefiles_dir + "/build.makefile_" + file_suff)
|
||||
print("make file: " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if ("1" == config.option("clean")):
|
||||
base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if not base.is_file(pro_file):
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff, "-j" + str(multiprocessing.cpu_count())])
|
||||
else:
|
||||
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
|
||||
else:
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
|
||||
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_addon_string = ""
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
if ("1" == config.option("clean")):
|
||||
qmake_bat.append("call nmake clean -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_bat.append("call nmake distclean -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
qmake_bat.append("set CL=/MP")
|
||||
qmake_bat.append("call nmake -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.run_as_bat(qmake_bat)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
|
||||
base.delete_file(".qmake.stash")
|
||||
|
||||
# make build.pro
|
||||
def make():
|
||||
make_pro_file("makefiles", "build.pro")
|
||||
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
|
||||
# check replace
|
||||
new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/docbuilder.h")
|
||||
if ("2019" == config.option("vs-version")):
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com", "docbuilder.com_2019.sln")
|
||||
if (True):
|
||||
new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
|
||||
base.restorePathForBuilder(new_path_net)
|
||||
else:
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com", "docbuilder.com.sln")
|
||||
base.restorePathForBuilder(new_replace_path)
|
||||
return
|
||||
@ -27,89 +27,91 @@ def correct_sdkjs_licence(directory):
|
||||
def make():
|
||||
if ("1" == base.get_env("OO_NO_BUILD_JS")):
|
||||
return
|
||||
if not base.is_need_build_js():
|
||||
return
|
||||
|
||||
base.set_env('NODE_ENV', 'production')
|
||||
|
||||
base_dir = base.get_script_dir() + "/.."
|
||||
out_dir = base_dir + "/out/js/";
|
||||
out_dir = base_dir + "/out/js/"
|
||||
branding = config.option("branding-name")
|
||||
if ("" == branding):
|
||||
branding = "onlyoffice"
|
||||
out_dir += branding
|
||||
base.create_dir(out_dir)
|
||||
|
||||
isOnlyMobile = False
|
||||
if (config.option("module") == "mobile"):
|
||||
isOnlyMobile = True
|
||||
|
||||
# builder
|
||||
build_interface(base_dir + "/../web-apps/build")
|
||||
build_sdk_builder(base_dir + "/../sdkjs/build")
|
||||
base.create_dir(out_dir + "/builder")
|
||||
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/builder/web-apps")
|
||||
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/builder/sdkjs")
|
||||
correct_sdkjs_licence(out_dir + "/builder/sdkjs")
|
||||
if not isOnlyMobile:
|
||||
base.cmd_in_dir(base_dir + "/../web-apps/translation", "python", ["merge_and_check.py"])
|
||||
build_interface(base_dir + "/../web-apps/build")
|
||||
build_sdk_builder(base_dir + "/../sdkjs/build")
|
||||
base.create_dir(out_dir + "/builder")
|
||||
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/builder/web-apps")
|
||||
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/builder/sdkjs")
|
||||
correct_sdkjs_licence(out_dir + "/builder/sdkjs")
|
||||
|
||||
# desktop
|
||||
if config.check_option("module", "desktop"):
|
||||
if config.check_option("module", "desktop") and not isOnlyMobile:
|
||||
build_sdk_desktop(base_dir + "/../sdkjs/build")
|
||||
base.create_dir(out_dir + "/desktop")
|
||||
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/desktop/sdkjs")
|
||||
correct_sdkjs_licence(out_dir + "/desktop/sdkjs")
|
||||
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/desktop/web-apps")
|
||||
if not base.is_file(out_dir + "/desktop/sdkjs/common/AllFonts.js"):
|
||||
base.copy_file(base_dir + "/../sdkjs/common/HtmlFileInternal/AllFonts.js", out_dir + "/desktop/sdkjs/common/AllFonts.js")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/embed")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/mobile")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/embed")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/mobile")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/embed")
|
||||
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/mobile")
|
||||
|
||||
deldirs = ['ie', 'mobile', 'embed']
|
||||
[base.delete_dir(root + "/" + d) for root, dirs, f in os.walk(out_dir + "/desktop/web-apps/apps") for d in dirs if d in deldirs]
|
||||
|
||||
base.copy_file(base_dir + "/../web-apps/apps/api/documents/index.html.desktop", out_dir + "/desktop/web-apps/apps/api/documents/index.html")
|
||||
|
||||
build_interface(base_dir + "/../desktop-apps/common/loginpage/build")
|
||||
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/index.html", out_dir + "/desktop/index.html")
|
||||
|
||||
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/noconnect.html", out_dir + "/desktop/noconnect.html")
|
||||
|
||||
# mobile
|
||||
if config.check_option("module", "mobile"):
|
||||
build_sdk_native(base_dir + "/../sdkjs/build", False)
|
||||
build_sdk_native(base_dir + "/../sdkjs/build")
|
||||
base.create_dir(out_dir + "/mobile")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs")
|
||||
vendor_dir_src = base_dir + "/../web-apps/vendor/"
|
||||
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners_word.js")
|
||||
|
||||
prefix_js = [
|
||||
vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs-native/common/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"
|
||||
]
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/cell/native/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners_cell.js")
|
||||
postfix_js = [
|
||||
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
|
||||
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
|
||||
]
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners_slide.js")
|
||||
base.join_scripts(prefix_js, out_dir + "/mobile/sdkjs/banners.js")
|
||||
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/word")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/word/script.bin")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/cell")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/cell/script.bin")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/slide")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/slide/script.bin")
|
||||
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners_word.js")
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners_cell.js")
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners_slide.js")
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
|
||||
return
|
||||
|
||||
# JS build
|
||||
def _run_npm(directory):
|
||||
return base.cmd_in_dir(directory, "npm", ["install"])
|
||||
retValue = base.cmd_in_dir(directory, "npm", ["install"], True)
|
||||
if (0 != retValue):
|
||||
retValue = base.cmd_in_dir(directory, "npm", ["install", "--verbose"])
|
||||
return retValue
|
||||
|
||||
def _run_npm_ci(directory):
|
||||
return base.cmd_in_dir(directory, "npm", ["ci"])
|
||||
|
||||
def _run_npm_cli(directory):
|
||||
return base.cmd_in_dir(directory, "npm", ["install", "-g", "grunt-cli"])
|
||||
@ -119,7 +121,7 @@ def _run_grunt(directory, params=[]):
|
||||
|
||||
def build_interface(directory):
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, ["--force"] + base.web_apps_addons_param())
|
||||
_run_grunt(directory, ["--force", "--verbose"] + base.web_apps_addons_param())
|
||||
return
|
||||
|
||||
def get_build_param(minimize=True):
|
||||
@ -139,33 +141,47 @@ def build_sdk_desktop(directory):
|
||||
def build_sdk_builder(directory):
|
||||
#_run_npm_cli(directory)
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param())
|
||||
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param() + ["--map"])
|
||||
return
|
||||
|
||||
def build_sdk_native(directory, minimize=True):
|
||||
#_run_npm_cli(directory)
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param())
|
||||
addons = base.sdkjs_addons_param()
|
||||
if not config.check_option("sdkjs-addons", "sdkjs-native"):
|
||||
addons.append("--addon=sdkjs-native")
|
||||
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
|
||||
return
|
||||
|
||||
|
||||
def build_sdkjs_develop(root_dir):
|
||||
external_folder = config.option("--external-folder")
|
||||
if (external_folder != ""):
|
||||
external_folder = "/" + external_folder
|
||||
|
||||
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
|
||||
|
||||
|
||||
def build_js_develop(root_dir):
|
||||
#_run_npm_cli(root_dir + "/sdkjs/build")
|
||||
external_folder = config.option("--external-folder")
|
||||
if (external_folder != ""):
|
||||
external_folder = "/" + external_folder
|
||||
|
||||
_run_npm(root_dir + external_folder + "/sdkjs/build")
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
|
||||
build_sdkjs_develop(root_dir)
|
||||
|
||||
_run_npm(root_dir + external_folder + "/web-apps/build")
|
||||
_run_npm(root_dir + external_folder + "/web-apps/build/sprites")
|
||||
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
|
||||
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
|
||||
base.cmd_in_dir(root_dir + external_folder + "/web-apps/translation", "python", ["merge_and_check.py"])
|
||||
|
||||
old_cur = os.getcwd()
|
||||
old_product_version = base.get_env("PRODUCT_VERSION")
|
||||
base.set_env("PRODUCT_VERSION", old_product_version + "d")
|
||||
os.chdir(root_dir + external_folder + "/web-apps/vendor/framework7-react")
|
||||
base.cmd("npm", ["install"])
|
||||
base.cmd("npm", ["ci"])
|
||||
base.cmd("npm", ["run", "deploy-word"])
|
||||
base.cmd("npm", ["run", "deploy-cell"])
|
||||
base.cmd("npm", ["run", "deploy-slide"])
|
||||
|
||||
@ -14,6 +14,9 @@ parser.add_option("--output",
|
||||
parser.add_option("--write-version",
|
||||
action="store_true", dest="write_version", default=False,
|
||||
help="Create version file of build")
|
||||
parser.add_option("--minimize",
|
||||
action="store", type="string", dest="minimize", default="0",
|
||||
help="Is minimized version")
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
|
||||
def write_version_files(output_dir):
|
||||
@ -31,7 +34,12 @@ def write_version_files(output_dir):
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
config.extend_option("jsminimize", "0")
|
||||
config.parse_defaults()
|
||||
|
||||
isMinimize = False
|
||||
if ("1" == options.minimize or "true" == options.minimize):
|
||||
isMinimize = True
|
||||
config.set_option("jsminimize", "disable")
|
||||
|
||||
branding = config.option("branding-name")
|
||||
if ("" == branding):
|
||||
@ -45,41 +53,32 @@ if (options.output):
|
||||
|
||||
base.create_dir(out_dir)
|
||||
|
||||
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
|
||||
build_js.build_sdk_native(base_dir + "/../sdkjs/build", isMinimize)
|
||||
vendor_dir_src = base_dir + "/../web-apps/vendor/"
|
||||
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners_word.js")
|
||||
prefix_js = [
|
||||
vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs-native/common/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"
|
||||
]
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/cell/native/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners_cell.js")
|
||||
postfix_js = [
|
||||
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
|
||||
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
|
||||
]
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners_slide.js")
|
||||
base.join_scripts(prefix_js, out_dir + "/banners.js")
|
||||
|
||||
base.create_dir(out_dir + "/word")
|
||||
base.join_scripts([out_dir + "/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/word/script.bin")
|
||||
base.create_dir(out_dir + "/cell")
|
||||
base.join_scripts([out_dir + "/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/cell/script.bin")
|
||||
base.create_dir(out_dir + "/slide")
|
||||
base.join_scripts([out_dir + "/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
|
||||
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/slide/script.bin")
|
||||
|
||||
base.delete_file(out_dir + "/banners_word.js")
|
||||
base.delete_file(out_dir + "/banners_cell.js")
|
||||
base.delete_file(out_dir + "/banners_slide.js")
|
||||
base.delete_file(out_dir + "/banners.js")
|
||||
|
||||
# Write sdk version mark file if needed
|
||||
if (options.write_version):
|
||||
|
||||
@ -11,13 +11,13 @@ def make():
|
||||
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
server_dir = base.get_script_dir() + "/../../server"
|
||||
server_admin_panel_dir = base.get_script_dir() + "/../../server-admin-panel"
|
||||
branding_dir = server_dir + "/branding"
|
||||
|
||||
if("" != config.option("branding")):
|
||||
branding_dir = git_dir + '/' + config.option("branding") + '/server'
|
||||
|
||||
base.cmd_in_dir(server_dir, "npm", ["install"])
|
||||
base.cmd_in_dir(server_dir, "grunt", ["--no-color", "-v"] + base.server_addons_param())
|
||||
build_server_with_addons()
|
||||
|
||||
#env variables
|
||||
product_version = base.get_env('PRODUCT_VERSION')
|
||||
@ -30,18 +30,17 @@ def make():
|
||||
|
||||
cur_date = datetime.date.today().strftime("%m/%d/%Y")
|
||||
|
||||
server_build_dir = server_dir + "/build/server"
|
||||
|
||||
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
|
||||
base.replaceInFileRE(server_build_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
|
||||
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
|
||||
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
|
||||
base.replaceInFileRE(server_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
|
||||
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
|
||||
|
||||
custom_public_key = branding_dir + '/debug.js'
|
||||
|
||||
if(base.is_exist(custom_public_key)):
|
||||
base.copy_file(custom_public_key, server_build_dir + '/Common/sources')
|
||||
base.copy_file(custom_public_key, server_dir + '/Common/sources')
|
||||
|
||||
pkg_target = "node14"
|
||||
#node22 packaging has issue https://github.com/yao-pkg/pkg/issues/87
|
||||
pkg_target = "node20"
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
pkg_target += "-linux"
|
||||
@ -51,16 +50,29 @@ def make():
|
||||
if ("windows" == base.host_platform()):
|
||||
pkg_target += "-win"
|
||||
|
||||
base.cmd_in_dir(server_build_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
|
||||
base.cmd_in_dir(server_build_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
|
||||
base.cmd_in_dir(server_build_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
|
||||
base.cmd_in_dir(server_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=6144", "-o", "docservice"])
|
||||
base.cmd_in_dir(server_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
|
||||
base.cmd_in_dir(server_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
|
||||
if "server-admin-panel" in base.get_server_addons() and base.is_exist(server_admin_panel_dir):
|
||||
base.cmd_in_dir(server_admin_panel_dir + "/server", "pkg", [".", "-t", pkg_target, "-o", "adminpanel"])
|
||||
|
||||
example_dir = base.get_script_dir() + "/../../document-server-integration/web/documentserver-example/nodejs"
|
||||
base.delete_dir(example_dir + "/node_modules")
|
||||
base.cmd_in_dir(example_dir, "npm", ["install"])
|
||||
base.cmd_in_dir(example_dir, "npm", ["ci"])
|
||||
base.cmd_in_dir(example_dir, "pkg", [".", "-t", pkg_target, "-o", "example"])
|
||||
|
||||
def build_server_with_addons():
|
||||
addons = {}
|
||||
addons["server"] = [True, False]
|
||||
addons.update(base.get_server_addons())
|
||||
for addon in addons:
|
||||
if (addon):
|
||||
addon_dir = base.get_script_dir() + "/../../" + addon
|
||||
if (base.is_exist(addon_dir + "/package.json")):
|
||||
base.print_info("npm ci: " + addon)
|
||||
base.cmd_in_dir(addon_dir, "npm", ["ci"])
|
||||
base.print_info("npm run build: " + addon)
|
||||
base.cmd_in_dir(addon_dir, "npm", ["run", "build"])
|
||||
|
||||
def build_server_develop():
|
||||
server_dir = base.get_script_dir() + "/../../server"
|
||||
base.cmd_in_dir(server_dir, "npm", ["install"])
|
||||
base.cmd_in_dir(server_dir, "grunt", ["develop", "-v"] + base.server_addons_param())
|
||||
build_server_with_addons()
|
||||
|
||||
75
scripts/build_sln.py
Normal file
75
scripts/build_sln.py
Normal file
@ -0,0 +1,75 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(__file__) + "/..")
|
||||
import sln
|
||||
import qmake
|
||||
|
||||
# make solution
|
||||
def make(solution=""):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
|
||||
if ("" == solution):
|
||||
solution = "./sln.json"
|
||||
projects = sln.get_projects(solution, platform)
|
||||
|
||||
for pro in projects:
|
||||
qmake_main_addon = ""
|
||||
if (0 == platform.find("android")) and (-1 != pro.find("X2tConverter.pro")):
|
||||
if config.check_option("config", "debug") and not config.check_option("config", "disable_x2t_debug_strip"):
|
||||
print("[WARNING:] temporary enable strip for x2t library in debug")
|
||||
qmake_main_addon += "build_strip_debug"
|
||||
|
||||
qmake.make(platform, pro, qmake_main_addon)
|
||||
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
|
||||
qmake.make(platform, pro, "xcframework_platform_ios_simulator")
|
||||
|
||||
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
|
||||
# check branding libs
|
||||
if (config.option("branding-name") == "onlyoffice"):
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
core_lib_unbranding_dir = os.getcwd() + "/../core/build/lib/" + platform + base.qt_dst_postfix()
|
||||
if not base.is_dir(core_lib_unbranding_dir):
|
||||
base.create_dir(core_lib_unbranding_dir)
|
||||
core_lib_branding_dir = os.getcwd() + "/../core/build/onlyoffice/lib/" + platform + base.qt_dst_postfix()
|
||||
base.copy_file(core_lib_branding_dir + "/doctrenderer.dll", core_lib_unbranding_dir + "/doctrenderer.dll")
|
||||
base.copy_file(core_lib_branding_dir + "/doctrenderer.lib", core_lib_unbranding_dir + "/doctrenderer.lib")
|
||||
|
||||
# check replace
|
||||
directory_builder_branding = os.getcwd() + "/../core/DesktopEditor/doctrenderer"
|
||||
if base.is_dir(directory_builder_branding):
|
||||
new_replace_path = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.com/src/docbuilder.h")
|
||||
if ("2019" == config.option("vs-version")):
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
|
||||
if (True):
|
||||
new_path_net = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.net/src/docbuilder.net.cpp")
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
|
||||
base.restorePathForBuilder(new_path_net)
|
||||
else:
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
|
||||
base.restorePathForBuilder(new_replace_path)
|
||||
|
||||
# build Java docbuilder wrapper
|
||||
if config.check_option("module", "builder") and "onlyoffice" == config.branding():
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
# build JNI library
|
||||
qmake.make(platform, base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java/src/jni/docbuilder_jni.pro", "", True)
|
||||
# build Java code to JAR
|
||||
base.cmd_in_dir(base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java", "python", ["make.py"])
|
||||
|
||||
return
|
||||
@ -24,7 +24,7 @@ def parse():
|
||||
|
||||
# all platforms
|
||||
global platforms
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp",
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "win_arm64",
|
||||
"linux_64", "linux_32", "linux_arm64",
|
||||
"mac_64", "mac_arm64",
|
||||
"ios",
|
||||
@ -57,11 +57,11 @@ def parse():
|
||||
if not check_option("platform", "mac_64"):
|
||||
options["platform"] = "mac_64 " + options["platform"]
|
||||
|
||||
if ("linux" == host_platform) and check_option("platform", "linux_arm64") and not base.is_os_arm():
|
||||
if not check_option("platform", "linux_64"):
|
||||
# linux_64 binaries need only for desktop
|
||||
if check_option("module", "desktop"):
|
||||
options["platform"] = "linux_64 " + options["platform"]
|
||||
if (False):
|
||||
# use qemu on deploy for emulation
|
||||
if ("windows" == host_platform) and check_option("platform", "win_arm64") and not base.is_os_arm():
|
||||
if not check_option("platform", "win_64"):
|
||||
options["platform"] = "win_64 " + options["platform"]
|
||||
|
||||
if check_option("platform", "xp") and ("windows" == host_platform):
|
||||
options["platform"] += " win_64_xp win_32_xp"
|
||||
@ -70,19 +70,39 @@ def parse():
|
||||
options["platform"] += " android_arm64_v8a android_armv7 android_x86 android_x86_64"
|
||||
|
||||
# check vs-version
|
||||
if ("" == option("vs-version")):
|
||||
options["vs-version"] = "2015"
|
||||
|
||||
# enable v8 8.9 version, if compiler support sources
|
||||
if ("linux" == host_platform) and (5004 <= base.get_gcc_version()) and not check_option("platform", "android"):
|
||||
extend_option("config", "v8_version_89")
|
||||
if ("windows" == host_platform) and ("" == option("vs-version")):
|
||||
options["vs-version"] = "2019"
|
||||
if check_option("platform", "win_64_xp") or check_option("platform", "win_32_xp"):
|
||||
options["vs-version"] = "2015"
|
||||
|
||||
if ("windows" == host_platform) and ("2019" == option("vs-version")):
|
||||
extend_option("config", "v8_version_89")
|
||||
extend_option("config", "vs2019")
|
||||
extend_option("config", "vs2019")
|
||||
|
||||
# sysroot setup
|
||||
if "linux" != host_platform and "sysroot" in options:
|
||||
options["sysroot"] = ""
|
||||
|
||||
if check_option("platform", "linux_arm64"):
|
||||
extend_option("config", "v8_version_89")
|
||||
if "linux" == host_platform and "sysroot" in options:
|
||||
if options["sysroot"] == "0":
|
||||
options["sysroot"] = ""
|
||||
elif options["sysroot"] == "1":
|
||||
dst_dir = os.path.abspath(base.get_script_dir(__file__) + '/../tools/linux/sysroot')
|
||||
dst_dir_amd64 = dst_dir + "/ubuntu16-amd64-sysroot"
|
||||
dst_dir_arm64 = dst_dir + "/ubuntu16-arm64-sysroot"
|
||||
if not base.is_dir(dst_dir_amd64) or not base.is_dir(dst_dir_arm64):
|
||||
base.cmd_in_dir(dst_dir, "python3", ["./fetch.py", "all"])
|
||||
options["sysroot_linux_64"] = dst_dir_amd64
|
||||
options["sysroot_linux_arm64"] = dst_dir_arm64
|
||||
else:
|
||||
# specific sysroot => one platform for build!
|
||||
options["sysroot"] = "1"
|
||||
options["sysroot_linux_64"] = options["sysroot"]
|
||||
options["sysroot_linux_arm64"] = options["sysroot"]
|
||||
|
||||
if is_cef_107():
|
||||
extend_option("config", "cef_version_107")
|
||||
if is_v8_60():
|
||||
extend_option("config", "v8_version_60")
|
||||
|
||||
# check vs-path
|
||||
if ("windows" == host_platform) and ("" == option("vs-path")):
|
||||
@ -105,8 +125,26 @@ def parse():
|
||||
if not "sdkjs-plugin-server" in options:
|
||||
options["sdkjs-plugin-server"] = "default"
|
||||
|
||||
if not "arm64-toolchain-bin" in options:
|
||||
options["arm64-toolchain-bin"] = "/usr/bin"
|
||||
if check_option("platform", "ios"):
|
||||
if not check_option("config", "no_bundle_xcframeworks"):
|
||||
if not check_option("config", "bundle_xcframeworks"):
|
||||
extend_option("config", "bundle_xcframeworks")
|
||||
|
||||
if check_option("config", "bundle_xcframeworks"):
|
||||
if not check_option("config", "bundle_dylibs"):
|
||||
extend_option("config", "bundle_dylibs")
|
||||
|
||||
if ("mac" == host_platform) and check_option("module", "desktop"):
|
||||
if not check_option("config", "bundle_dylibs"):
|
||||
extend_option("config", "bundle_dylibs")
|
||||
|
||||
if check_option("use-system-qt", "1"):
|
||||
base.cmd_in_dir(base.get_script_dir() + "/../tools/linux", "python", ["use_system_qt.py"])
|
||||
options["qt-dir"] = base.get_script_dir() + "/../tools/linux/system_qt"
|
||||
|
||||
# disable all warnings (enable if needed with core_enable_all_warnings options)
|
||||
if not check_option("config", "core_enable_all_warnings"):
|
||||
extend_option("config", "core_disable_all_warnings")
|
||||
|
||||
return
|
||||
|
||||
@ -123,6 +161,9 @@ def check_compiler(platform):
|
||||
if (0 == platform.find("win")):
|
||||
compiler["compiler"] = "msvc" + options["vs-version"]
|
||||
compiler["compiler_64"] = "msvc" + options["vs-version"] + "_64"
|
||||
if (0 == platform.find("win_arm")):
|
||||
compiler["compiler"] = "msvc" + options["vs-version"] + "_arm"
|
||||
compiler["compiler_64"] = "msvc" + options["vs-version"] + "_arm64"
|
||||
elif (0 == platform.find("linux")):
|
||||
compiler["compiler"] = "gcc"
|
||||
compiler["compiler_64"] = "gcc_64"
|
||||
@ -166,6 +207,9 @@ def extend_option(name, value):
|
||||
else:
|
||||
options[name] = value
|
||||
|
||||
def set_option(name, value):
|
||||
options[name] = value
|
||||
|
||||
def branding():
|
||||
branding = option("branding-name")
|
||||
if ("" == branding):
|
||||
@ -180,6 +224,26 @@ def is_mobile_platform():
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_custom_sysroot_bin(platform):
|
||||
use_platform = platform
|
||||
if "linux_arm64" == platform and not base.is_os_arm():
|
||||
# use cross compiler
|
||||
use_platform = "linux_64"
|
||||
|
||||
return option("sysroot_" + use_platform) + "/usr/bin"
|
||||
|
||||
def get_custom_sysroot_lib(platform, isNatural=False):
|
||||
use_platform = platform
|
||||
if "linux_arm64" == platform and not base.is_os_arm() and not isNatural:
|
||||
# use cross compiler
|
||||
use_platform = "linux_64"
|
||||
|
||||
if ("linux_64" == use_platform):
|
||||
return option("sysroot_linux_64") + "/usr/lib/x86_64-linux-gnu"
|
||||
if ("linux_arm64" == use_platform):
|
||||
return option("sysroot_linux_arm64") + "/usr/lib/aarch64-linux-gnu"
|
||||
return ""
|
||||
|
||||
def parse_defaults():
|
||||
defaults_path = base.get_script_dir() + "/../defaults"
|
||||
if ("" != option("branding")):
|
||||
@ -203,4 +267,28 @@ def parse_defaults():
|
||||
options[name] = options[name].replace("default", defaults_options[name])
|
||||
else:
|
||||
options[name] = defaults_options[name]
|
||||
|
||||
if ("config_addon" in defaults_options):
|
||||
extend_option("config", defaults_options["config_addon"])
|
||||
|
||||
return
|
||||
|
||||
def is_cef_107():
|
||||
if ("linux" == base.host_platform()) and (5004 > base.get_gcc_version()) and not check_option("platform", "android"):
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_v8_60():
|
||||
if check_option("platform", "linux_arm64"):
|
||||
return False
|
||||
|
||||
if ("linux" == base.host_platform()) and (5004 > base.get_gcc_version()) and not check_option("platform", "android"):
|
||||
return True
|
||||
|
||||
if ("windows" == base.host_platform()) and ("2015" == option("vs-version")):
|
||||
return True
|
||||
|
||||
#if check_option("config", "use_v8"):
|
||||
# return True
|
||||
|
||||
return False
|
||||
|
||||
@ -13,12 +13,19 @@ import cef
|
||||
import icu
|
||||
import openssl
|
||||
import curl
|
||||
import websocket
|
||||
import websocket_all
|
||||
import v8
|
||||
import html2
|
||||
import iwork
|
||||
import md
|
||||
import hunspell
|
||||
import glew
|
||||
import harfbuzz
|
||||
import oo_brotli
|
||||
import hyphen
|
||||
import googletest
|
||||
import libvlc
|
||||
import heif
|
||||
|
||||
def check_android_ndk_macos_arm(dir):
|
||||
if base.is_dir(dir + "/darwin-x86_64") and not base.is_dir(dir + "/darwin-arm64"):
|
||||
@ -39,10 +46,21 @@ def make():
|
||||
openssl.make()
|
||||
v8.make()
|
||||
html2.make()
|
||||
iwork.make(False)
|
||||
md.make()
|
||||
hunspell.make(False)
|
||||
harfbuzz.make()
|
||||
glew.make()
|
||||
hyphen.make()
|
||||
googletest.make()
|
||||
oo_brotli.make()
|
||||
heif.make()
|
||||
|
||||
if config.check_option("build-libvlc", "1"):
|
||||
libvlc.make()
|
||||
|
||||
if config.check_option("module", "mobile"):
|
||||
curl.make()
|
||||
websocket.make()
|
||||
if (config.check_option("platform", "android")):
|
||||
curl.make()
|
||||
websocket_all.make()
|
||||
return
|
||||
|
||||
163
scripts/core_common/modules/android/android_ndk.py
Executable file
163
scripts/core_common/modules/android/android_ndk.py
Executable file
@ -0,0 +1,163 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import re
|
||||
|
||||
def get_android_ndk_version():
|
||||
env_val = base.get_env("ANDROID_NDK_ROOT")
|
||||
if (env_val == ""):
|
||||
env_val = "21.1.6352462"
|
||||
return env_val.strip("/").split("/")[-1]
|
||||
|
||||
def get_android_ndk_version_major():
|
||||
val = get_android_ndk_version().split(".")[0]
|
||||
val = re.sub("[^0-9]", "", val)
|
||||
return int(val)
|
||||
|
||||
def get_sdk_api():
|
||||
if (23 > get_android_ndk_version_major()):
|
||||
return "21"
|
||||
return "23"
|
||||
|
||||
global archs
|
||||
archs = ["arm64", "arm", "x86_64", "x86"]
|
||||
|
||||
global platforms
|
||||
platforms = {
|
||||
"arm64" : {
|
||||
"abi" : "arm64-v8a",
|
||||
"target" : "aarch64-linux-android",
|
||||
"dst" : "arm64_v8a",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "aarch64-linux-android"
|
||||
},
|
||||
"arm" : {
|
||||
"abi" : "armeabi-v7a",
|
||||
"target" : "armv7a-linux-androideabi",
|
||||
"dst" : "armv7",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "arm-linux-android"
|
||||
},
|
||||
"x86_64" : {
|
||||
"arch" : "x86_64",
|
||||
"target" : "x86_64-linux-android",
|
||||
"dst" : "x86_64",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "x86_64-linux-android"
|
||||
},
|
||||
"x86" : {
|
||||
"arch" : "x86",
|
||||
"target" : "i686-linux-android",
|
||||
"dst" : "x86",
|
||||
"api" : get_sdk_api(),
|
||||
"old" : "i686-linux-android"
|
||||
}
|
||||
}
|
||||
|
||||
# todo: check arm host!
|
||||
global host
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
host = {
|
||||
"name" : "linux",
|
||||
"arch" : "linux-x86_64"
|
||||
}
|
||||
else:
|
||||
host = {
|
||||
"name" : "darwin",
|
||||
"arch" : "darwin-x86_64"
|
||||
}
|
||||
|
||||
def get_options_dict_as_array(opts):
|
||||
value = []
|
||||
for key in opts:
|
||||
value.append(key + "=" + opts[key])
|
||||
return value
|
||||
|
||||
def get_options_array_as_string(opts):
|
||||
return " ".join(opts)
|
||||
|
||||
def ndk_dir():
|
||||
return base.get_env("ANDROID_NDK_ROOT")
|
||||
|
||||
def sdk_dir():
|
||||
ndk_path = ndk_dir()
|
||||
if (-1 != ndk_path.find("/ndk/")):
|
||||
return ndk_path + "/../.."
|
||||
return ndk_path + "/.."
|
||||
|
||||
def toolchain_dir():
|
||||
return ndk_dir() + "/toolchains/llvm/prebuilt/" + host["arch"]
|
||||
|
||||
def prepare_platform(arch, cpp_standard=11):
|
||||
target = platforms[arch]["target"]
|
||||
api = platforms[arch]["api"]
|
||||
|
||||
ndk_directory = ndk_dir()
|
||||
toolchain = toolchain_dir()
|
||||
|
||||
base.set_env("TARGET", target)
|
||||
base.set_env("TOOLCHAIN", toolchain)
|
||||
base.set_env("NDK_STANDARD_ROOT", toolchain)
|
||||
base.set_env("ANDROIDVER", api)
|
||||
base.set_env("ANDROID_API", api)
|
||||
|
||||
base.set_env("AR", toolchain + "/bin/llvm-ar")
|
||||
base.set_env("AS", toolchain + "/bin/llvm-as")
|
||||
base.set_env("LD", toolchain + "/bin/ld")
|
||||
base.set_env("RANLIB", toolchain + "/bin/llvm-ranlib")
|
||||
base.set_env("STRIP", toolchain + "/bin/llvm-strip")
|
||||
|
||||
base.set_env("CC", target + api + "-clang")
|
||||
base.set_env("CXX", target + api + "-clang++")
|
||||
|
||||
ld_flags = "-Wl,--gc-sections,-rpath-link=" + toolchain + "/sysroot/usr/lib/"
|
||||
if (23 > get_android_ndk_version_major()):
|
||||
ld_flags += (" -L" + toolchain + "/" + platforms[arch]["old"] + "/lib")
|
||||
ld_flags += (" -L" + toolchain + "/sysroot/usr/lib/" + platforms[arch]["old"] + "/" + api)
|
||||
|
||||
base.set_env("LDFLAGS", ld_flags)
|
||||
base.set_env("PATH", toolchain + "/bin" + os.pathsep + base.get_env("PATH"))
|
||||
|
||||
cflags = [
|
||||
"-Os",
|
||||
"-ffunction-sections",
|
||||
"-fdata-sections",
|
||||
"-fvisibility=hidden",
|
||||
|
||||
"-Wno-unused-function",
|
||||
|
||||
"-fPIC",
|
||||
|
||||
"-I" + toolchain + "/sysroot/usr/include",
|
||||
|
||||
"-D__ANDROID_API__=" + api,
|
||||
"-DANDROID"
|
||||
]
|
||||
|
||||
cflags_string = " ".join(cflags)
|
||||
cppflags_string = cflags_string
|
||||
|
||||
if (cpp_standard >= 11):
|
||||
cppflags_string += " -std=c++11"
|
||||
|
||||
base.set_env("CFLAGS", cflags_string)
|
||||
base.set_env("CXXFLAGS", cppflags_string)
|
||||
base.set_env("CPPPLAGS", cflags_string)
|
||||
return
|
||||
|
||||
def extend_cflags(params):
|
||||
base.set_env("CFLAGS", base.get_env("CFLAGS") + " " + params)
|
||||
base.set_env("CPPFLAGS", base.get_env("CFLAGS"))
|
||||
return
|
||||
|
||||
def extend_cxxflags(params):
|
||||
base.set_env("CXXFLAGS", base.get_env("CXXFLAGS") + " " + params)
|
||||
return
|
||||
|
||||
def extend_ldflags(params):
|
||||
base.set_env("LDFLAGS", base.get_env("LDFLAGS") + " " + params)
|
||||
return
|
||||
94
scripts/core_common/modules/android/curl_android.py
Executable file
94
scripts/core_common/modules/android/curl_android.py
Executable file
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import android_ndk
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/curl"
|
||||
current_dir = os.path.abspath(current_dir)
|
||||
if not current_dir.endswith("/"):
|
||||
current_dir += "/"
|
||||
|
||||
lib_version = "curl-7_68_0"
|
||||
lib_name = "curl-7.68.0"
|
||||
|
||||
def fetch():
|
||||
if not base.is_dir(current_dir + lib_name):
|
||||
base.cmd("curl", ["-L", "-s", "-o", current_dir + lib_name + ".tar.gz",
|
||||
"https://github.com/curl/curl/releases/download/" + lib_version + "/" + lib_name + ".tar.gz"])
|
||||
base.cmd("tar", ["xfz", current_dir + lib_name + ".tar.gz", "-C", current_dir])
|
||||
return
|
||||
|
||||
def build_host():
|
||||
return
|
||||
|
||||
def build_arch(arch):
|
||||
dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
|
||||
if base.is_dir(dst_dir):
|
||||
return
|
||||
|
||||
android_ndk.prepare_platform(arch)
|
||||
|
||||
ndk_dir = android_ndk.ndk_dir()
|
||||
toolchain = android_ndk.toolchain_dir()
|
||||
|
||||
base.set_env("ANDROID_NDK_HOME", ndk_dir)
|
||||
base.set_env("ANDROID_NDK", ndk_dir)
|
||||
|
||||
arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
|
||||
base.create_dir(arch_build_dir)
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(current_dir + lib_name)
|
||||
|
||||
params = []
|
||||
if ("arm64" == arch):
|
||||
params.append("--host=aarch64-linux-android")
|
||||
elif ("arm" == arch):
|
||||
params.append("--host=arm-linux-androideabi")
|
||||
elif ("x86_64" == arch):
|
||||
params.append("--host=x86_64-linux-android")
|
||||
elif ("x86" == arch):
|
||||
params.append("--host=i686-linux-android")
|
||||
|
||||
openssl_dir = os.path.abspath(current_dir + "../openssl/build/android/" + android_ndk.platforms[arch]["dst"])
|
||||
|
||||
params.append("--enable-ipv6")
|
||||
params.append("--enable-static")
|
||||
params.append("--disable-shared")
|
||||
params.append("--prefix=" + arch_build_dir)
|
||||
params.append("--with-ssl=" + openssl_dir)
|
||||
|
||||
base.cmd("./configure", params)
|
||||
|
||||
base.cmd("make", ["clean"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
base.create_dir(dst_dir)
|
||||
base.copy_file(arch_build_dir + "/lib/libcurl.a", dst_dir)
|
||||
base.copy_dir(arch_build_dir + "/include", current_dir + "build/android/include")
|
||||
|
||||
base.delete_dir(arch_build_dir)
|
||||
return
|
||||
|
||||
def make():
|
||||
old_env = dict(os.environ)
|
||||
|
||||
fetch()
|
||||
|
||||
build_host()
|
||||
|
||||
for arch in android_ndk.archs:
|
||||
build_arch(arch)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
make()
|
||||
142
scripts/core_common/modules/android/icu_android.py
Executable file
142
scripts/core_common/modules/android/icu_android.py
Executable file
@ -0,0 +1,142 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import android_ndk
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
|
||||
current_dir = os.path.abspath(current_dir)
|
||||
if not current_dir.endswith("/"):
|
||||
current_dir += "/"
|
||||
|
||||
icu_major = "74"
|
||||
icu_minor = "2"
|
||||
|
||||
options = {
|
||||
"--enable-strict" : "no",
|
||||
"--enable-extras" : "no",
|
||||
"--enable-draft" : "yes",
|
||||
"--enable-samples" : "no",
|
||||
"--enable-tests" : "no",
|
||||
"--enable-renaming" : "yes",
|
||||
"--enable-icuio" : "no",
|
||||
"--enable-layoutex" : "no",
|
||||
"--with-library-bits" : "nochange",
|
||||
"--with-library-suffix" : "",
|
||||
"--enable-static" : "yes",
|
||||
"--enable-shared" : "no",
|
||||
"--with-data-packaging" : "archive"
|
||||
}
|
||||
|
||||
cpp_flags_base = [
|
||||
"-Os",
|
||||
"-ffunction-sections",
|
||||
"-fdata-sections",
|
||||
"-fvisibility=hidden",
|
||||
"-fPIC"
|
||||
]
|
||||
|
||||
cpp_flags = [
|
||||
"-fno-short-wchar",
|
||||
"-fno-short-enums",
|
||||
|
||||
"-DU_USING_ICU_NAMESPACE=0",
|
||||
"-DU_HAVE_NL_LANGINFO_CODESET=0",
|
||||
"-DU_TIMEZONE=0",
|
||||
"-DU_DISABLE_RENAMING=0",
|
||||
|
||||
"-DUCONFIG_NO_COLLATION=0",
|
||||
"-DUCONFIG_NO_FORMATTING=0",
|
||||
"-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
|
||||
"-DUCONFIG_NO_TRANSLITERATION=0",
|
||||
|
||||
"-DU_STATIC_IMPLEMENTATION"
|
||||
]
|
||||
|
||||
def fetch_icu(major, minor):
|
||||
if not base.is_dir(current_dir + "icu"):
|
||||
base.cmd("git", ["clone", "--depth", "1", "--branch", "release-" + major + "-" + minor, "https://github.com/unicode-org/icu.git", current_dir + "icu2"])
|
||||
base.copy_dir(current_dir + "icu2/icu4c", current_dir + "icu")
|
||||
base.delete_dir_with_access_error(current_dir + "icu2")
|
||||
return
|
||||
|
||||
def build_host():
|
||||
cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
|
||||
if not base.is_dir(cross_build_dir):
|
||||
base.create_dir(cross_build_dir)
|
||||
os.chdir(cross_build_dir)
|
||||
|
||||
ld_flags = "-pthread"
|
||||
if ("linux" == base.host_platform()):
|
||||
ld_flags += " -Wl,--gc-sections"
|
||||
else:
|
||||
# gcc on OSX does not support --gc-sections
|
||||
ld_flags += " -Wl,-dead_strip"
|
||||
|
||||
base.set_env("LDFLAGS", ld_flags)
|
||||
base.set_env("CPPFLAGS", android_ndk.get_options_array_as_string(cpp_flags_base + cpp_flags))
|
||||
|
||||
host_type = "Linux"
|
||||
if ("mac" == base.host_platform()):
|
||||
host_type = "MacOSX/GCC"
|
||||
|
||||
base.cmd("../source/runConfigureICU", [host_type, "--prefix=" + cross_build_dir] + android_ndk.get_options_dict_as_array(options))
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
|
||||
base.create_dir(current_dir + "build")
|
||||
base.copy_dir(cross_build_dir + "/include", current_dir + "build/include")
|
||||
|
||||
os.chdir(current_dir)
|
||||
return
|
||||
|
||||
def build_arch(arch):
|
||||
dst_dir = current_dir + "build/" + android_ndk.platforms[arch]["dst"]
|
||||
if base.is_dir(dst_dir):
|
||||
return
|
||||
|
||||
android_ndk.prepare_platform(arch)
|
||||
android_ndk.extend_cflags(" ".join(cpp_flags))
|
||||
|
||||
ndk_dir = android_ndk.ndk_dir()
|
||||
toolchain = android_ndk.toolchain_dir()
|
||||
|
||||
cross_build_dir = os.path.abspath(current_dir + "icu/cross_build")
|
||||
arch_build_dir = os.path.abspath(current_dir + "build/tmp")
|
||||
base.create_dir(arch_build_dir)
|
||||
|
||||
os.chdir(arch_build_dir)
|
||||
base.cmd("./../../icu/source/configure", ["--with-cross-build=" + cross_build_dir] +
|
||||
android_ndk.get_options_dict_as_array(options) + ["--host=" + android_ndk.platforms[arch]["target"], "--prefix=" + arch_build_dir])
|
||||
base.cmd("make", ["-j4"])
|
||||
os.chdir(current_dir)
|
||||
|
||||
base.create_dir(dst_dir)
|
||||
base.copy_file(arch_build_dir + "/lib/libicuuc.a", dst_dir)
|
||||
base.copy_file(arch_build_dir + "/stubdata/libicudata.a", dst_dir)
|
||||
base.copy_file(arch_build_dir + "/data/out/icudt" + icu_major + "l.dat", dst_dir)
|
||||
|
||||
base.delete_dir(arch_build_dir)
|
||||
return
|
||||
|
||||
def make():
|
||||
if not base.is_dir(current_dir):
|
||||
base.create_dir(current_dir)
|
||||
|
||||
old_env = dict(os.environ)
|
||||
|
||||
fetch_icu(icu_major, icu_minor)
|
||||
|
||||
build_host()
|
||||
|
||||
for arch in android_ndk.archs:
|
||||
build_arch(arch)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
make()
|
||||
94
scripts/core_common/modules/android/openssl_android.py
Executable file
94
scripts/core_common/modules/android/openssl_android.py
Executable file
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
import android_ndk
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
|
||||
current_dir = os.path.abspath(current_dir)
|
||||
if not current_dir.endswith("/"):
|
||||
current_dir += "/"
|
||||
|
||||
lib_name="openssl-1.1.1t"
|
||||
|
||||
options = [
|
||||
"no-shared",
|
||||
"no-tests",
|
||||
"enable-ssl3",
|
||||
"enable-ssl3-method",
|
||||
"enable-md2",
|
||||
"no-asm"
|
||||
]
|
||||
|
||||
def fetch():
|
||||
if not base.is_dir(current_dir + lib_name):
|
||||
base.cmd("curl", ["-L", "-s", "-o", current_dir + lib_name + ".tar.gz",
|
||||
"https://www.openssl.org/source/" + lib_name + ".tar.gz"])
|
||||
base.cmd("tar", ["xfz", current_dir + lib_name + ".tar.gz", "-C", current_dir])
|
||||
return
|
||||
|
||||
def build_host():
|
||||
# not needed, just create directories
|
||||
if not base.is_dir(current_dir + "/build"):
|
||||
base.create_dir(current_dir + "/build")
|
||||
if not base.is_dir(current_dir + "/build/android"):
|
||||
base.create_dir(current_dir + "/build/android")
|
||||
return
|
||||
|
||||
def build_arch(arch):
|
||||
dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
|
||||
if base.is_dir(dst_dir):
|
||||
return
|
||||
|
||||
android_ndk.prepare_platform(arch)
|
||||
|
||||
ndk_dir = android_ndk.ndk_dir()
|
||||
toolchain = android_ndk.toolchain_dir()
|
||||
|
||||
base.set_env("ANDROID_NDK_HOME", ndk_dir)
|
||||
base.set_env("ANDROID_NDK", ndk_dir)
|
||||
|
||||
arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
|
||||
base.create_dir(arch_build_dir)
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(current_dir + lib_name)
|
||||
|
||||
base.cmd("./Configure", ["android-" + arch, "--prefix=" + arch_build_dir, "-D__ANDROID_API__=" + android_ndk.platforms[arch]["api"]] + options)
|
||||
|
||||
base.replaceInFile("./Makefile", "LIB_CFLAGS=", "LIB_CFLAGS=-fvisibility=hidden ")
|
||||
base.replaceInFile("./Makefile", "LIB_CXXFLAGS=", "LIB_CXXFLAGS=-fvisibility=hidden ")
|
||||
|
||||
base.cmd("make", ["clean"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
base.create_dir(dst_dir)
|
||||
base.create_dir(dst_dir + "/lib")
|
||||
base.copy_file(arch_build_dir + "/lib/libcrypto.a", dst_dir + "/lib")
|
||||
base.copy_file(arch_build_dir + "/lib/libssl.a", dst_dir + "/lib")
|
||||
base.copy_dir(arch_build_dir + "/include", dst_dir + "/include")
|
||||
|
||||
base.delete_dir(arch_build_dir)
|
||||
return
|
||||
|
||||
def make():
|
||||
old_env = dict(os.environ)
|
||||
|
||||
fetch()
|
||||
|
||||
build_host()
|
||||
|
||||
for arch in android_ndk.archs:
|
||||
build_arch(arch)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
make()
|
||||
@ -22,10 +22,10 @@ def move_debug_libs_windows(dir):
|
||||
|
||||
def clean():
|
||||
if base.is_dir("boost_1_58_0"):
|
||||
base.delete_dir_with_access_error("boost_1_58_0");
|
||||
base.delete_dir_with_access_error("boost_1_58_0")
|
||||
base.delete_dir("boost_1_58_0")
|
||||
if base.is_dir("boost_1_72_0"):
|
||||
base.delete_dir_with_access_error("boost_1_72_0");
|
||||
base.delete_dir_with_access_error("boost_1_72_0")
|
||||
base.delete_dir("boost_1_72_0")
|
||||
if base.is_dir("build"):
|
||||
base.delete_dir("build")
|
||||
@ -73,6 +73,8 @@ def make():
|
||||
win_toolset = "msvc-14.2"
|
||||
win_boot_arg = "vc142"
|
||||
win_vs_version = "vc142"
|
||||
|
||||
# add "define=_ITERATOR_DEBUG_LEVEL=0" to b2 args before install for disable _ITERATOR_DEBUG_LEVEL
|
||||
if (-1 != config.option("platform").find("win_64")) and not base.is_file("../build/win_64/lib/libboost_system-" + win_vs_version + "-mt-x64-1_72.lib"):
|
||||
base.cmd("bootstrap.bat", [win_boot_arg])
|
||||
base.cmd("b2.exe", ["headers"])
|
||||
@ -83,14 +85,36 @@ def make():
|
||||
base.cmd("b2.exe", ["headers"])
|
||||
base.cmd("b2.exe", ["--clean"])
|
||||
base.cmd("b2.exe", ["--prefix=./../build/win_32", "link=static", "--with-filesystem", "--with-system", "--with-date_time", "--with-regex", "--toolset=" + win_toolset, "address-model=32", "install"])
|
||||
if (-1 != config.option("platform").find("win_arm64") and not base.is_file("../build/win_arm64/lib/libboost_system-" + win_vs_version + "-mt-a64-1_72.lib")):
|
||||
boost_bat = []
|
||||
boost_bat.append("call bootstrap.bat " + win_boot_arg) # first build b2 for win64, so vcvarsall_call with arm64 later
|
||||
vcvarsall_call = ("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + "x64_arm64")
|
||||
boost_bat.append(vcvarsall_call)
|
||||
boost_bat.append("call b2.exe headers")
|
||||
boost_bat.append("call b2.exe --clean")
|
||||
boost_bat.append("call b2.exe --prefix=./../build/win_arm64 architecture=arm link=static --with-filesystem --with-system --with-date_time --with-regex --toolset=" + win_toolset + " address-model=64 install")
|
||||
base.run_as_bat(boost_bat)
|
||||
correct_install_includes_win(base_dir, "win_64")
|
||||
correct_install_includes_win(base_dir, "win_32")
|
||||
correct_install_includes_win(base_dir, "win_32")
|
||||
correct_install_includes_win(base_dir, "win_arm64")
|
||||
|
||||
if config.check_option("platform", "linux_64") and not base.is_dir("../build/linux_64"):
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex"])
|
||||
base.cmd("./b2", ["headers"])
|
||||
base.cmd("./b2", ["--clean"])
|
||||
base.cmd("./b2", ["--prefix=./../build/linux_64", "link=static", "cxxflags=-fPIC", "install"])
|
||||
if config.option("sysroot") == "":
|
||||
addon_config = []
|
||||
addon_compile = []
|
||||
if "1" == config.option("use-clang"):
|
||||
addon_config = ["--with-toolset=clang"]
|
||||
addon_compile = ["cxxflags=-stdlib=libc++", "linkflags=-stdlib=libc++", "define=_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION"]
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex"] + addon_config)
|
||||
base.cmd("./b2", ["headers"])
|
||||
base.cmd("./b2", ["--clean"])
|
||||
base.cmd("./b2", ["--prefix=./../build/linux_64", "link=static", "cxxflags=-fPIC"] + addon_compile + ["install"])
|
||||
else: # build via qmake when custom sysroot is needed
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "linux_64")
|
||||
directory_build = base_dir + "/build/linux_64/lib"
|
||||
base.delete_file(directory_build + "/libboost_system.a")
|
||||
base.delete_file(directory_build + "/libboost_system.dylib")
|
||||
base.copy_files(directory_build + "/linux_64/*.a", directory_build)
|
||||
# TODO: support x86
|
||||
|
||||
if config.check_option("platform", "linux_arm64") and not base.is_dir("../build/linux_arm64"):
|
||||
@ -101,21 +125,27 @@ def make():
|
||||
base.copy_files(directory_build + "/linux_arm64/*.a", directory_build)
|
||||
|
||||
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios"):
|
||||
old_cur2 = os.getcwd()
|
||||
clang_correct()
|
||||
os.chdir("../")
|
||||
base.bash("./boost_ios")
|
||||
os.chdir(old_cur2)
|
||||
|
||||
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios_xcframework"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios_simulator", "xcframework_platform_ios_simulator")
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios")
|
||||
|
||||
if (-1 != config.option("platform").find("android")) and not base.is_dir("../build/android"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"])
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
|
||||
if config.check_option("platform", "mac_64") and not base.is_dir("../build/mac_64"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "mac_64")
|
||||
directory_build = base_dir + "/build/mac_64/lib"
|
||||
base.delete_file(directory_build + "/libboost_system.a")
|
||||
base.delete_file(directory_build + "/libboost_system.dylib")
|
||||
base.copy_files(directory_build + "/mac_64/*.a", directory_build)
|
||||
|
||||
if (-1 != config.option("platform").find("mac_arm64")) and not base.is_dir("../build/mac_arm64"):
|
||||
if config.check_option("platform", "mac_arm64") and not base.is_dir("../build/mac_arm64"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "mac_arm64")
|
||||
directory_build = base_dir + "/build/mac_arm64/lib"
|
||||
base.delete_file(directory_build + "/libboost_system.a")
|
||||
|
||||
@ -1,108 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
platforms = {
|
||||
"arm64_v8a" : {
|
||||
"name" : "arm64-v8a",
|
||||
"toolset" : "arm64v8a",
|
||||
"clang_triple" : "aarch64-linux-android21",
|
||||
"tool_triple" : "aarch64-linux-android",
|
||||
"abi" : "aapcs",
|
||||
"arch" : "arm",
|
||||
"address_model" : "64",
|
||||
"compiler_flags" : "",
|
||||
"linker_flags" : ""
|
||||
},
|
||||
"armv7" : {
|
||||
"name" : "armeabi-v7a",
|
||||
"toolset" : "armeabiv7a",
|
||||
"clang_triple" : "armv7a-linux-androideabi16",
|
||||
"tool_triple" : "arm-linux-androideabi",
|
||||
"abi" : "aapcs",
|
||||
"arch" : "arm",
|
||||
"address_model" : "32",
|
||||
"compiler_flags" : "-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp",
|
||||
"linker_flags" : "-Wl,--fix-cortex-a8"
|
||||
},
|
||||
"x86" : {
|
||||
"name" : "x86",
|
||||
"toolset" : "x86",
|
||||
"clang_triple" : "i686-linux-android16",
|
||||
"tool_triple" : "i686-linux-android",
|
||||
"abi" : "sysv",
|
||||
"arch" : "x86",
|
||||
"address_model" : "32",
|
||||
"compiler_flags" : "",
|
||||
"linker_flags" : ""
|
||||
},
|
||||
"x86_64" : {
|
||||
"name" : "x86_64",
|
||||
"toolset" : "x8664",
|
||||
"clang_triple" : "x86_64-linux-android21",
|
||||
"tool_triple" : "x86_64-linux-android",
|
||||
"abi" : "sysv",
|
||||
"arch" : "x86",
|
||||
"address_model" : "64",
|
||||
"compiler_flags" : "",
|
||||
"linker_flags" : ""
|
||||
}
|
||||
}
|
||||
|
||||
base_dir = base.get_script_dir()
|
||||
|
||||
def make(platform):
|
||||
tmp_build_dir = base_dir + "/core_common/modules/boost"
|
||||
if (base.is_dir(tmp_build_dir)):
|
||||
base.delete_dir(tmp_build_dir)
|
||||
base.copy_dir(base_dir + "/../tools/android/boost", tmp_build_dir)
|
||||
|
||||
current_platform = platforms[platform]
|
||||
|
||||
if (base.host_platform() == "mac"):
|
||||
source = "prebuilt/linux-x86_64"
|
||||
dest = "prebuilt/darwin-x86_64"
|
||||
base.replaceInFile(tmp_build_dir + "/user-config.jam", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/hide/as", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/hide/strip", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/ar", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/clang++", source, dest)
|
||||
base.replaceInFile(tmp_build_dir + "/bin/ranlib", source, dest)
|
||||
|
||||
build_dir_tmp = tmp_build_dir + "/tmp"
|
||||
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex", "--prefix=../build/android_" + platform])
|
||||
base.cmd("./b2", ["headers"])
|
||||
base.cmd("./b2", ["--clean"])
|
||||
|
||||
old_path = base.get_env("PATH")
|
||||
base.set_env("PATH", tmp_build_dir + "/bin:" + old_path)
|
||||
base.set_env("NDK_DIR", base.get_env("ANDROID_NDK_ROOT"))
|
||||
|
||||
base.set_env("BFA_CLANG_TRIPLE_FOR_ABI", current_platform["clang_triple"])
|
||||
base.set_env("BFA_TOOL_TRIPLE_FOR_ABI", current_platform["tool_triple"])
|
||||
base.set_env("BFA_COMPILER_FLAGS_FOR_ABI", current_platform["compiler_flags"])
|
||||
base.set_env("BFA_LINKER_FLAGS_FOR_ABI", current_platform["linker_flags"])
|
||||
|
||||
print(current_platform)
|
||||
base.cmd("./b2", ["-q", "-j4",
|
||||
"toolset=clang-" + current_platform["toolset"],
|
||||
"binary-format=elf",
|
||||
"address-model=" + current_platform["address_model"],
|
||||
"architecture=" + current_platform["arch"],
|
||||
"abi=" + current_platform["abi"],
|
||||
"link=static",
|
||||
"threading=multi",
|
||||
"target-os=android",
|
||||
"--user-config=" + tmp_build_dir + "/user-config.jam",
|
||||
"--ignore-site-config",
|
||||
"--layout=system",
|
||||
"install"], True)
|
||||
|
||||
base.set_env("PATH", old_path)
|
||||
base.delete_dir(tmp_build_dir)
|
||||
return
|
||||
@ -5,15 +5,18 @@ sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import build
|
||||
import qmake
|
||||
|
||||
def make(src_dir, modules, build_platform="android"):
|
||||
def make(src_dir, modules, build_platform="android", qmake_addon=""):
|
||||
old_cur = os.getcwd()
|
||||
old_env = dict(os.environ)
|
||||
b2_addon = ""
|
||||
|
||||
print("boost-headers...")
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=system"])
|
||||
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=system"])
|
||||
base.cmd("./b2", ["--prefix=./../build/" + build_platform, "headers", "install"])
|
||||
|
||||
|
||||
for module in modules:
|
||||
print("boost-module: " + module + " ...")
|
||||
module_dir = src_dir + "/libs/" + module
|
||||
@ -23,17 +26,13 @@ def make(src_dir, modules, build_platform="android"):
|
||||
pro_file_content.append("TARGET = boost_" + module)
|
||||
pro_file_content.append("TEMPLATE = lib")
|
||||
pro_file_content.append("CONFIG += staticlib")
|
||||
if (build_platform == "android"):
|
||||
pro_file_content.append("DEFINES += \"_HAS_AUTO_PTR_ETC=0\"")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("CORE_ROOT_DIR = $$PWD/../../../../../..")
|
||||
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
|
||||
pro_file_content.append("include($$PWD/../../../../../base.pri)")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
|
||||
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
|
||||
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
|
||||
pro_file_content.append("!isEmpty(OO_BRANDING_SUFFIX):MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"$$OO_BRANDING_SUFFIX\")")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("BOOST_SOURCES=$$PWD/../..")
|
||||
pro_file_content.append("INCLUDEPATH += $$BOOST_SOURCES")
|
||||
pro_file_content.append("INCLUDEPATH += $$PWD/include")
|
||||
@ -43,7 +42,9 @@ def make(src_dir, modules, build_platform="android"):
|
||||
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
|
||||
os.chdir(module_dir)
|
||||
build.make_pro_file("./", module + ".pro")
|
||||
|
||||
qmake.make_all_platforms(module_dir + "/" + module + ".pro", qmake_addon)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
@ -5,6 +5,13 @@ sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
|
||||
def clear_module():
|
||||
for child in glob.glob("./*"):
|
||||
if base.is_dir(child):
|
||||
base.delete_dir(child)
|
||||
return
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: cef")
|
||||
@ -13,56 +20,77 @@ def make():
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64"]
|
||||
|
||||
url = "http://d2ettrnqo7v976.cloudfront.net/cef/4280/"
|
||||
base.check_module_version("2", clear_module)
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64", "win_arm64", "linux_arm64"]
|
||||
|
||||
for platform in platforms:
|
||||
if not config.check_option("platform", platform):
|
||||
continue
|
||||
|
||||
url = "https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/cef/"
|
||||
archive_name = "./cef_binary.7z"
|
||||
|
||||
if (-1 != platform.find("_xp")):
|
||||
url += "4280/"
|
||||
archive_name = "./cef_binary_xp.7z"
|
||||
elif (config.check_option("config", "cef_version_107")):
|
||||
url += "5304/"
|
||||
archive_name = "./cef_binary_107.7z"
|
||||
elif ("mac_64" == platform) and (config.check_option("config", "use_v8")):
|
||||
url += "5060/"
|
||||
archive_name = "./cef_binary_103.7z"
|
||||
else:
|
||||
url += "5414/"
|
||||
|
||||
url_platform = (url + platform + "/cef_binary.7z")
|
||||
archive_name_data = archive_name + ".data"
|
||||
|
||||
if not base.is_dir(platform):
|
||||
base.create_dir(platform)
|
||||
|
||||
os.chdir(platform)
|
||||
|
||||
|
||||
data_url = base.get_file_last_modified_url(url_platform)
|
||||
old_data_url = base.readFile("./cef_binary.7z.data")
|
||||
old_data_url = base.readFile(archive_name_data)
|
||||
|
||||
build_dir_name = "build"
|
||||
if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")):
|
||||
build_dir_name = "build_107"
|
||||
if ("mac_64" == platform) and (config.check_option("config", "use_v8")):
|
||||
build_dir_name = "build_103"
|
||||
|
||||
if (data_url != old_data_url):
|
||||
if base.is_file("./cef_binary.7z"):
|
||||
base.delete_file("./cef_binary.7z")
|
||||
if base.is_dir("build"):
|
||||
base.delete_dir("build")
|
||||
if base.is_file(archive_name):
|
||||
base.delete_file(archive_name)
|
||||
if base.is_dir(build_dir_name):
|
||||
base.delete_dir(build_dir_name)
|
||||
|
||||
if base.is_dir("build"):
|
||||
if base.is_dir(build_dir_name):
|
||||
os.chdir(base_dir)
|
||||
continue
|
||||
|
||||
# download
|
||||
if not base.is_file("./cef_binary.7z"):
|
||||
base.download(url_platform, "./cef_binary.7z")
|
||||
if not base.is_file(archive_name):
|
||||
base.download(url_platform, archive_name)
|
||||
|
||||
# extract
|
||||
base.extract("./cef_binary.7z", "./")
|
||||
base.extract(archive_name, "./")
|
||||
|
||||
base.delete_file("./cef_binary.7z.data")
|
||||
base.writeFile("./cef_binary.7z.data", data_url)
|
||||
base.delete_file(archive_name_data)
|
||||
base.writeFile(archive_name_data, data_url)
|
||||
|
||||
base.create_dir("./build")
|
||||
base.create_dir("./" + build_dir_name)
|
||||
|
||||
# deploy
|
||||
if (0 != platform.find("mac")):
|
||||
base.copy_files("cef_binary/Release/*", "build/")
|
||||
base.copy_files("cef_binary/Resources/*", "build/")
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.cmd("chmod", ["a+xr", "build/locales"])
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.cmd("mv", ["Chromium Embedded Framework.framework", "build/Chromium Embedded Framework.framework"])
|
||||
base.cmd("mv", ["Chromium Embedded Framework.framework", build_dir_name + "/Chromium Embedded Framework.framework"])
|
||||
base.delete_dir("./Chromium Embedded Framework.framework")
|
||||
else:
|
||||
base.copy_files("cef_binary/Release/*", build_dir_name + "/")
|
||||
base.copy_files("cef_binary/Resources/*", build_dir_name + "/")
|
||||
if (0 == platform.find("linux")):
|
||||
base.cmd("chmod", ["a+xr", build_dir_name + "/locales"])
|
||||
base.delete_dir("./cef_binary")
|
||||
|
||||
os.chdir(base_dir)
|
||||
|
||||
|
||||
@ -2,21 +2,19 @@
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
sys.path.append('android')
|
||||
import config
|
||||
import subprocess
|
||||
import os
|
||||
import base
|
||||
import curl_android
|
||||
|
||||
def make():
|
||||
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(path)
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
if base.is_dir(path + "/build/android"):
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
subprocess.call(["./build-android-curl.sh"])
|
||||
|
||||
curl_android.make()
|
||||
elif (-1 != config.option("platform").find("ios")):
|
||||
if base.is_dir(path + "/build/ios"):
|
||||
os.chdir(old_cur)
|
||||
|
||||
@ -8,7 +8,7 @@ import os
|
||||
|
||||
def clean():
|
||||
if base.is_dir("glew-2.1.0"):
|
||||
base.delete_dir("glew-2.1.0");
|
||||
base.delete_dir("glew-2.1.0")
|
||||
return
|
||||
|
||||
def make():
|
||||
@ -16,7 +16,7 @@ def make():
|
||||
return
|
||||
|
||||
if not config.check_option("module", "mobile"):
|
||||
return;
|
||||
return
|
||||
|
||||
print("[fetch & build]: glew")
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/glew"
|
||||
@ -26,7 +26,7 @@ def make():
|
||||
base.common_check_version("glew", "1", clean)
|
||||
|
||||
if not base.is_dir("glew-2.1.0"):
|
||||
base.download("https://deac-ams.dl.sourceforge.net/project/glew/glew/2.1.0/glew-2.1.0-win32.zip", "./archive.zip")
|
||||
base.download("https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/glew/glew-2.1.0-win32.zip", "./archive.zip")
|
||||
base.extract("./archive.zip", "./")
|
||||
base.delete_file("./archive.zip")
|
||||
|
||||
|
||||
19
scripts/core_common/modules/googletest.py
Normal file
19
scripts/core_common/modules/googletest.py
Normal file
@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
def make():
|
||||
print("[fetch]: googletest")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/googletest"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
if not base.is_dir("googletest"):
|
||||
base.cmd("git", ["clone", "https://github.com/google/googletest.git", "-b", "v1.13.0"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
403
scripts/core_common/modules/heif.py
Normal file
403
scripts/core_common/modules/heif.py
Normal file
@ -0,0 +1,403 @@
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
import config
|
||||
|
||||
# NOTE:
|
||||
# - requires CMake >= 3.21, < 4.0.0
|
||||
|
||||
# libs versions
|
||||
X265_VERSION = "4.1"
|
||||
DE265_VERSION = "1.0.16"
|
||||
# 1.18.2 - the latest version of libheif supporting C++11 builds (as for now)
|
||||
HEIF_VERSION = "1.18.2"
|
||||
|
||||
# ios cmake toolchain
|
||||
IOS_CMAKE_VERSION = "4.5.0"
|
||||
IOS_CMAKE_TOOLCHAIN_FILE = base.get_script_dir() + "/../../core/Common/3dParty/heif/ios-cmake/ios.toolchain.cmake"
|
||||
|
||||
# android cmake toolchain
|
||||
ANDROID_CMAKE_TOOLCHAIN_FILE = base.get_env("ANDROID_NDK_ROOT") + "/build/cmake/android.toolchain.cmake"
|
||||
|
||||
# linux arm64 cmake toolchain
|
||||
LINUX_SYSTEM_AARCH64_TOOLCHAIN_FILE = base.get_script_dir() + "/../tools/linux/sysroot/system-aarch64.toolchain.cmake"
|
||||
LINUX_CUSTOM_SYSROOT_TOOLCHAIN_FILE = base.get_script_dir() + "/../tools/linux/sysroot/custom-sysroot.toolchain.cmake"
|
||||
|
||||
OLD_ENV = dict()
|
||||
|
||||
def get_vs_version():
|
||||
vs_version = "14 2015"
|
||||
if config.option("vs-version") == "2019":
|
||||
vs_version = "16 2019"
|
||||
return vs_version
|
||||
|
||||
def get_xcode_sdk(platform):
|
||||
xcode_sdk = "iphoneos"
|
||||
if "simulator" in platform:
|
||||
xcode_sdk = "iphonesimulator"
|
||||
return xcode_sdk
|
||||
|
||||
def fetch_repo(repo_url, branch_or_tag):
|
||||
base.cmd("git", ["clone", "--depth", "1", "--branch", branch_or_tag, repo_url])
|
||||
return
|
||||
|
||||
def get_build_dir(base_dir, repo_dir, platform, build_type):
|
||||
return os.path.join(base_dir, repo_dir, "build", platform, build_type.lower())
|
||||
|
||||
# general build function that builds for ONE platform (supposing we are located in the build directory)
|
||||
def build_with_cmake(platform, cmake_args, build_type):
|
||||
# extend cmake arguments
|
||||
cmake_args_ext = []
|
||||
# WINDOWS
|
||||
if "win" in platform:
|
||||
cmake_args_ext = [
|
||||
"-G", f"Visual Studio {get_vs_version()}"
|
||||
]
|
||||
if platform == "win_64" or platform == "win_64_xp":
|
||||
cmake_args_ext += ["-A", "x64"]
|
||||
elif platform == "win_32" or platform == "win_32_xp":
|
||||
cmake_args_ext += ["-A", "Win32"]
|
||||
elif platform == "win_arm64":
|
||||
cmake_args_ext += ["-A", "ARM64"]
|
||||
# LINUX, MAC
|
||||
elif "linux" in platform or "mac" in platform:
|
||||
cmake_args_ext = [
|
||||
"-G", "Unix Makefiles",
|
||||
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON" # on UNIX we need to compile with fPIC
|
||||
]
|
||||
if platform == "mac_64":
|
||||
cmake_args_ext += ["-DCMAKE_OSX_DEPLOYMENT_TARGET=10.11", "-DCMAKE_OSX_ARCHITECTURES=x86_64"]
|
||||
elif platform == "mac_arm64":
|
||||
cmake_args_ext += ["-DCMAKE_OSX_DEPLOYMENT_TARGET=11.0", "-DCMAKE_OSX_ARCHITECTURES=arm64"]
|
||||
elif config.option("sysroot") != "":
|
||||
# force use custom CXXFLAGS with Release/Debug build
|
||||
if (platform == "linux_64"):
|
||||
cmake_args += ["-DCMAKE_TOOLCHAIN_FILE=" + LINUX_CUSTOM_SYSROOT_TOOLCHAIN_FILE]
|
||||
else:
|
||||
cmake_args += ["-DCMAKE_TOOLCHAIN_FILE=" + LINUX_SYSTEM_AARCH64_TOOLCHAIN_FILE]
|
||||
elif platform == "linux_arm64" and not base.is_os_arm():
|
||||
cmake_args += ["-DCMAKE_TOOLCHAIN_FILE=" + LINUX_SYSTEM_AARCH64_TOOLCHAIN_FILE]
|
||||
# IOS
|
||||
elif "ios" in platform:
|
||||
cmake_args_ext = [
|
||||
"-G", "Xcode",
|
||||
"-DCMAKE_TOOLCHAIN_FILE=" + IOS_CMAKE_TOOLCHAIN_FILE,
|
||||
"-DDEPLOYMENT_TARGET=11.0"
|
||||
]
|
||||
if platform == "ios":
|
||||
cmake_args_ext += ["-DPLATFORM=OS64"]
|
||||
elif platform == "ios_simulator":
|
||||
cmake_args_ext += ["-DPLATFORM=SIMULATOR64COMBINED"]
|
||||
# ANDROID
|
||||
elif "android" in platform:
|
||||
cmake_args_ext = [
|
||||
"-G", "Unix Makefiles",
|
||||
"-DCMAKE_TOOLCHAIN_FILE=" + ANDROID_CMAKE_TOOLCHAIN_FILE,
|
||||
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON"
|
||||
]
|
||||
def get_cmake_args_android(arch, api_level):
|
||||
return [
|
||||
"-DANDROID_ABI=" + arch,
|
||||
"-DANDROID_NATIVE_API_LEVEL=" + api_level
|
||||
]
|
||||
if platform == "android_arm64_v8a":
|
||||
cmake_args_ext += get_cmake_args_android("arm64-v8a", "21")
|
||||
elif platform == "android_armv7":
|
||||
cmake_args_ext += get_cmake_args_android("armeabi-v7a", "16")
|
||||
elif platform == "android_x86":
|
||||
cmake_args_ext += get_cmake_args_android("x86", "16")
|
||||
elif platform == "android_x86_64":
|
||||
cmake_args_ext += get_cmake_args_android("x86_64", "21")
|
||||
|
||||
# env setup for custom sysroot
|
||||
if config.option("sysroot") != "":
|
||||
base.set_sysroot_env(platform)
|
||||
|
||||
# run cmake
|
||||
base.cmd("cmake", cmake_args + cmake_args_ext)
|
||||
|
||||
# build
|
||||
if "Unix Makefiles" in cmake_args_ext:
|
||||
base.cmd("make", ["-j4"])
|
||||
else:
|
||||
base.cmd("cmake", ["--build", ".", "--config", build_type])
|
||||
|
||||
if config.option("sysroot") != "":
|
||||
base.restore_sysroot_env()
|
||||
return
|
||||
|
||||
# general make function that calls `build_func` callback for configured platform(s) with specified cmake arguments
|
||||
def make_common(build_func, cmake_args):
|
||||
# WINDOWS
|
||||
if "windows" == base.host_platform():
|
||||
# win_64
|
||||
if config.check_option("platform", "win_64") or config.check_option("platform", "win_64_xp"):
|
||||
build_func("win_64", cmake_args)
|
||||
# win_32
|
||||
if config.check_option("platform", "win_32") or config.check_option("platform", "win_32_xp"):
|
||||
build_func("win_32", cmake_args)
|
||||
# win_arm64
|
||||
if config.check_option("platform", "win_arm64"):
|
||||
build_func("win_arm64", cmake_args)
|
||||
|
||||
# LINUX
|
||||
elif "linux" == base.host_platform():
|
||||
# linux_64
|
||||
if config.check_option("platform", "linux_64"):
|
||||
build_func("linux_64", cmake_args)
|
||||
# linux_arm64
|
||||
if config.check_option("platform", "linux_arm64"):
|
||||
build_func("linux_arm64", cmake_args)
|
||||
|
||||
# MAC
|
||||
elif "mac" == base.host_platform():
|
||||
# mac_64
|
||||
if config.check_option("platform", "mac_64"):
|
||||
build_func("mac_64", cmake_args)
|
||||
# mac_arm64
|
||||
if config.check_option("platform", "mac_arm64"):
|
||||
build_func("mac_arm64", cmake_args)
|
||||
|
||||
# IOS
|
||||
if -1 != config.option("platform").find("ios"):
|
||||
# ios (arm64)
|
||||
build_func("ios", cmake_args)
|
||||
# ios simulator (x86_64 and arm64 FAT lib)
|
||||
build_func("ios_simulator", cmake_args)
|
||||
|
||||
# ANDROID
|
||||
if -1 != config.option("platform").find("android"):
|
||||
# android_arm64_v8a
|
||||
if config.check_option("platform", "android_arm64_v8a"):
|
||||
build_func("android_arm64_v8a", cmake_args)
|
||||
# android_armv7
|
||||
if config.check_option("platform", "android_armv7"):
|
||||
build_func("android_armv7", cmake_args)
|
||||
# android_x86
|
||||
if config.check_option("platform", "android_x86"):
|
||||
build_func("android_x86", cmake_args)
|
||||
# android_x86_64
|
||||
if config.check_option("platform", "android_x86_64"):
|
||||
build_func("android_x86_64", cmake_args)
|
||||
|
||||
return
|
||||
|
||||
def make_x265(base_dir, build_type):
|
||||
# fetch lib repo
|
||||
if not base.is_dir("x265_git"):
|
||||
fetch_repo("https://bitbucket.org/multicoreware/x265_git.git", f"Release_{X265_VERSION}")
|
||||
# fix x265 version detection so it reads version from x265Version.txt instead of parsing it from .git
|
||||
base.replaceInFile(
|
||||
base_dir + "/x265_git/source/cmake/Version.cmake",
|
||||
"elseif(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/../x265Version.txt)",
|
||||
"endif()\n if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/../x265Version.txt)"
|
||||
)
|
||||
|
||||
# prepare cmake args
|
||||
cmake_dir = base_dir + "/x265_git/source"
|
||||
cmake_args = [
|
||||
cmake_dir,
|
||||
"-DCMAKE_BUILD_TYPE=" + build_type,
|
||||
"-DENABLE_CLI=OFF", # do not build standalone CLI app
|
||||
"-DENABLE_SHARED=OFF", # do not build shared libs
|
||||
"-DENABLE_ASSEMBLY=OFF", # disable assembly optimizations
|
||||
"-DENABLE_LIBNUMA=OFF", # disable libnuma usage (affects Linux only)
|
||||
]
|
||||
|
||||
# lib build function
|
||||
def build_x265(platform, cmake_args):
|
||||
# check if target lib has already been built
|
||||
build_dir = get_build_dir(base_dir, "x265_git", platform, build_type)
|
||||
if platform.find("win") != -1:
|
||||
target_lib = os.path.join(build_dir, build_type, "x265-static.lib")
|
||||
else:
|
||||
target_lib = os.path.join(build_dir, "libx265.a")
|
||||
if base.is_file(target_lib):
|
||||
return
|
||||
# go to the build directory
|
||||
base.create_dir(build_dir)
|
||||
os.chdir(build_dir)
|
||||
# run build
|
||||
build_with_cmake(platform, cmake_args, build_type)
|
||||
# for iOS there is no target for building libx265.a, so we need to form it ourselves from libcommon.a and libencoder.a
|
||||
if platform.find("ios") != -1:
|
||||
xcode_sdk = get_xcode_sdk(platform)
|
||||
base.cmd("libtool", [
|
||||
"-static",
|
||||
"-o", "libx265.a",
|
||||
f"build/common.build/{build_type}-{xcode_sdk}/libcommon.a",
|
||||
f"build/encoder.build/{build_type}-{xcode_sdk}/libencoder.a"
|
||||
])
|
||||
# copy header
|
||||
base.copy_file(base_dir + "/x265_git/source/x265.h", build_dir)
|
||||
# reset directory
|
||||
os.chdir(base_dir)
|
||||
return
|
||||
|
||||
make_common(build_x265, cmake_args)
|
||||
return
|
||||
|
||||
def make_de265(base_dir, build_type):
|
||||
# fetch lib repo
|
||||
if not base.is_dir("libde265"):
|
||||
fetch_repo("https://github.com/strukturag/libde265.git", f"v{DE265_VERSION}")
|
||||
|
||||
# prepare cmake args
|
||||
cmake_dir = base_dir + "/libde265"
|
||||
cmake_args = [
|
||||
cmake_dir,
|
||||
"-DCMAKE_BUILD_TYPE=" + build_type,
|
||||
"-DBUILD_SHARED_LIBS=OFF", # do not build shared libs
|
||||
"-DENABLE_SDL=OFF", # disable SDL
|
||||
"-DENABLE_DECODER=OFF", # do not build decoder CLI executable
|
||||
"-DENABLE_ENCODER=OFF", # do not build encoder CLI executable
|
||||
]
|
||||
|
||||
# lib build function
|
||||
def build_de265(platform, cmake_args):
|
||||
# check if target lib has already been built
|
||||
build_dir = get_build_dir(base_dir, "libde265", platform, build_type)
|
||||
if platform.find("win") != -1:
|
||||
target_lib = os.path.join(build_dir, "libde265", build_type, "libde265.lib")
|
||||
else:
|
||||
target_lib = os.path.join(build_dir, "libde265/libde265.a")
|
||||
if base.is_file(target_lib):
|
||||
return
|
||||
# go to the build directory
|
||||
base.create_dir(build_dir)
|
||||
os.chdir(build_dir)
|
||||
# run build
|
||||
build_with_cmake(platform, cmake_args, build_type)
|
||||
# for ios copy target library from the default build path
|
||||
if platform.find("ios") != -1:
|
||||
xcode_sdk = get_xcode_sdk(platform)
|
||||
base.copy_file(f"libde265/{build_type}-{xcode_sdk}/libde265.a", "libde265")
|
||||
# copy header
|
||||
base.copy_file(base_dir + "/libde265/libde265/de265.h", "libde265")
|
||||
# reset directory
|
||||
os.chdir(base_dir)
|
||||
return
|
||||
|
||||
make_common(build_de265, cmake_args)
|
||||
return
|
||||
|
||||
def make_heif(base_dir, build_type):
|
||||
# fetch lib repo
|
||||
if not base.is_dir("libheif"):
|
||||
fetch_repo("https://github.com/strukturag/libheif.git", f"v{HEIF_VERSION}")
|
||||
# do not build heifio module
|
||||
base.replaceInFile(
|
||||
base_dir + "/libheif/CMakeLists.txt",
|
||||
"add_subdirectory(heifio)",
|
||||
"# add_subdirectory(heifio)"
|
||||
)
|
||||
base.replaceInFile(
|
||||
base_dir + "/libheif/CMakeLists.txt",
|
||||
"if (DOXYGEN_FOUND)",
|
||||
"if (FALSE)"
|
||||
)
|
||||
|
||||
# prepare cmake args
|
||||
cmake_dir = base_dir + "/libheif"
|
||||
cmake_args = [
|
||||
cmake_dir,
|
||||
"--preset=release-noplugins", # preset to disable plugins system
|
||||
"-DCMAKE_BUILD_TYPE=" + build_type,
|
||||
"-DBUILD_SHARED_LIBS=OFF", # do not build shared libs
|
||||
"-DWITH_LIBSHARPYUV=OFF", # do not build libsharpyuv (for RGB <--> YUV color space conversions)
|
||||
"-DWITH_AOM_DECODER=OFF", # do not build AOM V1 decoder (for AVIF image format)
|
||||
"-DWITH_AOM_ENCODER=OFF", # do not build AOM V1 encoder (for AVIF image format)
|
||||
"-DWITH_GDK_PIXBUF=OFF", # do not build gdk-pixbuf plugin (UNIX only)
|
||||
"-DWITH_GNOME=OFF", # do not build gnome plugin (Linux only)
|
||||
"-DWITH_EXAMPLES=OFF", # do not build examples
|
||||
"-DWITH_EXAMPLE_HEIF_VIEW=OFF", # do not build heif-view CLI tool
|
||||
"-DWITH_X265=ON", # enable x265 codec
|
||||
"-DWITH_LIBDE265=ON", # enable de265 codec
|
||||
"-DCMAKE_CXX_FLAGS=-DLIBDE265_STATIC_BUILD", # add macro definition to properly compile with de265 static library
|
||||
"-DCMAKE_C_FLAGS=-DLIBDE265_STATIC_BUILD", # same ^
|
||||
]
|
||||
|
||||
# lib build function
|
||||
def build_heif(platform, cmake_args):
|
||||
# check if target lib has already been built
|
||||
build_dir = get_build_dir(base_dir, "libheif", platform, build_type)
|
||||
if platform.find("win") != -1:
|
||||
target_lib = os.path.join(build_dir, "libheif", build_type, "heif.lib")
|
||||
else:
|
||||
target_lib = os.path.join(build_dir, "libheif/libheif.a")
|
||||
if base.is_file(target_lib):
|
||||
return
|
||||
# go to the build directory
|
||||
base.create_dir(build_dir)
|
||||
os.chdir(build_dir)
|
||||
# add paths to dependent libraries and includes to cmake args
|
||||
de265_build_dir = get_build_dir(base_dir, "libde265", platform, build_type)
|
||||
x265_build_dir = get_build_dir(base_dir, "x265_git", platform, build_type)
|
||||
cmake_args_ext = [
|
||||
f"-DLIBDE265_INCLUDE_DIR={de265_build_dir}",
|
||||
f"-DX265_INCLUDE_DIR={x265_build_dir}"
|
||||
]
|
||||
if platform.find("win") != -1:
|
||||
cmake_args_ext += [
|
||||
f"-DLIBDE265_LIBRARY={de265_build_dir}/libde265/{build_type}/libde265.lib",
|
||||
f"-DX265_LIBRARY={x265_build_dir}/{build_type}/x265-static.lib"
|
||||
]
|
||||
else:
|
||||
cmake_args_ext += [
|
||||
f"-DLIBDE265_LIBRARY={de265_build_dir}/libde265/libde265.a",
|
||||
f"-DX265_LIBRARY={x265_build_dir}/libx265.a"
|
||||
]
|
||||
# run build
|
||||
build_with_cmake(platform, cmake_args + cmake_args_ext, build_type)
|
||||
# for ios copy target library from the default build path
|
||||
if platform.find("ios") != -1:
|
||||
xcode_sdk = get_xcode_sdk(platform)
|
||||
base.copy_file(f"libheif/{build_type}-{xcode_sdk}/libheif.a", "libheif")
|
||||
# reset directory
|
||||
os.chdir(base_dir)
|
||||
return
|
||||
|
||||
make_common(build_heif, cmake_args)
|
||||
return
|
||||
|
||||
def clear_module():
|
||||
if base.is_dir("libde265"):
|
||||
base.delete_dir_with_access_error("libde265")
|
||||
if base.is_dir("x265_git"):
|
||||
base.delete_dir_with_access_error("x265_git")
|
||||
if base.is_dir("libheif"):
|
||||
base.delete_dir_with_access_error("libheif")
|
||||
return
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: heif")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/heif"
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
base.check_module_version("2", clear_module)
|
||||
|
||||
build_type = "Release"
|
||||
if (-1 != config.option("config").lower().find("debug")):
|
||||
build_type = "Debug"
|
||||
|
||||
# fetch custom cmake toolchain for ios
|
||||
if -1 != config.option("platform").find("ios"):
|
||||
if not base.is_file(IOS_CMAKE_TOOLCHAIN_FILE):
|
||||
fetch_repo("https://github.com/leetal/ios-cmake.git", IOS_CMAKE_VERSION)
|
||||
|
||||
# build encoder library
|
||||
make_x265(base_dir, build_type)
|
||||
# build decoder library
|
||||
make_de265(base_dir, build_type)
|
||||
|
||||
# build libheif
|
||||
make_heif(base_dir, build_type)
|
||||
|
||||
os.chdir(old_dir)
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
make()
|
||||
@ -7,7 +7,27 @@ import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def clear_module():
|
||||
directories = ["gumbo-parser", "katana-parser"]
|
||||
|
||||
for dir in directories:
|
||||
if base.is_dir(dir):
|
||||
base.delete_dir_with_access_error(dir)
|
||||
|
||||
def make():
|
||||
old_cur_dir = os.getcwd()
|
||||
|
||||
print("[fetch]: html")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/html"
|
||||
|
||||
os.chdir(base_dir)
|
||||
base.check_module_version("2", clear_module)
|
||||
os.chdir(old_cur_dir)
|
||||
|
||||
base.cmd_in_dir(base_dir, "python", ["fetch.py"])
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make()
|
||||
|
||||
@ -3,6 +3,11 @@ sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
|
||||
def clean():
|
||||
if base.is_dir("hunspell"):
|
||||
base.delete_dir_with_access_error("hunspell")
|
||||
return
|
||||
|
||||
def make(build_js = True):
|
||||
|
||||
old_cur_dir = os.getcwd()
|
||||
@ -11,6 +16,8 @@ def make(build_js = True):
|
||||
core_common_dir = base.get_script_dir() + "/../../core/Common"
|
||||
|
||||
os.chdir(core_common_dir + "/3dParty/hunspell")
|
||||
|
||||
base.common_check_version("hunspell", "1", clean)
|
||||
base.cmd("python", ["./before.py"])
|
||||
|
||||
if (build_js):
|
||||
|
||||
21
scripts/core_common/modules/hyphen.py
Normal file
21
scripts/core_common/modules/hyphen.py
Normal file
@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
def make():
|
||||
print("[fetch]: hyphen")
|
||||
new_dir = base.get_script_dir() + "/../../core/Common/3dParty/hyphen"
|
||||
old_dir = os.getcwd()
|
||||
os.chdir(new_dir)
|
||||
|
||||
if not base.is_dir("hyphen"):
|
||||
base.cmd("git", ["clone", "https://github.com/hunspell/hyphen"])
|
||||
|
||||
|
||||
os.chdir(old_dir)
|
||||
return
|
||||
|
||||
@ -2,26 +2,59 @@
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
sys.path.append('android')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import icu_android
|
||||
|
||||
def fetch_icu(major, minor, target_dir="icu"):
|
||||
if (base.is_dir("./icu2")):
|
||||
base.delete_dir_with_access_error("icu2")
|
||||
base.cmd("git", ["clone", "--depth", "1", "--branch", "release-" + major + "-" + minor, "https://github.com/unicode-org/icu.git", "./icu2"])
|
||||
base.copy_dir("./icu2/icu4c", target_dir)
|
||||
base.delete_dir_with_access_error("icu2")
|
||||
return
|
||||
|
||||
def clear_module():
|
||||
if base.is_dir("icu"):
|
||||
base.delete_dir_with_access_error("icu")
|
||||
|
||||
# remove build
|
||||
for child in glob.glob("./*"):
|
||||
if base.is_dir(child):
|
||||
base.delete_dir(child)
|
||||
|
||||
return
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: icu")
|
||||
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
icu_android.make()
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
icu_major = "58"
|
||||
base.check_module_version("8", clear_module)
|
||||
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
icu_android.make()
|
||||
|
||||
os.chdir(base_dir)
|
||||
|
||||
icu_major = "74"
|
||||
icu_minor = "2"
|
||||
|
||||
if not base.is_dir("icu"):
|
||||
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
|
||||
fetch_icu(icu_major, icu_minor)
|
||||
|
||||
# old version for win_xp
|
||||
icu_major_old = "58"
|
||||
icu_minor_old = "3"
|
||||
|
||||
if config.check_option("platform", "win_64_xp") or config.check_option("platform", "win_32_xp"):
|
||||
if not base.is_dir("icu58"):
|
||||
fetch_icu(icu_major_old, icu_minor_old, "icu58")
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
platformToolset = "v140"
|
||||
@ -32,54 +65,109 @@ def make():
|
||||
need_platforms.append("win_64")
|
||||
if (-1 != config.option("platform").find("win_32")):
|
||||
need_platforms.append("win_32")
|
||||
if (-1 != config.option("platform").find("win_arm64")):
|
||||
need_platforms.append("win_64") # for exe files
|
||||
need_platforms.append("win_arm64")
|
||||
|
||||
def build_icu_win(source_dir, out_dir, icu_major):
|
||||
if base.is_dir(out_dir):
|
||||
return
|
||||
|
||||
compile_bat = []
|
||||
compile_bat.append("setlocal")
|
||||
|
||||
args = {
|
||||
"win_32" : {
|
||||
"msbuild_platfrom" : "Win32",
|
||||
"vcvarsall_arch" : "x86",
|
||||
"out_bin_dir" : source_dir + "/bin/",
|
||||
"out_lib_dir" : source_dir + "/lib/"
|
||||
},
|
||||
"win_64" : {
|
||||
"msbuild_platfrom" : "X64",
|
||||
"vcvarsall_arch" : "x64",
|
||||
"out_bin_dir" : source_dir + "/bin64/",
|
||||
"out_lib_dir" : source_dir + "/lib64/"
|
||||
},
|
||||
"win_arm64" : {
|
||||
"msbuild_platfrom" : "ARM64",
|
||||
"vcvarsall_arch" : "x64_arm64",
|
||||
"out_bin_dir" : source_dir + "/binARM64/",
|
||||
"out_lib_dir" : source_dir + "/libARM64/"
|
||||
}
|
||||
}
|
||||
|
||||
platform_args = args[platform]
|
||||
|
||||
compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + platform_args['vcvarsall_arch'])
|
||||
compile_bat.append("call MSBuild.exe " + source_dir + "/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + platform_args['msbuild_platfrom'])
|
||||
compile_bat.append("endlocal")
|
||||
base.run_as_bat(compile_bat)
|
||||
|
||||
base.create_dir(out_dir)
|
||||
base.copy_file(platform_args['out_bin_dir'] + "icudt" + icu_major + ".dll", out_dir)
|
||||
base.copy_file(platform_args['out_bin_dir'] + "icuuc" + icu_major + ".dll", out_dir)
|
||||
base.copy_file(platform_args['out_lib_dir'] + "icudt.lib", out_dir)
|
||||
base.copy_file(platform_args['out_lib_dir'] + "icuuc.lib", out_dir)
|
||||
|
||||
for platform in need_platforms:
|
||||
if not config.check_option("platform", platform) and not config.check_option("platform", platform + "_xp"):
|
||||
continue
|
||||
if not base.is_dir(platform + "/build"):
|
||||
base.create_dir(platform)
|
||||
compile_bat = []
|
||||
compile_bat.append("setlocal")
|
||||
compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
|
||||
compile_bat.append("call MSBuild.exe icu/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + ("Win32" if base.platform_is_32(platform) else "X64"))
|
||||
compile_bat.append("endlocal")
|
||||
base.run_as_bat(compile_bat)
|
||||
bin_dir = "icu/bin64/" if ("win_64" == platform) else "icu/bin/"
|
||||
lib_dir = "icu/lib64/" if ("win_64" == platform) else "icu/lib/"
|
||||
base.create_dir(platform + "/build")
|
||||
base.copy_file(bin_dir + "icudt" + icu_major + ".dll", platform + "/build/")
|
||||
base.copy_file(bin_dir + "icuuc" + icu_major + ".dll", platform + "/build/")
|
||||
base.copy_file(lib_dir + "icudt.lib", platform + "/build/")
|
||||
base.copy_file(lib_dir + "icuuc.lib", platform + "/build/")
|
||||
|
||||
if not (config.check_option("platform", "win_64_xp") or config.check_option("platform", "win_32_xp")):
|
||||
build_icu_win("icu", platform + "/build", icu_major)
|
||||
else:
|
||||
# xp
|
||||
build_icu_win("icu58", platform + "/build/xp", icu_major_old)
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
if not base.is_file("./icu/source/i18n/digitlst.cpp.bak"):
|
||||
base.copy_file("./icu/source/i18n/digitlst.cpp", "./icu/source/i18n/digitlst.cpp.bak")
|
||||
base.replaceInFile("./icu/source/i18n/digitlst.cpp", "xlocale", "locale")
|
||||
if base.is_dir(base_dir + "/linux_64"):
|
||||
base.delete_dir(base_dir + "/linux_64")
|
||||
if base.is_dir(base_dir + "/linux_arm64"):
|
||||
base.delete_dir(base_dir + "/linux_arm64")
|
||||
|
||||
if not base.is_dir(base_dir + "/linux_64"):
|
||||
base.create_dir(base_dir + "/icu/cross_build")
|
||||
os.chdir("icu/cross_build")
|
||||
base.cmd("./../source/runConfigureICU", ["Linux", "--prefix=" + base_dir + "/icu/cross_build_install"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
command_configure = "./../source/runConfigureICU"
|
||||
command_compile_addon = "-static-libstdc++ -static-libgcc"
|
||||
if "1" == config.option("use-clang"):
|
||||
command_configure = "CXXFLAGS=-stdlib=libc++ " + command_configure
|
||||
command_compile_addon = "-stdlib=libc++"
|
||||
if "" == config.option("sysroot"):
|
||||
base.cmd(command_configure, ["Linux", "--prefix=" + base_dir + "/icu/cross_build_install"])
|
||||
base.replaceInFile("./../source/icudefs.mk.in", "LDFLAGS = @LDFLAGS@ $(RPATHLDFLAGS)", "LDFLAGS = @LDFLAGS@ $(RPATHLDFLAGS) " + command_compile_addon)
|
||||
else:
|
||||
base.set_sysroot_env("linux_64")
|
||||
sysroot_path = config.option("sysroot_linux_64")
|
||||
sysroot_path_bin = config.get_custom_sysroot_bin("linux_64")
|
||||
base.cmd_exe("./../source/configure", ["--prefix=" + base_dir + "/icu/cross_build_install",
|
||||
"CC=" + sysroot_path_bin + "/gcc", "CXX=" + sysroot_path_bin + "/g++",
|
||||
"AR=" + sysroot_path_bin + "/ar", "RANLIB=" + sysroot_path_bin + "/ranlib",
|
||||
"CFLAGS=--sysroot=" + sysroot_path,
|
||||
"CXXFLAGS=--sysroot=" + sysroot_path + " " + command_compile_addon,
|
||||
"LDFLAGS=--sysroot=" + sysroot_path])
|
||||
|
||||
if "" == config.option("sysroot"):
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
else:
|
||||
base.cmd_exe("make", ["-j4"])
|
||||
base.cmd_exe("make", ["install"], True)
|
||||
base.restore_sysroot_env()
|
||||
|
||||
base.create_dir(base_dir + "/linux_64")
|
||||
base.create_dir(base_dir + "/linux_64/build")
|
||||
base.copy_file(base_dir + "/icu/cross_build_install/lib/libicudata.so." + icu_major + "." + icu_minor, base_dir + "/linux_64/build/libicudata.so." + icu_major)
|
||||
base.copy_file(base_dir + "/icu/cross_build_install/lib/libicuuc.so." + icu_major + "." + icu_minor, base_dir + "/linux_64/build/libicuuc.so." + icu_major)
|
||||
base.copy_dir(base_dir + "/icu/cross_build_install/include", base_dir + "/linux_64/build/include")
|
||||
|
||||
|
||||
if config.check_option("platform", "linux_arm64") and not base.is_dir(base_dir + "/linux_arm64") and not base.is_os_arm():
|
||||
base.create_dir(base_dir + "/icu/linux_arm64")
|
||||
os.chdir(base_dir + "/icu/linux_arm64")
|
||||
base_arm_tool_dir = base.get_prefix_cross_compiler_arm64()
|
||||
compiler_gcc_prefix = base.get_compiler_gcc_prefix("linux_arm64")
|
||||
if config.option("sysroot") != "":
|
||||
base.set_sysroot_env("linux_arm64")
|
||||
base.cmd("./../source/configure", ["--host=arm-linux", "--prefix=" + base_dir + "/icu/linux_arm64_install", "--with-cross-build=" + base_dir + "/icu/cross_build",
|
||||
"CC=" + base_arm_tool_dir + "gcc", "CXX=" + base_arm_tool_dir + "g++", "AR=" + base_arm_tool_dir + "ar", "RANLIB=" + base_arm_tool_dir + "ranlib"])
|
||||
"CC=" + compiler_gcc_prefix + "gcc", "CXX=" + compiler_gcc_prefix + "g++", "AR=" + compiler_gcc_prefix + "ar", "RANLIB=" + compiler_gcc_prefix + "ranlib"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
base.create_dir(base_dir + "/linux_arm64")
|
||||
@ -87,6 +175,8 @@ def make():
|
||||
base.copy_file(base_dir + "/icu/linux_arm64_install/lib/libicudata.so." + icu_major + "." + icu_minor, base_dir + "/linux_arm64/build/libicudata.so." + icu_major)
|
||||
base.copy_file(base_dir + "/icu/linux_arm64_install/lib/libicuuc.so." + icu_major + "." + icu_minor, base_dir + "/linux_arm64/build/libicuuc.so." + icu_major)
|
||||
base.copy_dir(base_dir + "/icu/linux_arm64_install/include", base_dir + "/linux_arm64/build/include")
|
||||
if config.option("sysroot") != "":
|
||||
base.restore_sysroot_env()
|
||||
|
||||
os.chdir("../..")
|
||||
|
||||
@ -103,6 +193,6 @@ def make():
|
||||
if (-1 != config.option("platform").find("ios")):
|
||||
if not base.is_dir("build"):
|
||||
base.bash("./icu_ios")
|
||||
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
@ -1,172 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
|
||||
|
||||
toolshains_dir = current_dir + "/toolchains"
|
||||
icu_major = "58"
|
||||
icu_minor = "2"
|
||||
icu_is_shared = False
|
||||
|
||||
current_path = base.get_env("PATH")
|
||||
|
||||
platforms = {
|
||||
"arm64" : {
|
||||
"arch" : "aarch64-linux-android",
|
||||
"bin" : "aarch64-linux-android"
|
||||
},
|
||||
"arm" : {
|
||||
"arch" : "arm-linux-androideabi",
|
||||
"bin" : "arm-linux-androideabi"
|
||||
},
|
||||
"x86_64" : {
|
||||
"arch" : "x86_64-linux-android",
|
||||
"bin" : "x86_64-linux-android"
|
||||
},
|
||||
"x86" : {
|
||||
"arch" : "x86-linux-android",
|
||||
"bin" : "i686-linux-android"
|
||||
}
|
||||
}
|
||||
|
||||
def build_arch(arch, api_version):
|
||||
print("icu build: " + arch + " ----------------------------------------")
|
||||
|
||||
if base.is_dir(current_dir + "/icu/" + arch):
|
||||
base.delete_dir(current_dir + "/icu/" + arch)
|
||||
base.create_dir(current_dir + "/icu/" + arch)
|
||||
os.chdir(current_dir + "/icu/" + arch)
|
||||
|
||||
base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
|
||||
"--platform=android-" + api_version,
|
||||
"--install-dir=" + current_dir + "/toolchain/" + arch,
|
||||
"--toolchain=" + platforms[arch]["arch"],
|
||||
"--force"
|
||||
])
|
||||
|
||||
base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)
|
||||
|
||||
command_args = "--prefix=" + current_dir + "/build_tmp/" + arch + " --host=!!!MASK!!! --with-cross-build=" + current_dir + "/icu/cross_build CFLAGS=-Os CXXFLAGS=--std=c++11 CC=!!!MASK!!!-clang CXX=!!!MASK!!!-clang++ AR=!!!MASK!!!-ar RANLIB=!!!MASK!!!-ranlib"
|
||||
if not icu_is_shared:
|
||||
command_args += " --enable-static --enable-shared=no --with-data-packaging=archive CFLAGS=-fPIC CXXFLAGS=-fPIC"
|
||||
command_args = command_args.replace("!!!MASK!!!", platforms[arch]["bin"])
|
||||
|
||||
base.cmd("../source/configure", command_args.split())
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
base.set_env("PATH", current_path)
|
||||
os.chdir(current_dir)
|
||||
|
||||
return
|
||||
|
||||
def make():
|
||||
if not base.is_dir(current_dir):
|
||||
base.create_dir(current_dir)
|
||||
|
||||
if base.is_dir(current_dir + "/build"):
|
||||
return
|
||||
|
||||
current_dir_old = os.getcwd()
|
||||
|
||||
print("[fetch & build]: icu_android")
|
||||
os.chdir(current_dir)
|
||||
|
||||
if not base.is_dir("icu"):
|
||||
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
|
||||
if ("linux" == base.host_platform()):
|
||||
base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
|
||||
#if ("mac" == base.host_platform()):
|
||||
# base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
|
||||
|
||||
if not base.is_dir(current_dir + "/icu/cross_build"):
|
||||
base.create_dir(current_dir + "/icu/cross_build")
|
||||
os.chdir(current_dir + "/icu/cross_build")
|
||||
base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
|
||||
"--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
|
||||
os.chdir(current_dir)
|
||||
|
||||
build_arch("arm64", "21")
|
||||
build_arch("arm", "16")
|
||||
build_arch("x86_64","21")
|
||||
build_arch("x86", "16")
|
||||
|
||||
os.chdir(current_dir)
|
||||
|
||||
base.create_dir(current_dir + "/build")
|
||||
base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")
|
||||
|
||||
if icu_is_shared:
|
||||
base.create_dir(current_dir + "/build/arm64_v8a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")
|
||||
|
||||
base.create_dir(current_dir + "/build/armv7")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86_64")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")
|
||||
|
||||
# patch elf information
|
||||
os.chdir(current_dir + "/build")
|
||||
base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
|
||||
os.chdir("./patchelf")
|
||||
base.cmd("./bootstrap.sh")
|
||||
base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
|
||||
base.cmd("make")
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../arm64_v8a/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../arm64_v8a/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../arm64_v8a/libicuuc.so"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../armv7/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../armv7/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../armv7/libicuuc.so"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86_64/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86_64/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86_64/libicuuc.so"])
|
||||
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86/libicudata.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86/libicuuc.so"])
|
||||
base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86/libicuuc.so"])
|
||||
|
||||
base.delete_dir(current_dir + "/build/patchelf")
|
||||
|
||||
if not icu_is_shared:
|
||||
base.create_dir(current_dir + "/build/arm64_v8a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.a", current_dir + "/build/arm64_v8a/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.a", current_dir + "/build/arm64_v8a/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/arm64/data/out/icudt58l.dat", current_dir + "/build/arm64_v8a/icudt58l.dat")
|
||||
|
||||
base.create_dir(current_dir + "/build/armv7")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.a", current_dir + "/build/armv7/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.a", current_dir + "/build/armv7/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/arm/data/out/icudt58l.dat", current_dir + "/build/armv7/icudt58l.dat")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86_64")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.a", current_dir + "/build/x86_64/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.a", current_dir + "/build/x86_64/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/x86_64/data/out/icudt58l.dat", current_dir + "/build/x86_64/icudt58l.dat")
|
||||
|
||||
base.create_dir(current_dir + "/build/x86")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.a", current_dir + "/build/x86/libicudata.a")
|
||||
base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.a", current_dir + "/build/x86/libicuuc.a")
|
||||
base.copy_file(current_dir + "/icu/x86/data/out/icudt58l.dat", current_dir + "/build/x86/icudt58l.dat")
|
||||
|
||||
os.chdir(current_dir_old)
|
||||
return
|
||||
@ -5,36 +5,28 @@ sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
def change_icu_defs(current_dir, arch):
|
||||
icudef_file = current_dir + "/icudefs.mk"
|
||||
icudef_file_old = current_dir + "/icudefs.mk.back"
|
||||
def change_icu_defs(arch):
|
||||
old_env = dict(os.environ)
|
||||
|
||||
param = "-arch x86_64"
|
||||
if arch == "arm64":
|
||||
param = "-arch arm64 -isysroot " + base.find_mac_sdk()
|
||||
param = "-arch arm64"
|
||||
|
||||
param += " -isysroot " + base.find_mac_sdk()
|
||||
param += " -mmacosx-version-min=10.12"
|
||||
|
||||
base.copy_file(icudef_file, icudef_file_old)
|
||||
os.environ["CFLAGS"] = param
|
||||
os.environ["CXXFLAGS"] = param + " --std=c++11"
|
||||
os.environ["LDFLAGS"] = param
|
||||
|
||||
base.replaceInFile(icudef_file, "CFLAGS = ", "CFLAGS = " + param + " ")
|
||||
base.replaceInFile(icudef_file, "CXXFLAGS = ", "CXXFLAGS = " + param + " ")
|
||||
base.replaceInFile(icudef_file, "RPATHLDFLAGS =", "RPATHLDFLAGS2 =")
|
||||
base.replaceInFile(icudef_file, "LDFLAGS = ", "LDFLAGS = " + param + " ")
|
||||
base.replaceInFile(icudef_file, "RPATHLDFLAGS2 =", "RPATHLDFLAGS =")
|
||||
return old_env
|
||||
|
||||
def restore_icu_defs(old_env):
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
def restore_icu_defs(current_dir):
|
||||
icudef_file = current_dir + "/icudefs.mk"
|
||||
icudef_file_old = current_dir + "/icudefs.mk.back"
|
||||
|
||||
base.delete_file(icudef_file)
|
||||
base.copy_file(icudef_file_old, icudef_file)
|
||||
base.delete_file(icudef_file_old)
|
||||
return
|
||||
|
||||
icu_major = "58"
|
||||
icu_major = "74"
|
||||
icu_minor = "2"
|
||||
|
||||
current_dir_old = os.getcwd()
|
||||
@ -46,29 +38,33 @@ if not base.is_dir(current_dir + "/mac_cross_64"):
|
||||
base.create_dir(current_dir + "/mac_cross_64")
|
||||
os.chdir(current_dir + "/mac_cross_64")
|
||||
|
||||
base.cmd("../icu/source/runConfigureICU", ["MacOSX",
|
||||
"--prefix=" + current_dir + "/mac_cross_64", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
|
||||
old_env = change_icu_defs("x86_64")
|
||||
|
||||
change_icu_defs(current_dir + "/mac_cross_64", "x86_64")
|
||||
base.cmd("../icu/source/runConfigureICU", ["MacOSX",
|
||||
"--prefix=" + current_dir + "/mac_64_install", "--enable-static"])
|
||||
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
|
||||
restore_icu_defs(current_dir + "/mac_cross_64")
|
||||
restore_icu_defs(old_env)
|
||||
|
||||
os.chdir(current_dir)
|
||||
|
||||
os.chdir(current_dir + "/icu/source")
|
||||
|
||||
base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm_64",
|
||||
"--with-cross-build=" + current_dir + "/mac_cross_64", "VERBOSE=1"])
|
||||
old_env = change_icu_defs("arm64")
|
||||
|
||||
change_icu_defs(current_dir + "/icu/source", "arm64")
|
||||
addon = []
|
||||
if not base.is_os_arm():
|
||||
addon = ["--host=aarch64-apple-darwin"]
|
||||
|
||||
base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm64_install",
|
||||
"--with-cross-build=" + current_dir + "/mac_cross_64", "--enable-static", "VERBOSE=1"] + addon)
|
||||
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"])
|
||||
|
||||
restore_icu_defs(current_dir + "/icu/source")
|
||||
restore_icu_defs(old_env)
|
||||
|
||||
os.chdir(current_dir)
|
||||
|
||||
@ -84,15 +80,26 @@ base.create_dir(current_dir + "/mac_64/build")
|
||||
base.create_dir(current_dir + "/mac_arm64")
|
||||
base.create_dir(current_dir + "/mac_arm64/build")
|
||||
|
||||
base.copy_dir(current_dir + "/mac_cross_64/include", current_dir + "/mac_64/build/include")
|
||||
base.copy_file(current_dir + "/mac_cross_64/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicudata." + icu_major + ".dylib")
|
||||
base.copy_file(current_dir + "/mac_cross_64/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicuuc." + icu_major + ".dylib")
|
||||
base.copy_dir(current_dir + "/mac_64_install/include", current_dir + "/mac_64/build/include")
|
||||
# copy shared libs
|
||||
base.copy_file(current_dir + "/mac_64_install/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicudata." + icu_major + ".dylib")
|
||||
base.copy_file(current_dir + "/mac_64_install/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicuuc." + icu_major + ".dylib")
|
||||
# copy static libs
|
||||
base.copy_file(current_dir + "/mac_64_install/lib/libicudata.a", current_dir + "/mac_64/build")
|
||||
base.copy_file(current_dir + "/mac_64_install/lib/libicui18n.a", current_dir + "/mac_64/build")
|
||||
base.copy_file(current_dir + "/mac_64_install/lib/libicuuc.a", current_dir + "/mac_64/build")
|
||||
|
||||
base.copy_dir(current_dir + "/mac_arm_64/include", current_dir + "/mac_arm64/build/include")
|
||||
base.copy_file(current_dir + "/mac_arm_64/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicudata." + icu_major + ".dylib")
|
||||
base.copy_file(current_dir + "/mac_arm_64/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicuuc." + icu_major + ".dylib")
|
||||
base.copy_dir(current_dir + "/mac_arm64_install/include", current_dir + "/mac_arm64/build/include")
|
||||
# copy shared libs
|
||||
base.copy_file(current_dir + "/mac_arm64_install/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicudata." + icu_major + ".dylib")
|
||||
base.copy_file(current_dir + "/mac_arm64_install/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicuuc." + icu_major + ".dylib")
|
||||
# copy static libs
|
||||
base.copy_file(current_dir + "/mac_arm64_install/lib/libicudata.a", current_dir + "/mac_arm64/build")
|
||||
base.copy_file(current_dir + "/mac_arm64_install/lib/libicui18n.a", current_dir + "/mac_arm64/build")
|
||||
base.copy_file(current_dir + "/mac_arm64_install/lib/libicuuc.a", current_dir + "/mac_arm64/build")
|
||||
|
||||
base.delete_dir(current_dir + "/mac_cross_64")
|
||||
base.delete_dir(current_dir + "/mac_arm_64")
|
||||
base.delete_dir(current_dir + "/mac_64_install")
|
||||
base.delete_dir(current_dir + "/mac_arm64_install")
|
||||
|
||||
os.chdir(current_dir_old)
|
||||
|
||||
39
scripts/core_common/modules/iwork.py
Normal file
39
scripts/core_common/modules/iwork.py
Normal file
@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def clear_module():
|
||||
directories = ["glm", "libetonyek", "libodfgen", "librevenge", "mdds"]
|
||||
|
||||
for dir in directories:
|
||||
if base.is_dir(dir):
|
||||
base.delete_dir_with_access_error(dir)
|
||||
|
||||
def make(use_gperf = True):
|
||||
old_cur_dir = os.getcwd()
|
||||
|
||||
print("[fetch & build]: iwork")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/apple"
|
||||
|
||||
os.chdir(base_dir)
|
||||
base.check_module_version("4", clear_module)
|
||||
os.chdir(old_cur_dir)
|
||||
|
||||
cmd_args = ["fetch.py"]
|
||||
|
||||
if use_gperf:
|
||||
cmd_args.append("--gperf")
|
||||
|
||||
base.cmd_in_dir(base_dir, "python", cmd_args)
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make(False)
|
||||
|
||||
@ -126,11 +126,14 @@ def make():
|
||||
def param_apple(platform, arch):
|
||||
return ["-G","Xcode", "-DDEPLOYMENT_TARGET=10", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]
|
||||
|
||||
def param_apple_ios(platform, arch, params=[]):
|
||||
return params + ["-G","Xcode", "-DDEPLOYMENT_TARGET=11", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]
|
||||
|
||||
if(platform == "ios"):
|
||||
build_arch("ios", "armv7", param_apple("OS", "armv7"))
|
||||
build_arch("ios", "arm64", param_apple("OS64", "arm64"))
|
||||
build_arch("ios", "i386", param_apple("SIMULATOR", "i386"))
|
||||
build_arch("ios", "x86_64", param_apple("SIMULATOR64", "x86_64"))
|
||||
#build_arch("ios", "armv7", param_apple("OS", "armv7"))
|
||||
build_arch("ios", "arm64", param_apple_ios("OS64", "arm64"))
|
||||
#build_arch("ios", "i386", param_apple_ios("SIMULATOR", "i386"))
|
||||
build_arch("ios", "x86_64", param_apple_ios("SIMULATOR64", "x86_64", ["-DCMAKE_CXX_FLAGS=-std=c++11"]))
|
||||
else:
|
||||
build_arch("mac", "mac_arm64", param_apple("MAC_ARM64", "arm64"))
|
||||
build_arch("mac", "mac_64", param_apple("MAC", "x86_64"))
|
||||
@ -144,7 +147,7 @@ def make():
|
||||
#copy include
|
||||
prefix_dir = current_dir + "/IXWebSocket/build/ios/"
|
||||
postfix_dir = ""
|
||||
if base.is_dir(prefix_dir + "armv7/usr"):
|
||||
if base.is_dir(prefix_dir + "arm64/usr"):
|
||||
postfix_dir = "/usr"
|
||||
|
||||
if base.is_dir(prefix_dir + "armv7" + postfix_dir + "/include"):
|
||||
@ -157,10 +160,16 @@ def make():
|
||||
base.cmd("cp", [ "-r", prefix_dir + "x86_64" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])
|
||||
|
||||
# Create fat lib
|
||||
base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
|
||||
"IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
|
||||
"-create", "-output",
|
||||
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
|
||||
if (True):
|
||||
base.cmd("lipo", ["IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
|
||||
"IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
|
||||
"-create", "-output",
|
||||
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
|
||||
else:
|
||||
base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
|
||||
"IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
|
||||
"-create", "-output",
|
||||
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
|
||||
|
||||
|
||||
elif (-1 != config.option("platform").find("linux")):
|
||||
|
||||
124
scripts/core_common/modules/libvlc.py
Normal file
124
scripts/core_common/modules/libvlc.py
Normal file
@ -0,0 +1,124 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
def docker_build(image_name, dockerfile_dir, base_dir):
|
||||
base.cmd("docker", ["build", "-t", image_name, dockerfile_dir])
|
||||
vlc_dir = base_dir + "/vlc"
|
||||
base.cmd("docker", ["run", "--rm", "-v", vlc_dir + ":/vlc", image_name])
|
||||
base.cmd("docker", ["image", "rm", image_name])
|
||||
return
|
||||
|
||||
def form_build_win(src_dir, dest_dir):
|
||||
if not base.is_dir(dest_dir):
|
||||
base.create_dir(dest_dir)
|
||||
# copy include dir
|
||||
base.copy_dir(src_dir + "/sdk/include", dest_dir + "/include")
|
||||
# form lib dir
|
||||
base.create_dir(dest_dir + "/lib")
|
||||
base.copy_file(src_dir + "/sdk/lib/libvlc.lib", dest_dir + "/lib/vlc.lib")
|
||||
base.copy_file(src_dir + "/sdk/lib/libvlccore.lib", dest_dir + "/lib/vlccore.lib")
|
||||
base.copy_dir(src_dir + "/plugins", dest_dir + "/lib/plugins")
|
||||
base.copy_file(src_dir + "/libvlc.dll", dest_dir + "/lib")
|
||||
base.copy_file(src_dir + "/libvlccore.dll", dest_dir + "/lib")
|
||||
base.copy_file(src_dir + "/vlc-cache-gen.exe", dest_dir + "/lib")
|
||||
# generate cache file 'plugins.dat' for plugins loading
|
||||
base.cmd_exe(dest_dir + "/lib/vlc-cache-gen", [dest_dir + "/lib/plugins"])
|
||||
return
|
||||
|
||||
def form_build_linux(src_dir, dest_dir):
|
||||
if not base.is_dir(dest_dir):
|
||||
base.create_dir(dest_dir)
|
||||
# copy include dir
|
||||
base.copy_dir(src_dir + "/include", dest_dir + "/include")
|
||||
# copy and form lib dir
|
||||
base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
|
||||
base.delete_dir(dest_dir + "/lib/pkgconfig")
|
||||
base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
|
||||
|
||||
def form_build_mac(src_dir, dest_dir):
|
||||
if not base.is_dir(dest_dir):
|
||||
base.create_dir(dest_dir)
|
||||
# copy include dir
|
||||
base.copy_dir(src_dir + "/include", dest_dir + "/include")
|
||||
# copy and form lib dir
|
||||
base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
|
||||
base.cmd("find", [dest_dir + "/lib", "-name", "\"*.la\"", "-type", "f", "-delete"])
|
||||
base.delete_dir(dest_dir + "/lib/pkgconfig")
|
||||
base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
|
||||
# generate cache file 'plugins.dat' for plugins loading
|
||||
base.run_command("DYLD_LIBRARY_PATH=" + dest_dir + "/lib " + dest_dir + "/lib/vlc/vlc-cache-gen " + dest_dir + "/lib/vlc/plugins")
|
||||
return
|
||||
|
||||
def make():
|
||||
|
||||
print("[fetch & build]: libvlc")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/libvlc"
|
||||
vlc_dir = base_dir + "/vlc"
|
||||
vlc_version = "3.0.18"
|
||||
|
||||
tools_dir = base.get_script_dir() + "/../tools"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
if not base.is_dir(vlc_dir):
|
||||
# temporary disable auto CRLF for Windows
|
||||
if "windows" == base.host_platform():
|
||||
autocrlf_old = base.run_command("git config --global core.autocrlf")['stdout']
|
||||
base.cmd("git", ["config", "--global", "core.autocrlf", "false"])
|
||||
base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
|
||||
if "windows" == base.host_platform():
|
||||
base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
|
||||
|
||||
base.create_dir("build")
|
||||
base.copy_file("tools/ignore-cache-time.patch", "vlc")
|
||||
|
||||
# windows
|
||||
if "windows" == base.host_platform():
|
||||
if config.check_option("platform", "win_64"):
|
||||
base.copy_file("tools/win_64/build.patch", "vlc")
|
||||
docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
|
||||
form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
|
||||
|
||||
if config.check_option("platform", "win_32"):
|
||||
base.copy_file("tools/win_32/build.patch", "vlc")
|
||||
docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
|
||||
form_build_win(vlc_dir + "/build/win32/vlc-" + vlc_version, base_dir + "/build/win_32")
|
||||
|
||||
# linux
|
||||
if config.check_option("platform", "linux_64"):
|
||||
base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
|
||||
base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
|
||||
docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
|
||||
form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
|
||||
|
||||
# mac
|
||||
if "mac" == base.host_platform():
|
||||
os.chdir(vlc_dir)
|
||||
|
||||
base.cmd("git", ["restore", "src/modules/bank.c"])
|
||||
base.cmd("patch", ["-p1", "src/modules/bank.c", "../tools/ignore-cache-time.patch"])
|
||||
|
||||
if config.check_option("platform", "mac_64"):
|
||||
base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
|
||||
base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_64/build.patch"])
|
||||
base.create_dir("build/mac_64")
|
||||
os.chdir("build/mac_64")
|
||||
base.cmd("../../extras/package/macosx/build.sh", ["-c"])
|
||||
form_build_mac(vlc_dir + "/build/mac_64/vlc_install_dir", base_dir + "/build/mac_64")
|
||||
|
||||
if config.check_option("platform", "mac_arm64"):
|
||||
base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
|
||||
base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_arm64/build.patch"])
|
||||
base.create_dir("build/mac_arm64")
|
||||
os.chdir("build/mac_arm64")
|
||||
base.cmd("../../extras/package/macosx/build.sh", ["-c"])
|
||||
form_build_mac(vlc_dir + "/build/mac_arm64/vlc_install_dir", base_dir + "/build/mac_arm64")
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
20
scripts/core_common/modules/md.py
Normal file
20
scripts/core_common/modules/md.py
Normal file
@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def make():
|
||||
print("[fetch]: md")
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/md"
|
||||
|
||||
base.cmd_in_dir(base_dir, "python", ["fetch.py"])
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make()
|
||||
15
scripts/core_common/modules/oo_brotli.py
Normal file
15
scripts/core_common/modules/oo_brotli.py
Normal file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: brotli")
|
||||
base.cmd_in_dir(base.get_script_dir() + "/../../core/Common/3dParty/brotli", "./make.py")
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make()
|
||||
@ -19,15 +19,16 @@ def make():
|
||||
|
||||
print("[fetch & build]: openssl")
|
||||
|
||||
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
|
||||
openssl_mobile.make()
|
||||
return
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
base.common_check_version("openssl", "3", clean)
|
||||
base.common_check_version("openssl", "4", clean)
|
||||
|
||||
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
|
||||
os.chdir(old_cur)
|
||||
openssl_mobile.make()
|
||||
return
|
||||
|
||||
if not base.is_dir("openssl"):
|
||||
base.cmd("git", ["clone", "--depth=1", "--branch", "OpenSSL_1_1_1f", "https://github.com/openssl/openssl.git"])
|
||||
@ -41,7 +42,7 @@ def make():
|
||||
base.create_dir("./../build/win_64")
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64")
|
||||
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64 --openssldir=" + old_cur_dir + "\\build\\win_64 no-shared no-asm")
|
||||
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64 --openssldir=" + old_cur_dir + "\\build\\win_64 no-shared no-asm enable-md2")
|
||||
qmake_bat.append("call nmake clean")
|
||||
qmake_bat.append("call nmake build_libs install")
|
||||
base.run_as_bat(qmake_bat, True)
|
||||
@ -49,7 +50,15 @@ def make():
|
||||
base.create_dir("./../build/win_32")
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x86")
|
||||
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32 --openssldir=" + old_cur_dir + "\\build\\win_32 no-shared no-asm")
|
||||
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32 --openssldir=" + old_cur_dir + "\\build\\win_32 no-shared no-asm enable-md2")
|
||||
qmake_bat.append("call nmake clean")
|
||||
qmake_bat.append("call nmake build_libs install")
|
||||
base.run_as_bat(qmake_bat, True)
|
||||
if (-1 != config.option("platform").find("win_arm64")) and not base.is_dir("../build/win_arm64"):
|
||||
base.create_dir("./../build/win_arm64")
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64_arm64")
|
||||
qmake_bat.append("perl Configure VC-WIN64-ARM --prefix=" + old_cur_dir + "\\build\\win_arm64 --openssldir=" + old_cur_dir + "\\build\\win_arm64 no-shared no-asm enable-md2")
|
||||
qmake_bat.append("call nmake clean")
|
||||
qmake_bat.append("call nmake build_libs install")
|
||||
base.run_as_bat(qmake_bat, True)
|
||||
@ -62,7 +71,7 @@ def make():
|
||||
base.create_dir("./../build/win_64_xp")
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64")
|
||||
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64_xp --openssldir=" + old_cur_dir + "\\build\\win_64_xp no-shared no-asm no-async")
|
||||
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64_xp --openssldir=" + old_cur_dir + "\\build\\win_64_xp no-shared no-asm no-async enable-md2")
|
||||
qmake_bat.append("call nmake clean")
|
||||
qmake_bat.append("call nmake build_libs install")
|
||||
base.run_as_bat(qmake_bat, True)
|
||||
@ -70,7 +79,7 @@ def make():
|
||||
base.create_dir("./../build/win_32_xp")
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x86")
|
||||
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32_xp --openssldir=" + old_cur_dir + "\\build\\win_32_xp no-shared no-asm no-async")
|
||||
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32_xp --openssldir=" + old_cur_dir + "\\build\\win_32_xp no-shared no-asm no-async enable-md2")
|
||||
qmake_bat.append("call nmake clean")
|
||||
qmake_bat.append("call nmake build_libs install")
|
||||
base.run_as_bat(qmake_bat, True)
|
||||
@ -78,30 +87,50 @@ def make():
|
||||
# -------------------------------------------------------------------------------------------------------
|
||||
return
|
||||
|
||||
if (-1 != config.option("platform").find("linux")) and not base.is_dir("../build/linux_64"):
|
||||
base.cmd("./config", ["no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/linux_64", "--openssldir=" + old_cur_dir + "/build/linux_64"])
|
||||
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.cmd("make")
|
||||
base.cmd("make", ["install"])
|
||||
# TODO: support x86
|
||||
if (-1 != config.option("platform").find("linux")) and not base.is_dir("../build/linux_64"):
|
||||
base.cmd("./config", ["enable-md2", "no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/linux_64", "--openssldir=" + old_cur_dir + "/build/linux_64"])
|
||||
if "1" == config.option("use-clang"):
|
||||
base.replaceInFile("./Makefile", "CC=$(CROSS_COMPILE)gcc", "CC=$(CROSS_COMPILE)clang")
|
||||
base.replaceInFile("./Makefile", "CXX=$(CROSS_COMPILE)g++", "CXX=$(CROSS_COMPILE)clang++")
|
||||
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden -stdlib=libc++")
|
||||
base.replaceInFile("./Makefile", "LDFLAGS=", "LDFLAGS=-stdlib=libc++")
|
||||
elif config.option("sysroot") == "":
|
||||
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
else:
|
||||
base.replaceInFile("./Makefile", "CROSS_COMPILE=", "CROSS_COMPILE=" + config.get_custom_sysroot_bin("linux_64") + "/")
|
||||
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden --sysroot=" + config.option("sysroot_linux_64"))
|
||||
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden --sysroot=" + config.option("sysroot_linux_64"))
|
||||
|
||||
if config.option("sysroot") == "":
|
||||
base.cmd("make", [])
|
||||
base.cmd("make", ["install"])
|
||||
base.cmd("make", ["clean"], True)
|
||||
else:
|
||||
base.set_sysroot_env("linux_64")
|
||||
base.cmd_exe("make", [])
|
||||
base.cmd_exe("make", ["install"])
|
||||
base.cmd_exe("make", ["clean"], True)
|
||||
base.restore_sysroot_env()
|
||||
|
||||
if (-1 != config.option("platform").find("linux_arm64")) and not base.is_dir("../build/linux_arm64"):
|
||||
if ("x86_64" != platform.machine()):
|
||||
if (base.is_os_arm()):
|
||||
base.copy_dir("../build/linux_64", "../build/linux_arm64")
|
||||
else:
|
||||
cross_compiler_arm64 = config.option("arm64-toolchain-bin")
|
||||
if ("" == cross_compiler_arm64):
|
||||
cross_compiler_arm64 = "/usr/bin"
|
||||
cross_compiler_arm64_prefix = cross_compiler_arm64 + "/" + base.get_prefix_cross_compiler_arm64()
|
||||
base.cmd("./Configure", ["linux-aarch64", "--cross-compile-prefix=" + cross_compiler_arm64_prefix, "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
|
||||
if config.option("sysroot") != "":
|
||||
base.set_sysroot_env("linux_arm64")
|
||||
base.cmd("/usr/bin/perl", ["./Configure", "linux-aarch64", "enable-md2", "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
|
||||
#base.cmd("./Configure", ["linux-aarch64", "enable-md2", "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
|
||||
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.cmd("make", [], True)
|
||||
base.cmd("make", ["install"], True)
|
||||
if config.option("sysroot") != "":
|
||||
base.restore_sysroot_env()
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
|
||||
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
|
||||
base.cmd("./Configure", ["enable-md2", "no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
|
||||
base.cmd("make", ["build_libs", "install"])
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_arm64"):
|
||||
@ -119,7 +148,7 @@ def make():
|
||||
},\n\
|
||||
\"darwin64-x86_64-cc\" => {"
|
||||
base.replaceInFile(base_dir + "/openssl2/Configurations/10-main.conf", replace1, replace2)
|
||||
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
|
||||
base.cmd("./Configure", ["enable-md2", "no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
|
||||
base.cmd("make", ["build_libs", "install"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
@ -1,18 +1,19 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
sys.path.append('android')
|
||||
import base
|
||||
import config
|
||||
import os
|
||||
import subprocess
|
||||
import openssl_android
|
||||
|
||||
def make():
|
||||
path = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(path)
|
||||
base.set_env("ANDROID_HOME", base.get_android_sdk_home())
|
||||
|
||||
if (-1 != config.option("platform").find("android") and not base.is_dir("./build/android")):
|
||||
subprocess.call(["./build-android-openssl.sh"])
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
openssl_android.make()
|
||||
|
||||
if (-1 != config.option("platform").find("ios") and not base.is_dir("./build/ios")):
|
||||
subprocess.call(["./build-ios-openssl.sh"])
|
||||
|
||||
74
scripts/core_common/modules/socket_io.py
Normal file
74
scripts/core_common/modules/socket_io.py
Normal file
@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
import glob
|
||||
|
||||
def clean():
|
||||
if base.is_dir("socket.io-client-cpp"):
|
||||
base.delete_dir_with_access_error("socket.io-client-cpp")
|
||||
return
|
||||
|
||||
def correct_namespace(dir):
|
||||
folder = dir
|
||||
if ("/" != folder[-1:]):
|
||||
folder += "/"
|
||||
folder += "*"
|
||||
for file in glob.glob(folder):
|
||||
if base.is_file(file):
|
||||
base.replaceInFile(file, "namespace sio", "namespace sio_no_tls")
|
||||
base.replaceInFile(file, "asio::", "asio_no_tls::")
|
||||
base.replaceInFile(file, "sio::", "sio_no_tls::")
|
||||
base.replaceInFile(file, "asio_no_tls::", "asio::")
|
||||
elif base.is_dir(file):
|
||||
correct_namespace(file)
|
||||
return
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
base.common_check_version("socketio", "2", clean)
|
||||
os.chdir(old_cur)
|
||||
|
||||
if not base.is_dir(base_dir + "/socket.io-client-cpp"):
|
||||
base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
|
||||
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
|
||||
# patches
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
|
||||
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_encode.patch")
|
||||
|
||||
# no tls realization (remove if socket.io fix this)
|
||||
dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"
|
||||
base.copy_dir(base_dir + "/socket.io-client-cpp/src", dst_dir)
|
||||
correct_namespace(dst_dir)
|
||||
base.replaceInFile(dst_dir + "/internal/sio_client_impl.h", "SIO_TLS", "SIO_TLS_NO")
|
||||
base.replaceInFile(dst_dir + "/internal/sio_client_impl.cpp", "SIO_TLS", "SIO_TLS_NO")
|
||||
|
||||
base.replaceInFile(dst_dir + "/sio_socket.h", "SIO_SOCKET_H", "SIO_SOCKET_NO_TLS_H")
|
||||
base.replaceInFile(dst_dir + "/sio_client.h", "SIO_CLIENT_H", "SIO_CLIENT_NO_TLS_H")
|
||||
base.replaceInFile(dst_dir + "/sio_message.h", "__SIO_MESSAGE_H__", "__SIO_MESSAGE_NO_TLS_H__")
|
||||
base.replaceInFile(dst_dir + "/internal/sio_packet.h", "SIO_PACKET_H", "SIO_PACKET_NO_TLS_H")
|
||||
|
||||
old_ping = " m_ping_timeout_timer->expires_from_now(milliseconds(m_ping_interval + m_ping_timeout), ec);"
|
||||
new_ping = "#if defined(PING_TIMEOUT_INTERVAL)\n"
|
||||
new_ping += " m_ping_timeout_timer->expires_from_now(milliseconds(PING_TIMEOUT_INTERVAL), ec);\n"
|
||||
new_ping += "#else\n"
|
||||
new_ping += old_ping
|
||||
new_ping += "\n#endif"
|
||||
|
||||
base.replaceInFile(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", old_ping, new_ping)
|
||||
base.replaceInFile(base_dir + "/socket.io-client-cpp/src_no_tls/internal/sio_client_impl.cpp", old_ping, new_ping)
|
||||
return
|
||||
@ -10,13 +10,17 @@ import config
|
||||
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketrocket"
|
||||
|
||||
def buildIOS():
|
||||
|
||||
# Build for iphone
|
||||
base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-devices.xcarchive", "-sdk", "iphoneos", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
|
||||
base.cmd("xcodebuild", ["-sdk", "iphoneos", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])
|
||||
|
||||
# Build for simulator
|
||||
base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-simulators.xcarchive", "-sdk", "iphonesimulator", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
|
||||
base.cmd("xcodebuild", ["-sdk", "iphonesimulator", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])
|
||||
|
||||
# Package xcframework
|
||||
base.cmd("xcodebuild", ["-create-xcframework", "-library", current_dir + "/build/SocketRocket-devices.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-library", current_dir + "/build/SocketRocket-simulators.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-output", current_dir + "/build/SocketRocket.xcframework"])
|
||||
|
||||
# Remove arm64 for simulator for SDK 14
|
||||
base.cmd("lipo", ["-remove", "arm64", "-output", "build/Release-iphonesimulator/libSocketRocket.a", "build/Release-iphonesimulator/libSocketRocket.a"])
|
||||
|
||||
|
||||
@ -10,10 +10,10 @@ import v8_89
|
||||
|
||||
def clean():
|
||||
if base.is_dir("depot_tools"):
|
||||
base.delete_dir_with_access_error("depot_tools");
|
||||
base.delete_dir_with_access_error("depot_tools")
|
||||
base.delete_dir("depot_tools")
|
||||
if base.is_dir("v8"):
|
||||
base.delete_dir_with_access_error("v8");
|
||||
base.delete_dir_with_access_error("v8")
|
||||
base.delete_dir("v8")
|
||||
if base.is_exist("./.gclient"):
|
||||
base.delete_file("./.gclient")
|
||||
@ -26,6 +26,8 @@ def clean():
|
||||
def is_main_platform():
|
||||
if (config.check_option("platform", "win_64") or config.check_option("platform", "win_32")):
|
||||
return True
|
||||
if (config.check_option("platform", "win_arm64")):
|
||||
return True
|
||||
if (config.check_option("platform", "linux_64") or config.check_option("platform", "linux_32") or config.check_option("platform", "linux_arm64")):
|
||||
return True
|
||||
if config.check_option("platform", "mac_64"):
|
||||
@ -42,14 +44,9 @@ def is_xp_platform():
|
||||
return False
|
||||
|
||||
def is_use_clang():
|
||||
gcc_version = base.get_gcc_version()
|
||||
|
||||
is_clang = "false"
|
||||
if (gcc_version >= 6000):
|
||||
is_clang = "true"
|
||||
|
||||
print("gcc version: " + str(gcc_version) + ", use clang:" + is_clang)
|
||||
return is_clang
|
||||
if config.option("sysroot") == "" and "1" == config.option("use-clang"):
|
||||
return "true"
|
||||
return "false"
|
||||
|
||||
def make():
|
||||
if not is_main_platform():
|
||||
@ -68,9 +65,9 @@ def make():
|
||||
if ("mac" == base.host_platform()) and (-1 == config.option("config").find("use_v8")):
|
||||
return
|
||||
|
||||
use_v8_89 = False
|
||||
if (-1 != config.option("config").lower().find("v8_version_89")):
|
||||
use_v8_89 = True
|
||||
use_v8_89 = True
|
||||
if config.check_option("config", "v8_version_60"):
|
||||
use_v8_89 = False
|
||||
|
||||
if (use_v8_89):
|
||||
v8_89.make()
|
||||
@ -93,6 +90,7 @@ def make():
|
||||
|
||||
if not base.is_dir("depot_tools"):
|
||||
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
|
||||
v8_89.change_bootstrap()
|
||||
if ("windows" == base.host_platform()):
|
||||
# hack for 32 bit system!!!
|
||||
if base.is_file("depot_tools/cipd.ps1"):
|
||||
@ -118,7 +116,7 @@ def make():
|
||||
# windows hack (delete later) ----------------------
|
||||
if ("windows" == base.host_platform()):
|
||||
base.delete_dir_with_access_error("v8/buildtools/win")
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
|
||||
base.cmd("gclient", ["sync", "--force"], True)
|
||||
else:
|
||||
base.cmd("gclient", ["sync"], True)
|
||||
@ -179,6 +177,7 @@ def make():
|
||||
base.cmd2("gn", ["gen", "out.gn/mac_64", "--args=\"is_debug=false " + base_args64 + "\""])
|
||||
base.cmd("ninja", ["-C", "out.gn/mac_64"])
|
||||
|
||||
# add enable_iterator_debugging=false for disable _ITERATOR_DEBUG_LEVEL
|
||||
if config.check_option("platform", "win_64"):
|
||||
if (-1 != config.option("config").lower().find("debug")):
|
||||
base.cmd2("gn", ["gen", "out.gn/win_64/debug", "--args=\"is_debug=true " + base_args64 + " is_clang=false\""])
|
||||
@ -224,13 +223,16 @@ def make_xp():
|
||||
|
||||
if not base.is_dir("depot_tools"):
|
||||
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
|
||||
v8_89.change_bootstrap()
|
||||
if ("windows" == base.host_platform()):
|
||||
# hack for 32 bit system!!!
|
||||
if base.is_file("depot_tools/cipd.ps1"):
|
||||
base.replaceInFile("depot_tools/cipd.ps1", "windows-386", "windows-amd64")
|
||||
|
||||
# old variant
|
||||
path_to_python2 = "/depot_tools/bootstrap-2@3_11_8_chromium_35_bin/python/bin"
|
||||
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
|
||||
base_dir + "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin",
|
||||
base_dir + path_to_python2,
|
||||
config.option("vs-path") + "/../Common7/IDE",
|
||||
os.environ["PATH"]])
|
||||
|
||||
@ -240,7 +242,7 @@ def make_xp():
|
||||
base.cmd("./depot_tools/fetch", ["v8"], True)
|
||||
base.cmd("./depot_tools/gclient", ["sync", "-r", "4.10.253"], True)
|
||||
base.delete_dir_with_access_error("v8/buildtools/win")
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
|
||||
base.cmd("gclient", ["sync", "--force"], True)
|
||||
|
||||
# save common py script
|
||||
@ -263,8 +265,16 @@ def make_xp():
|
||||
"for file in projects:",
|
||||
" replaceInFile(file, '<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>')",
|
||||
" replaceInFile(file, '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>')",
|
||||
]);
|
||||
])
|
||||
|
||||
programFilesDir = base.get_env("ProgramFiles")
|
||||
if ("" != base.get_env("ProgramFiles(x86)")):
|
||||
programFilesDir = base.get_env("ProgramFiles(x86)")
|
||||
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
|
||||
if (base.is_dir(dev_path)):
|
||||
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
|
||||
|
||||
# add "SET CL=\"/D_ITERATOR_DEBUG_LEVEL=0\"" before devenv for disable _ITERATOR_DEBUG_LEVEL in debug
|
||||
if config.check_option("platform", "win_64_xp"):
|
||||
if not base.is_dir("win_64/release"):
|
||||
base.run_as_bat(["call python v8/build/gyp_v8 -Dtarget_arch=x64", "call python v8/build/common_xp.py", "call devenv v8/tools/gyp/v8.sln /Rebuild Release"])
|
||||
|
||||
37
scripts/core_common/modules/v8_89.patch
Normal file
37
scripts/core_common/modules/v8_89.patch
Normal file
@ -0,0 +1,37 @@
|
||||
class StrongRootBlockAllocator {
|
||||
public:
|
||||
using pointer = Address*;
|
||||
using const_pointer = const Address*;
|
||||
using reference = Address&;
|
||||
using const_reference = const Address&;
|
||||
using value_type = Address;
|
||||
using size_type = size_t;
|
||||
using difference_type = ptrdiff_t;
|
||||
template <class U>
|
||||
struct rebind;
|
||||
|
||||
explicit StrongRootBlockAllocator(Heap* heap) : heap_(heap) {}
|
||||
|
||||
Address* allocate(size_t n);
|
||||
void deallocate(Address* p, size_t n) noexcept;
|
||||
|
||||
private:
|
||||
Heap* heap_;
|
||||
};
|
||||
|
||||
// Rebinding to Address gives another StrongRootBlockAllocator.
|
||||
template <>
|
||||
struct StrongRootBlockAllocator::rebind<Address> {
|
||||
using other = StrongRootBlockAllocator;
|
||||
};
|
||||
|
||||
// Rebinding to something other than Address gives a std::allocator that
|
||||
// is copy-constructable from StrongRootBlockAllocator.
|
||||
template <class U>
|
||||
struct StrongRootBlockAllocator::rebind {
|
||||
class other : public std::allocator<U> {
|
||||
public:
|
||||
// NOLINTNEXTLINE
|
||||
other(const StrongRootBlockAllocator&) {}
|
||||
};
|
||||
};
|
||||
@ -7,6 +7,77 @@ import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def clean():
|
||||
if base.is_dir("depot_tools"):
|
||||
base.delete_dir_with_access_error("depot_tools")
|
||||
base.delete_dir("depot_tools")
|
||||
if base.is_dir("v8"):
|
||||
base.delete_dir_with_access_error("v8")
|
||||
base.delete_dir("v8")
|
||||
if base.is_exist("./.gclient"):
|
||||
base.delete_file("./.gclient")
|
||||
if base.is_exist("./.gclient_entries"):
|
||||
base.delete_file("./.gclient_entries")
|
||||
if base.is_exist("./.gclient_previous_sync_commits"):
|
||||
base.delete_file("./.gclient_previous_sync_commits")
|
||||
if base.is_exist("./.gcs_entries"):
|
||||
base.delete_file("./.gcs_entries")
|
||||
if base.is_exist("./.cipd"):
|
||||
base.delete_dir("./.cipd")
|
||||
return
|
||||
|
||||
def change_bootstrap():
|
||||
base.move_file("./depot_tools/bootstrap/manifest.txt", "./depot_tools/bootstrap/manifest.txt.bak")
|
||||
content = "# changed by build_tools\n\n"
|
||||
content += "$VerifiedPlatform windows-amd64 windows-arm64 linux-amd64 mac-amd64 mac-arm64\n\n"
|
||||
|
||||
content += "@Subdir python\n"
|
||||
content += "infra/3pp/tools/cpython/${platform} version:2@2.7.18.chromium.39\n\n"
|
||||
|
||||
content += "@Subdir python3\n"
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
content += "infra/3pp/tools/cpython3/${platform} version:2@3.11.8.chromium.35\n\n"
|
||||
else:
|
||||
content += "infra/3pp/tools/cpython3/${platform} version:2@3.8.10.chromium.23\n\n"
|
||||
|
||||
content += "@Subdir git\n"
|
||||
content += "infra/3pp/tools/git/${platform} version:2@2.41.0.chromium.11\n"
|
||||
|
||||
base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
|
||||
"raise subprocess.CalledProcessError(proc.returncode, argv, None)", "return")
|
||||
|
||||
base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
|
||||
" _win_git_bootstrap_config()", " #_win_git_bootstrap_config()")
|
||||
|
||||
base.writeFile("./depot_tools/bootstrap/manifest.txt", content)
|
||||
return
|
||||
|
||||
def is_ubuntu_24_or_higher():
|
||||
try:
|
||||
with open('/etc/os-release') as f:
|
||||
for line in f:
|
||||
if line.startswith('VERSION_ID='):
|
||||
version = line.split('=')[1].strip().strip('"')
|
||||
return float(version) >= 24
|
||||
except:
|
||||
pass
|
||||
return False
|
||||
|
||||
def fix_ubuntu24():
|
||||
#if not is_ubuntu_24_or_higher():
|
||||
# return
|
||||
|
||||
if "" == config.option("sysroot"):
|
||||
return
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir("third_party/llvm-build/Release+Asserts/lib")
|
||||
base.cmd("mv", ["libstdc++.so.6", "libstdc++.so.6.old"])
|
||||
base.cmd("ln", ["-s", "/usr/lib/x86_64-linux-gnu/libstdc++.so.6", "libstdc++.so.6"])
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
def make_args(args, platform, is_64=True, is_debug=False):
|
||||
args_copy = args[:]
|
||||
if is_64:
|
||||
@ -20,36 +91,117 @@ def make_args(args, platform, is_64=True, is_debug=False):
|
||||
args_copy = args[:]
|
||||
args_copy.append("target_cpu=\\\"arm64\\\"")
|
||||
args_copy.append("v8_target_cpu=\\\"arm64\\\"")
|
||||
args_copy.append("use_sysroot=true")
|
||||
|
||||
|
||||
if (platform == "win_arm64"):
|
||||
args_copy = args[:]
|
||||
args_copy.append("target_cpu=\\\"arm64\\\"")
|
||||
args_copy.append("v8_target_cpu=\\\"arm64\\\"")
|
||||
args_copy.append("is_clang=false")
|
||||
|
||||
if is_debug:
|
||||
args_copy.append("is_debug=true")
|
||||
if (platform == "windows"):
|
||||
args_copy.append("enable_iterator_debugging=true")
|
||||
else:
|
||||
args_copy.append("is_debug=false")
|
||||
|
||||
if (platform == "linux"):
|
||||
args_copy.append("is_clang=true")
|
||||
args_copy.append("use_sysroot=false")
|
||||
linux_clang = False
|
||||
if platform == "linux":
|
||||
if "" != config.option("sysroot"):
|
||||
args_copy.append("use_sysroot=true")
|
||||
args_copy.append("is_clang=false")
|
||||
if is_ubuntu_24_or_higher():
|
||||
args_copy.append("use_gold=false")
|
||||
args_copy.append("sysroot=\\\"" + config.option("sysroot_linux_64") + "\\\"")
|
||||
args_copy.append("target_sysroot=\\\"" + config.option("sysroot_linux_64") + "\\\"")
|
||||
else:
|
||||
args_copy.append("is_clang=true")
|
||||
if "1" == config.option("use-clang"):
|
||||
linux_clang = True
|
||||
else:
|
||||
args_copy.append("use_sysroot=false")
|
||||
|
||||
if platform == "linux_arm64":
|
||||
if "" != config.option("sysroot"):
|
||||
args_copy.append("use_sysroot=true")
|
||||
if is_ubuntu_24_or_higher():
|
||||
args_copy.append("use_gold=false")
|
||||
#args_copy.append("sysroot=\\\"" + config.option("sysroot_linux_64") + "\\\"")
|
||||
args_copy.append("target_sysroot=\\\"" + config.option("sysroot_linux_arm64") + "\\\"")
|
||||
else:
|
||||
args_copy.append("is_clang=true")
|
||||
if "1" == config.option("use-clang"):
|
||||
linux_clang = True
|
||||
else:
|
||||
args_copy.append("use_sysroot=false")
|
||||
|
||||
|
||||
if (platform == "windows"):
|
||||
args_copy.append("is_clang=false")
|
||||
args_copy.append("is_clang=false")
|
||||
|
||||
if (platform == "mac") and base.is_os_arm():
|
||||
args_copy.append("host_cpu=\\\"x64\\\"")
|
||||
|
||||
if linux_clang != True:
|
||||
args_copy.append("use_custom_libcxx=false")
|
||||
|
||||
return "--args=\"" + " ".join(args_copy) + "\""
|
||||
|
||||
def ninja_windows_make(args, is_64=True, is_debug=False):
|
||||
def ninja_windows_make(args, is_64=True, is_debug=False, is_arm=False):
|
||||
directory_out = "out.gn/"
|
||||
directory_out += ("win_64/" if is_64 else "win_32/")
|
||||
|
||||
if is_arm:
|
||||
directory_out += "win_arm64/"
|
||||
else:
|
||||
directory_out += ("win_64/" if is_64 else "win_32/")
|
||||
|
||||
directory_out += ("debug" if is_debug else "release")
|
||||
|
||||
base.cmd2("gn", ["gen", directory_out, make_args(args, "windows", is_64, is_debug)])
|
||||
if is_arm:
|
||||
base.cmd2("gn", ["gen", directory_out, make_args(args, "win_arm64", is_64, is_debug)])
|
||||
else:
|
||||
base.cmd2("gn", ["gen", directory_out, make_args(args, "windows", is_64, is_debug)])
|
||||
|
||||
base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak")
|
||||
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
|
||||
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")
|
||||
|
||||
win_toolset_wrapper_file = "build/toolchain/win/tool_wrapper.py"
|
||||
win_toolset_wrapper_file_content = base.readFile("build/toolchain/win/tool_wrapper.py")
|
||||
if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")):
|
||||
base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n")
|
||||
|
||||
|
||||
base.cmd("ninja", ["-C", directory_out, "v8_wrappers"])
|
||||
if is_arm:
|
||||
base.copy_file('./' + directory_out + '/obj/v8_wrappers.lib', './' + directory_out + '/x64/obj/v8_wrappers.lib')
|
||||
base.cmd("ninja", ["-C", directory_out])
|
||||
base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja")
|
||||
base.move_file("./" + directory_out + "/obj/v8_wrappers.ninja.bak", "./" + directory_out + "/obj/v8_wrappers.ninja")
|
||||
return
|
||||
|
||||
# patch v8 for build ---------------------------------------------------
|
||||
def patch_windows_debug():
|
||||
# v8 8.9 version does not built with enable_iterator_debugging flag
|
||||
# patch heap.h file:
|
||||
file_patch = "./src/heap/heap.h"
|
||||
base.copy_file(file_patch, file_patch + ".bak")
|
||||
content_old = base.readFile(file_patch)
|
||||
posStart = content_old.find("class StrongRootBlockAllocator {")
|
||||
posEnd = content_old.find("};", posStart + 1)
|
||||
posEnd = content_old.find("};", posEnd + 1)
|
||||
content = content_old[0:posStart]
|
||||
content += base.readFile("./../../../../../build_tools/scripts/core_common/modules/v8_89.patch")
|
||||
content += content_old[posEnd + 2:]
|
||||
base.writeFile(file_patch, content)
|
||||
return
|
||||
|
||||
def unpatch_windows_debug():
|
||||
file_patch = "./src/heap/heap.h"
|
||||
base.move_file(file_patch + ".bak", file_patch)
|
||||
return
|
||||
# ----------------------------------------------------------------------
|
||||
|
||||
def make():
|
||||
old_env = dict(os.environ)
|
||||
old_cur = os.getcwd()
|
||||
@ -58,9 +210,15 @@ def make():
|
||||
if not base.is_dir(base_dir):
|
||||
base.create_dir(base_dir)
|
||||
|
||||
if ("mac" == base.host_platform()):
|
||||
base.cmd("git", ["config", "--global", "http.postBuffer", "157286400"], True)
|
||||
|
||||
os.chdir(base_dir)
|
||||
base.common_check_version("v8", "1", clean)
|
||||
|
||||
if not base.is_dir("depot_tools"):
|
||||
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
|
||||
change_bootstrap()
|
||||
|
||||
os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]
|
||||
|
||||
@ -70,31 +228,78 @@ def make():
|
||||
|
||||
if not base.is_dir("v8"):
|
||||
base.cmd("./depot_tools/fetch", ["v8"], True)
|
||||
base.copy_dir("./v8/third_party", "./v8/third_party_new")
|
||||
if ("windows" == base.host_platform()):
|
||||
os.chdir("v8")
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
|
||||
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
|
||||
os.chdir("../")
|
||||
base.cmd("./depot_tools/gclient", ["sync", "-r", "remotes/branch-heads/8.9"], True)
|
||||
v8_branch_version = "remotes/branch-heads/8.9"
|
||||
if ("mac" == base.host_platform()):
|
||||
v8_branch_version = "remotes/branch-heads/9.9"
|
||||
base.cmd("./depot_tools/gclient", ["sync", "-r", v8_branch_version], True)
|
||||
base.cmd("gclient", ["sync", "--force"], True)
|
||||
base.copy_dir("./v8/third_party_new/ninja", "./v8/third_party/ninja")
|
||||
if ("linux" == base.host_platform()):
|
||||
if not base.is_file("./depot_tools/python3_bin_reldir.txt"):
|
||||
base.cmd_in_dir("./depot_tools", "./ensure_bootstrap", [], True)
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt")
|
||||
|
||||
|
||||
# fix for new depot_tools and vs2019, as VC folder contains a folder with a symbol in the name
|
||||
# sorting is done by increasing version, so 0 is a dummy value
|
||||
replace_src = " def to_int_if_int(x):\n try:\n return int(x)\n except ValueError:\n return x"
|
||||
replace_dst = " def to_int_if_int(x):\n try:\n return int(x)\n except ValueError:\n return 0"
|
||||
base.replaceInFile("v8/build/vs_toolchain.py", replace_src, replace_dst)
|
||||
|
||||
|
||||
if not base.is_file("v8/src/base/platform/wrappers.cc"):
|
||||
base.writeFile("v8/src/base/platform/wrappers.cc", "#include \"src/base/platform/wrappers.h\"\n")
|
||||
|
||||
if config.check_option("platform", "win_arm64"):
|
||||
base.replaceInFile("v8/build/toolchain/win/setup_toolchain.py", "SDK_VERSION = \'10.0.26100.0\'", "SDK_VERSION = \'10.0.22621.0\'")
|
||||
else:
|
||||
base.replaceInFile("depot_tools/gclient_paths.py", "@functools.lru_cache", "")
|
||||
|
||||
if ("mac" == base.host_platform()):
|
||||
if not base.is_file("v8/build/config/compiler/BUILD.gn.bak"):
|
||||
base.copy_file("v8/build/config/compiler/BUILD.gn", "v8/build/config/compiler/BUILD.gn.bak")
|
||||
base.replaceInFile("v8/build/config/compiler/BUILD.gn", "\"-Wloop-analysis\",", "\"-Wloop-analysis\", \"-D_Float16=short\",")
|
||||
|
||||
if not base.is_file("v8/third_party/jinja2/tests.py.bak"):
|
||||
base.copy_file("v8/third_party/jinja2/tests.py", "v8/third_party/jinja2/tests.py.bak")
|
||||
base.replaceInFile("v8/third_party/jinja2/tests.py", "from collections import Mapping", "try:\n from collections.abc import Mapping\nexcept ImportError:\n from collections import Mapping")
|
||||
|
||||
os.chdir("v8")
|
||||
|
||||
is_ubuntu24 = is_ubuntu_24_or_higher()
|
||||
fix_ubuntu24()
|
||||
|
||||
gn_args = ["v8_static_library=true",
|
||||
"is_component_build=false",
|
||||
"v8_monolithic=true",
|
||||
"v8_use_external_startup_data=false",
|
||||
"use_custom_libcxx=false",
|
||||
"treat_warnings_as_errors=false"]
|
||||
|
||||
if config.check_option("platform", "linux_64"):
|
||||
base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")])
|
||||
base.cmd("ninja", ["-C", "out.gn/linux_64"])
|
||||
if config.option("sysroot") != "":
|
||||
sysroot_path = config.option("sysroot_linux_64")
|
||||
sysroot_path_bin = config.get_custom_sysroot_bin("linux_64")
|
||||
|
||||
old_env = dict(os.environ)
|
||||
base.set_sysroot_env("linux_64")
|
||||
|
||||
pkg_old = os.environ.get("PKG_CONFIG_PATH", "")
|
||||
os.environ["PKG_CONFIG_PATH"] = sysroot_path + "/usr/lib/x86_64-linux-gnu/pkgconfig:" + sysroot_path + "/usr/lib/pkgconfig:" + sysroot_path + "/usr/share/pkgconfig"
|
||||
base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")], False)
|
||||
os.environ["PKG_CONFIG_PATH"] = pkg_old
|
||||
|
||||
base.cmd2("ninja", ["-C", "out.gn/linux_64"], False)
|
||||
base.restore_sysroot_env()
|
||||
else:
|
||||
base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")], False)
|
||||
base.cmd2("ninja", ["-C", "out.gn/linux_64"], False)
|
||||
|
||||
|
||||
if config.check_option("platform", "linux_32"):
|
||||
base.cmd2("gn", ["gen", "out.gn/linux_32", make_args(gn_args, "linux", False)])
|
||||
@ -102,17 +307,28 @@ def make():
|
||||
|
||||
if config.check_option("platform", "linux_arm64"):
|
||||
base.cmd("build/linux/sysroot_scripts/install-sysroot.py", ["--arch=arm64"], False)
|
||||
|
||||
sysroot_path = config.option("sysroot_linux_64")
|
||||
pkg_old = os.environ.get("PKG_CONFIG_PATH", "")
|
||||
os.environ["PKG_CONFIG_PATH"] = sysroot_path + "/usr/lib/x86_64-linux-gnu/pkgconfig:" + sysroot_path + "/usr/lib/pkgconfig:" + sysroot_path + "/usr/share/pkgconfig"
|
||||
base.cmd2("gn", ["gen", "out.gn/linux_arm64", make_args(gn_args, "linux_arm64", False)])
|
||||
os.environ["PKG_CONFIG_PATH"] = pkg_old
|
||||
|
||||
base.cmd("ninja", ["-C", "out.gn/linux_arm64"])
|
||||
|
||||
if config.check_option("platform", "mac_64"):
|
||||
base.cmd2("gn", ["gen", "out.gn/mac_64", make_args(gn_args, "mac")])
|
||||
base.cmd("ninja", ["-C", "out.gn/mac_64"])
|
||||
|
||||
if config.check_option("platform", "win_arm64") and not base.is_file("out.gn/win_arm64/release/obj/v8_monolith.lib"):
|
||||
ninja_windows_make(gn_args, True, False, True)
|
||||
|
||||
if config.check_option("platform", "win_64"):
|
||||
if (-1 != config.option("config").lower().find("debug")):
|
||||
if not base.is_file("out.gn/win_64/debug/obj/v8_monolith.lib"):
|
||||
patch_windows_debug()
|
||||
ninja_windows_make(gn_args, True, True)
|
||||
unpatch_windows_debug()
|
||||
|
||||
if not base.is_file("out.gn/win_64/release/obj/v8_monolith.lib"):
|
||||
ninja_windows_make(gn_args)
|
||||
@ -120,7 +336,9 @@ def make():
|
||||
if config.check_option("platform", "win_32"):
|
||||
if (-1 != config.option("config").lower().find("debug")):
|
||||
if not base.is_file("out.gn/win_32/debug/obj/v8_monolith.lib"):
|
||||
patch_windows_debug()
|
||||
ninja_windows_make(gn_args, False, True)
|
||||
unpatch_windows_debug()
|
||||
|
||||
if not base.is_file("out.gn/win_32/release/obj/v8_monolith.lib"):
|
||||
ninja_windows_make(gn_args, False)
|
||||
|
||||
@ -1,16 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import ixwebsocket
|
||||
import socketrocket
|
||||
|
||||
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
|
||||
|
||||
def make():
|
||||
ixwebsocket.make()
|
||||
socketrocket.make()
|
||||
|
||||
return
|
||||
16
scripts/core_common/modules/websocket_all.py
Normal file
16
scripts/core_common/modules/websocket_all.py
Normal file
@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
#import ixwebsocket
|
||||
#import socketrocket
|
||||
import socket_io
|
||||
|
||||
def make():
|
||||
#ixwebsocket.make()
|
||||
#socketrocket.make()
|
||||
socket_io.make()
|
||||
|
||||
return
|
||||
@ -7,6 +7,7 @@ import deploy_builder
|
||||
import deploy_server
|
||||
import deploy_core
|
||||
import deploy_mobile
|
||||
import deploy_osign
|
||||
|
||||
def make():
|
||||
if config.check_option("module", "desktop"):
|
||||
@ -19,4 +20,8 @@ def make():
|
||||
deploy_core.make()
|
||||
if config.check_option("module", "mobile"):
|
||||
deploy_mobile.make()
|
||||
if config.check_option("module", "osign"):
|
||||
deploy_osign.make()
|
||||
if base.is_use_create_artifacts_qemu_any_platform():
|
||||
base.create_artifacts_qemu_any_platform()
|
||||
return
|
||||
|
||||
@ -15,6 +15,7 @@ def make():
|
||||
continue
|
||||
|
||||
root_dir = base_dir + ("/" + native_platform + "/" + branding + ("/DocumentBuilder" if base.is_windows() else "/documentbuilder"))
|
||||
root_dir_win64 = base_dir + "/win_64/" + branding + "/DocumentBuilder"
|
||||
if (base.is_dir(root_dir)):
|
||||
base.delete_dir(root_dir)
|
||||
base.create_dir(root_dir)
|
||||
@ -36,36 +37,30 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "OFDFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "StarMathConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "ooxmlsignature", "xp" if isWindowsXP else "")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
if ("ios" == platform):
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
|
||||
else:
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
|
||||
|
||||
if (native_platform == "linux_64"):
|
||||
base.generate_check_linux_system(git_dir + "/build_tools", root_dir)
|
||||
#if (native_platform == "linux_64"):
|
||||
# base.generate_check_linux_system(git_dir + "/build_tools", root_dir)
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/icuuc58.dll")
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/libicudata.so.58")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/libicuuc.so.58")
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib")
|
||||
base.deploy_icu(core_dir, root_dir, native_platform)
|
||||
|
||||
# doctrenderer
|
||||
if isWindowsXP:
|
||||
@ -76,12 +71,21 @@ def make():
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
|
||||
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
|
||||
# python wrapper
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
|
||||
# java wrapper
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.jni")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.java/build/libs/docbuilder.jar", root_dir + "/docbuilder.jar")
|
||||
|
||||
# app
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
|
||||
base.copy_dir(git_dir + "/document-templates/new/en-US", root_dir + "/empty")
|
||||
|
||||
# dictionaries
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
|
||||
|
||||
# js
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", root_dir + "/sdkjs")
|
||||
base.create_dir(root_dir + "/sdkjs/vendor")
|
||||
@ -92,22 +96,54 @@ def make():
|
||||
base.create_dir(root_dir + "/include")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/common_deploy.h", root_dir + "/include/common.h")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.h", root_dir + "/include/docbuilder.h")
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder_midl.h", root_dir + "/include/docbuilder_midl.h")
|
||||
base.replaceInFile(root_dir + "/include/docbuilder.h", "Q_DECL_EXPORT", "BUILDING_DOCBUILDER")
|
||||
|
||||
|
||||
if ("win_64" == platform):
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/x64/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_64/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_64/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
|
||||
|
||||
elif ("win_32" == platform):
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/Win32/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_32/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_32/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
|
||||
|
||||
# correct ios frameworks
|
||||
if ("ios" == platform):
|
||||
base.generate_plist(root_dir)
|
||||
base.for_each_framework(root_dir, "ios", callbacks=[base.generate_plist, base.generate_xcprivacy])
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.linux_correct_rpath_docbuilder(root_dir)
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.for_each_framework(root_dir, "mac", callbacks=[base.generate_plist], max_depth=1)
|
||||
base.mac_correct_rpath_x2t(root_dir)
|
||||
base.mac_correct_rpath_docbuilder(root_dir)
|
||||
|
||||
return
|
||||
|
||||
base.create_x2t_js_cache(root_dir, "builder", platform)
|
||||
|
||||
base.create_dir(root_dir + "/fonts")
|
||||
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
|
||||
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
|
||||
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
|
||||
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
|
||||
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
|
||||
|
||||
# delete unnecessary builder files
|
||||
def delete_files(files):
|
||||
for file in files:
|
||||
base.delete_file(file)
|
||||
|
||||
delete_files(base.find_files(root_dir, "*.wasm"))
|
||||
delete_files(base.find_files(root_dir, "*_ie.js"))
|
||||
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/cmap.bin")
|
||||
if 0 != platform.find("mac"):
|
||||
delete_files(base.find_files(root_dir, "sdk-all.js"))
|
||||
delete_files(base.find_files(root_dir, "sdk-all-min.js"))
|
||||
base.delete_dir(root_dir + "/sdkjs/slide/themes")
|
||||
base.delete_dir(root_dir + "/sdkjs/cell/css")
|
||||
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/viewer.js")
|
||||
base.delete_file(root_dir + "/sdkjs/common/spell/spell/spell.js.mem")
|
||||
base.delete_dir(root_dir + "/sdkjs/common/Images")
|
||||
|
||||
return
|
||||
|
||||
@ -25,21 +25,27 @@ def make():
|
||||
|
||||
platform = native_platform
|
||||
platform_postfix = platform + base.qt_dst_postfix()
|
||||
isWindowsXP = False if (-1 == native_platform.find("_xp")) else True
|
||||
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "OFDFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "StarMathConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "ooxmlsignature", "xp" if isWindowsXP else "")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")
|
||||
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", archive_dir + "/sdkjs")
|
||||
@ -50,16 +56,30 @@ def make():
|
||||
if ("windows" == base.host_platform()):
|
||||
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*.dll", archive_dir + "/")
|
||||
else:
|
||||
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*", archive_dir + "/")
|
||||
if not (0 == platform.find("mac") and config.check_option("config", "bundle_dylibs")):
|
||||
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*", archive_dir + "/")
|
||||
base.copy_v8_files(core_dir, archive_dir, platform)
|
||||
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allfontsgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allthemesgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "pluginsmanager")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "standardtester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2ttester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester")
|
||||
|
||||
if base.is_file(archive_dir + "/core.7z"):
|
||||
base.delete_file(archive_dir + "/core.7z")
|
||||
base.archive_folder(archive_dir, archive_dir + "/core.7z")
|
||||
# correct mac frameworks
|
||||
if (0 == platform.find("mac")):
|
||||
base.for_each_framework(archive_dir, "mac", callbacks=[base.generate_plist], max_depth=1)
|
||||
base.mac_correct_rpath_x2t(archive_dir)
|
||||
|
||||
# js cache
|
||||
base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
|
||||
base.create_x2t_js_cache(archive_dir, "core", platform)
|
||||
base.delete_file(archive_dir + "/DoctRenderer.config")
|
||||
|
||||
# dictionaries
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)
|
||||
return
|
||||
|
||||
|
||||
@ -4,6 +4,19 @@ import config
|
||||
import base
|
||||
import os
|
||||
import platform
|
||||
import glob
|
||||
|
||||
def copy_lib_with_links(src_dir, dst_dir, lib, version):
|
||||
lib_full_name = lib + "." + version
|
||||
major_version = version[:version.find(".")]
|
||||
lib_major_name = lib + "." + major_version
|
||||
|
||||
base.copy_file(src_dir + "/" + lib_full_name, dst_dir + "/" + lib_full_name)
|
||||
|
||||
base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + lib_full_name, "./" + lib_major_name])
|
||||
base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + lib_major_name, "./" + lib])
|
||||
|
||||
return
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../out"
|
||||
@ -27,7 +40,7 @@ def make():
|
||||
isWindowsXP = False if (-1 == native_platform.find("_xp")) else True
|
||||
platform = native_platform[0:-3] if isWindowsXP else native_platform
|
||||
|
||||
apps_postfix = "build" + base.qt_dst_postfix();
|
||||
apps_postfix = "build" + base.qt_dst_postfix()
|
||||
if ("" != config.option("branding")):
|
||||
apps_postfix += ("/" + config.option("branding"))
|
||||
apps_postfix += "/"
|
||||
@ -41,91 +54,106 @@ def make():
|
||||
|
||||
platform_postfix = platform + base.qt_dst_postfix()
|
||||
|
||||
build_libraries_path = core_build_dir + "/lib/" + platform_postfix
|
||||
|
||||
# x2t
|
||||
base.create_dir(root_dir + "/converter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DocxRenderer")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel_network")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "UnicodeConverter")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "graphics")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "PdfFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "DjVuFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "XpsFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "OFDFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlFile2")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "Fb2File")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "EpubFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "IWorkFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "HWPFile")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "DocxRenderer")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "StarMathConverter")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "ooxmlsignature", "xp" if isWindowsXP else "")
|
||||
|
||||
if ("ios" == platform):
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "x2t")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "x2t")
|
||||
else:
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "x2t")
|
||||
|
||||
if (native_platform == "linux_64"):
|
||||
base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter")
|
||||
#if (native_platform == "linux_64"):
|
||||
# base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter")
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/converter/icudt58.dll")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/converter/icuuc58.dll")
|
||||
base.copy_file(git_dir + "/desktop-apps/common/converter/package.config", root_dir + "/converter/package.config")
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/converter/libicudata.so.58")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/converter/libicuuc.so.58")
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/converter/libicudata.58.dylib")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/converter/libicuuc.58.dylib")
|
||||
base.deploy_icu(core_dir, root_dir + "/converter", native_platform)
|
||||
|
||||
# doctrenderer
|
||||
if isWindowsXP:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir + "/converter", "doctrenderer")
|
||||
base.copy_lib(build_libraries_path + "/xp", root_dir + "/converter", "doctrenderer")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "doctrenderer")
|
||||
base.copy_lib(build_libraries_path, root_dir + "/converter", "doctrenderer")
|
||||
base.copy_v8_files(core_dir, root_dir + "/converter", platform, isWindowsXP)
|
||||
|
||||
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop")
|
||||
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop", "", "../dictionaries")
|
||||
base.copy_dir(git_dir + "/document-templates/new", root_dir + "/converter/empty")
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/templates", root_dir + "/converter/templates")
|
||||
|
||||
# dictionaries
|
||||
base.create_dir(root_dir + "/dictionaries")
|
||||
base.copy_dir_content(git_dir + "/dictionaries", root_dir + "/dictionaries", "", ".git")
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries")
|
||||
|
||||
base.copy_dir(git_dir + "/core-fonts/opensans", root_dir + "/fonts")
|
||||
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
|
||||
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
|
||||
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
|
||||
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
|
||||
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
|
||||
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/package/fonts", root_dir + "/fonts")
|
||||
base.copy_file(git_dir + "/desktop-apps/common/package/license/3dparty/3DPARTYLICENSE", root_dir + "/3DPARTYLICENSE")
|
||||
|
||||
# cef
|
||||
build_dir_name = "build"
|
||||
if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")):
|
||||
build_dir_name = "build_107"
|
||||
elif (0 == platform.find("mac")) and (config.check_option("config", "use_v8")):
|
||||
build_dir_name = "build_103"
|
||||
|
||||
if not isWindowsXP:
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/build/*", root_dir)
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/" + build_dir_name + "/*", root_dir)
|
||||
else:
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + native_platform + "/build/*", root_dir)
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + native_platform + "/" + build_dir_name + "/*", root_dir)
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
dir_base_old = os.getcwd()
|
||||
os.chdir(root_dir + "/Chromium Embedded Framework.framework")
|
||||
base.create_dir("Versions")
|
||||
base.create_dir("Versions/A")
|
||||
base.move_file("Chromium Embedded Framework", "Versions/A/Chromium Embedded Framework")
|
||||
base.move_dir("Resources", "Versions/A/Resources")
|
||||
base.move_dir("Libraries", "Versions/A/Libraries")
|
||||
base.cmd("ln", ["-s", "Versions/A/Chromium Embedded Framework", "Chromium Embedded Framework"])
|
||||
base.cmd("ln", ["-s", "Versions/A/Resources", "Resources"])
|
||||
base.cmd("ln", ["-s", "Versions/A/Libraries", "Libraries"])
|
||||
base.cmd("ln", ["-s", "A", "Versions/Current"])
|
||||
os.chdir(dir_base_old);
|
||||
|
||||
isUseQt = True
|
||||
if (0 == platform.find("mac")) or (0 == platform.find("ios")):
|
||||
isUseQt = False
|
||||
|
||||
# libraries
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "hunspell")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
|
||||
base.copy_lib(build_libraries_path, root_dir, "hunspell")
|
||||
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
|
||||
if (0 != platform.find("mac")):
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
|
||||
|
||||
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.copy_dir(core_build_dir + "/bin/" + platform_postfix + "/editors_helper.app", root_dir + "/editors_helper.app")
|
||||
else:
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "editors_helper")
|
||||
|
||||
|
||||
if isUseQt:
|
||||
base.qt_copy_lib("Qt5Core", root_dir)
|
||||
base.qt_copy_lib("Qt5Gui", root_dir)
|
||||
base.qt_copy_lib("Qt5PrintSupport", root_dir)
|
||||
base.qt_copy_lib("Qt5Svg", root_dir)
|
||||
base.qt_copy_lib("Qt5Widgets", root_dir)
|
||||
base.qt_copy_lib("Qt5Multimedia", root_dir)
|
||||
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
|
||||
base.qt_copy_lib("Qt5Network", root_dir)
|
||||
base.qt_copy_lib("Qt5OpenGL", root_dir)
|
||||
|
||||
@ -135,74 +163,70 @@ def make():
|
||||
base.qt_copy_plugin("platforms", root_dir)
|
||||
base.qt_copy_plugin("platforminputcontexts", root_dir)
|
||||
base.qt_copy_plugin("printsupport", root_dir)
|
||||
base.qt_copy_plugin("mediaservice", root_dir)
|
||||
base.qt_copy_plugin("playlistformats", root_dir)
|
||||
|
||||
base.qt_copy_plugin("platformthemes", root_dir)
|
||||
base.qt_copy_plugin("xcbglintegrations", root_dir)
|
||||
|
||||
if not base.check_congig_option_with_platfom(platform, "libvlc"):
|
||||
base.qt_copy_lib("Qt5Multimedia", root_dir)
|
||||
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
|
||||
base.qt_copy_plugin("mediaservice", root_dir)
|
||||
base.qt_copy_plugin("playlistformats", root_dir)
|
||||
|
||||
base.qt_copy_plugin("styles", root_dir)
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.qt_copy_lib("Qt5DBus", root_dir)
|
||||
base.qt_copy_lib("Qt5X11Extras", root_dir)
|
||||
base.qt_copy_lib("Qt5XcbQpa", root_dir)
|
||||
base.qt_copy_icu(root_dir)
|
||||
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
|
||||
base.qt_copy_icu(root_dir, platform)
|
||||
if not base.check_congig_option_with_platfom(platform, "libvlc"):
|
||||
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/projicons/" + apps_postfix + "/projicons.exe", root_dir + "/DesktopEditors.exe")
|
||||
if not isWindowsXP:
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/update-daemon/" + apps_postfix + "/updatesvc.exe", root_dir + "/updatesvc.exe")
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors.exe", root_dir + "/editors.exe")
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/res/icons/desktopeditors.ico", root_dir + "/app.ico")
|
||||
elif (0 == platform.find("linux")):
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors", root_dir + "/DesktopEditors")
|
||||
|
||||
if ("" != base.get_env("VIDEO_PLAYER_VLC_DIR")):
|
||||
vlc_dir = git_dir + "/desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc/"
|
||||
if base.check_congig_option_with_platfom(platform, "libvlc"):
|
||||
vlc_dir = git_dir + "/core/Common/3dParty/libvlc/build/" + platform + "/lib"
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlc.dll", root_dir + "/libvlc.dll")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlccore.dll", root_dir + "/libvlccore.dll")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.dll", root_dir + "/VLCQtCore.dll")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.dll", root_dir + "/VLCQtWidgets.dll")
|
||||
else:
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlc.so", root_dir + "/libvlc.so")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlc.so.5", root_dir + "/libvlc.so.5")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so", root_dir + "/libvlccore.so")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so.8", root_dir + "/libvlccore.so.8")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.so", root_dir + "/VLCQtCore.so")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.so", root_dir + "/VLCQtWidgets.so")
|
||||
base.copy_dir(vlc_dir + "/plugins", root_dir + "/plugins")
|
||||
base.copy_files(vlc_dir + "/*.dll", root_dir)
|
||||
base.copy_file(vlc_dir + "/vlc-cache-gen.exe", root_dir + "/vlc-cache-gen.exe")
|
||||
elif (0 == platform.find("linux")):
|
||||
base.copy_dir(vlc_dir + "/vlc/plugins", root_dir + "/plugins")
|
||||
base.copy_file(vlc_dir + "/vlc/libcompat.a", root_dir + "/libcompat.a")
|
||||
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_pulse.so", "0.0.0")
|
||||
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_vdpau.so", "0.0.0")
|
||||
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_xcb_events.so", "0.0.0")
|
||||
copy_lib_with_links(vlc_dir, root_dir, "libvlc.so", "5.6.1")
|
||||
copy_lib_with_links(vlc_dir, root_dir, "libvlccore.so", "9.0.1")
|
||||
base.copy_file(vlc_dir + "/vlc/vlc-cache-gen", root_dir + "/vlc-cache-gen")
|
||||
|
||||
if isWindowsXP:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer/xp", root_dir, "videoplayer")
|
||||
base.copy_lib(build_libraries_path + "/mediaplayer/xp", root_dir, "videoplayer")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer", root_dir, "videoplayer")
|
||||
|
||||
base.copy_dir(vlc_dir + platform + "/bin/plugins", root_dir + "/plugins")
|
||||
base.copy_lib(build_libraries_path + "/mediaplayer", root_dir, "videoplayer")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
|
||||
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
|
||||
|
||||
base.create_dir(root_dir + "/editors")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
|
||||
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
|
||||
base.delete_file(file)
|
||||
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
|
||||
|
||||
# desktopeditors-help
|
||||
root_help_dir = root_dir + "-help"
|
||||
if (base.is_dir(root_help_dir)):
|
||||
base.delete_dir(root_help_dir)
|
||||
for i in ["documenteditor", "presentationeditor", "spreadsheeteditor"]:
|
||||
base.copy_dir(
|
||||
base_dir + "/js/" + branding + "/desktop/web-apps/apps/%s/main/resources/help" % i,
|
||||
root_help_dir + "/editors/web-apps/apps/%s/main/resources/help" % i)
|
||||
|
||||
if ("1" != config.option("preinstalled-help")):
|
||||
# remove help from install until web-apps containes help
|
||||
base.delete_dir(root_dir + "/editors/web-apps/apps/documenteditor/main/resources/help")
|
||||
base.delete_dir(root_dir + "/editors/web-apps/apps/presentationeditor/main/resources/help")
|
||||
base.delete_dir(root_dir + "/editors/web-apps/apps/spreadsheeteditor/main/resources/help")
|
||||
|
||||
base.create_dir(root_dir + "/editors/sdkjs-plugins")
|
||||
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
|
||||
if not isWindowsXP:
|
||||
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
|
||||
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True, isWindowsXP)
|
||||
# remove some default plugins
|
||||
if base.is_dir(root_dir + "/editors/sdkjs-plugins/speech"):
|
||||
base.delete_dir(root_dir + "/editors/sdkjs-plugins/speech")
|
||||
@ -214,18 +238,42 @@ def make():
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", root_dir + "/editors/sdkjs-plugins/v1/plugins.css")
|
||||
base.support_old_versions_plugins(root_dir + "/editors/sdkjs-plugins")
|
||||
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "manager", True)
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt", root_dir + "/editors/sdkjs-plugins", "advanced2", True)
|
||||
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/common/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}", root_dir + "/editors/sdkjs-plugins/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}")
|
||||
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/engine/database/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}", root_dir + "/editors/sdkjs-plugins/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}")
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
|
||||
|
||||
isUseAgent = True
|
||||
if isWindowsXP:
|
||||
isUseAgent = False
|
||||
if (0 == platform.find("mac")) and (config.check_option("config", "use_v8")):
|
||||
isUseAgent = False
|
||||
|
||||
if (isUseAgent):
|
||||
agent_plugin_dir = git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/ai-agent"
|
||||
if (False):
|
||||
base.cmd_in_dir(agent_plugin_dir, "npm", ["install"], True)
|
||||
base.cmd_in_dir(agent_plugin_dir, "npm", ["run", "build"], True)
|
||||
base.copy_dir(agent_plugin_dir + "/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}", root_dir + "/editors/sdkjs-plugins/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}")
|
||||
else:
|
||||
base.copy_dir(agent_plugin_dir + "/deploy/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}", root_dir + "/editors/sdkjs-plugins/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}")
|
||||
|
||||
base.copy_file(base_dir + "/js/" + branding + "/desktop/index.html", root_dir + "/index.html")
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
|
||||
base.create_dir(root_dir + "/editors/webext")
|
||||
base.copy_file(base_dir + "/js/" + branding + "/desktop/noconnect.html", root_dir + "/editors/webext/noconnect.html")
|
||||
|
||||
if isWindowsXP:
|
||||
base.create_dir(root_dir + "/providers")
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers/onlyoffice", root_dir + "/providers/onlyoffice")
|
||||
else:
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
|
||||
|
||||
isUseJSC = False
|
||||
if (0 == platform.find("mac")):
|
||||
file_size_doctrenderer = os.path.getsize(root_dir + "/converter/libdoctrenderer.dylib")
|
||||
doctrenderer_lib = "libdoctrenderer.dylib"
|
||||
if config.check_option("config", "bundle_dylibs"):
|
||||
doctrenderer_lib = "doctrenderer.framework/doctrenderer"
|
||||
file_size_doctrenderer = os.path.getsize(root_dir + "/converter/" + doctrenderer_lib)
|
||||
print("file_size_doctrenderer: " + str(file_size_doctrenderer))
|
||||
if (file_size_doctrenderer < 5*1024*1024):
|
||||
isUseJSC = True
|
||||
@ -233,43 +281,68 @@ def make():
|
||||
if isUseJSC:
|
||||
base.delete_file(root_dir + "/converter/icudtl.dat")
|
||||
|
||||
base.create_x2t_js_cache(root_dir + "/converter", "desktop", platform)
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_lib(git_dir + "/desktop-apps/win-linux/3dparty/WinSparkle/" + platform, root_dir, "WinSparkle")
|
||||
base.delete_file(root_dir + "/cef_sandbox.lib")
|
||||
base.delete_file(root_dir + "/libcef.lib")
|
||||
|
||||
isMacArmPlaformOnIntel = False
|
||||
is_host_not_arm = False
|
||||
host_platform = ""
|
||||
|
||||
# TODO: fix this on mac_arm64 (qemu)
|
||||
# on windows we are using qemu
|
||||
if (platform == "mac_arm64") and not base.is_os_arm():
|
||||
isMacArmPlaformOnIntel = True
|
||||
is_host_not_arm = True
|
||||
host_platform = "mac_64"
|
||||
|
||||
# all themes generate ----
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allfontsgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allthemesgen")
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
# gen plists with max_depth 2 because frameworks are only located in root_dir and converter subdirectory
|
||||
base.for_each_framework(root_dir, "mac", callbacks=[base.generate_plist], max_depth=2)
|
||||
base.mac_correct_rpath_desktop(root_dir)
|
||||
|
||||
if isMacArmPlaformOnIntel:
|
||||
if is_host_not_arm:
|
||||
sdkjs_dir = root_dir + "/editors/sdkjs"
|
||||
end_find_platform = sdkjs_dir.rfind("/mac_arm64/")
|
||||
sdkjs_dir_mac64 = sdkjs_dir[0:end_find_platform] + "/mac_64/" + sdkjs_dir[end_find_platform+11:]
|
||||
str1 = "/" + platform + "/"
|
||||
str2 = "/" + host_platform + "/"
|
||||
sdkjs_dir_host = sdkjs_dir.replace(str1, str2)
|
||||
base.delete_dir(sdkjs_dir)
|
||||
base.copy_dir(sdkjs_dir_mac64, sdkjs_dir)
|
||||
base.copy_dir(sdkjs_dir_host, sdkjs_dir)
|
||||
else:
|
||||
themes_params = []
|
||||
if ("" != config.option("themesparams")):
|
||||
themes_params = ["--params=\"" + config.option("themesparams") + "\""]
|
||||
base.cmd_exe(root_dir + "/converter/allfontsgen", ["--use-system=\"1\"", "--input=\"" + root_dir + "/fonts\"", "--input=\"" + git_dir + "/core-fonts\"", "--allfonts=\"" + root_dir + "/converter/AllFonts.js\"", "--selection=\"" + root_dir + "/converter/font_selection.bin\""])
|
||||
base.cmd_exe(root_dir + "/converter/allthemesgen", ["--converter-dir=\"" + root_dir + "/converter\"", "--src=\"" + root_dir + "/editors/sdkjs/slide/themes\"", "--allfonts=\"AllFonts.js\"", "--output=\"" + root_dir + "/editors/sdkjs/common/Images\""] + themes_params)
|
||||
|
||||
params_allfontsgen = ["--use-system=\"1\"", "--input=\"" + root_dir + "/fonts\"", "--input=\"" + git_dir + "/core-fonts\"", "--allfonts=\"" + root_dir + "/converter/AllFonts.js\"", "--selection=\"" + root_dir + "/converter/font_selection.bin\""]
|
||||
params_allthemesgen = ["--converter-dir=\"" + root_dir + "/converter\"", "--src=\"" + root_dir + "/editors/sdkjs/slide/themes\"", "--allfonts=\"AllFonts.js\"", "--output=\"" + root_dir + "/editors/sdkjs/common/Images\""] + themes_params
|
||||
if (0 == platform.find("linux_arm") and not base.is_os_arm()):
|
||||
x2t_origin = ""
|
||||
if (config.option("sysroot") != ""):
|
||||
x2t_origin = base.create_qemu_wrapper(root_dir + "/converter/x2t", platform)
|
||||
|
||||
base.cmd_in_dir_qemu(platform, root_dir + "/converter", "./allfontsgen", params_allfontsgen, True)
|
||||
base.cmd_in_dir_qemu(platform, root_dir + "/converter", "./allthemesgen", params_allthemesgen, True)
|
||||
|
||||
if "" != x2t_origin:
|
||||
base.delete_file(root_dir + "/converter/x2t")
|
||||
base.move_file(x2t_origin, root_dir + "/converter/x2t")
|
||||
else:
|
||||
base.cmd_exe(root_dir + "/converter/allfontsgen", params_allfontsgen, True)
|
||||
base.cmd_exe(root_dir + "/converter/allthemesgen", params_allthemesgen, True)
|
||||
|
||||
base.delete_file(root_dir + "/converter/AllFonts.js")
|
||||
base.delete_file(root_dir + "/converter/font_selection.bin")
|
||||
base.delete_file(root_dir + "/converter/fonts.log")
|
||||
|
||||
base.delete_exe(root_dir + "/converter/allfontsgen")
|
||||
base.delete_exe(root_dir + "/converter/allthemesgen")
|
||||
if not base.is_use_create_artifacts_qemu(platform):
|
||||
base.delete_exe(root_dir + "/converter/allfontsgen")
|
||||
base.delete_exe(root_dir + "/converter/allthemesgen")
|
||||
|
||||
if not isUseJSC:
|
||||
base.delete_file(root_dir + "/editors/sdkjs/slide/sdk-all.cache")
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -6,7 +6,19 @@ import base
|
||||
def exclude_arch(directory, frameworks):
|
||||
for lib in frameworks:
|
||||
base.cmd("lipo", ["-remove", "arm64", directory + "/" + lib + ".framework/" + lib, "-o", directory + "/" + lib + ".framework/" + lib])
|
||||
return
|
||||
return
|
||||
|
||||
def deploy_fonts(git_dir, root_dir, platform=""):
|
||||
base.create_dir(root_dir + "/fonts")
|
||||
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
|
||||
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
|
||||
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
|
||||
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
|
||||
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
|
||||
if (platform == "android"):
|
||||
base.copy_dir(git_dir + "/core-fonts/dejavu", root_dir + "/fonts/dejavu")
|
||||
base.copy_dir(git_dir + "/core-fonts/liberation", root_dir + "/fonts/liberation")
|
||||
return
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../out"
|
||||
@ -23,7 +35,7 @@ def make():
|
||||
|
||||
if base.get_env("DESTDIR_BUILD_OVERRIDE") != "":
|
||||
return
|
||||
|
||||
|
||||
if (base.is_dir(root_dir)):
|
||||
base.delete_dir(root_dir)
|
||||
base.create_dir(root_dir)
|
||||
@ -42,52 +54,37 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "OFDFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "StarMathConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "ooxmlsignature")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
if (0 == platform.find("win") or 0 == platform.find("linux") or 0 == platform.find("mac")):
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
|
||||
|
||||
if ("ios" == platform) and config.check_option("config", "bundle_dylibs") and config.check_option("config", "simulator"):
|
||||
exclude_arch(root_dir, ["kernel", "kernel_network", "UnicodeConverter", "graphics", "PdfWriter",
|
||||
"PdfReader", "DjVuFile", "XpsFile", "HtmlFile2", "HtmlRenderer", "doctrenderer",
|
||||
"Fb2File", "EpubFile", "x2t"])
|
||||
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/icuuc58.dll")
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/libicudata.so.58")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/libicuuc.so.58")
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib")
|
||||
|
||||
if (0 == platform.find("android")):
|
||||
#base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicudata.so", root_dir + "/libicudata.so")
|
||||
#base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicuuc.so", root_dir + "/libicuuc.so")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/icudt58l.dat", root_dir + "/icudt58l.dat")
|
||||
base.deploy_icu(core_dir, root_dir, platform)
|
||||
|
||||
# js
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
|
||||
|
||||
# correct ios frameworks
|
||||
if ("ios" == platform):
|
||||
base.generate_plist(root_dir)
|
||||
base.for_each_framework(root_dir, "ios", callbacks=[base.generate_plist, base.generate_xcprivacy])
|
||||
deploy_fonts(git_dir, root_dir)
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.mac_correct_rpath_x2t(root_dir)
|
||||
@ -101,8 +98,11 @@ def make():
|
||||
base.create_dir(root_dir)
|
||||
# js
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
|
||||
# fonts
|
||||
deploy_fonts(git_dir, root_dir, "android")
|
||||
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
|
||||
# app
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
|
||||
libs_dir = root_dir + "/lib"
|
||||
base.create_dir(libs_dir + "/arm64-v8a")
|
||||
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.so", libs_dir + "/arm64-v8a")
|
||||
|
||||
60
scripts/deploy_osign.py
Normal file
60
scripts/deploy_osign.py
Normal file
@ -0,0 +1,60 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import config
|
||||
import base
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../out"
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
core_dir = git_dir + "/core"
|
||||
branding = config.branding()
|
||||
|
||||
platforms = config.option("platform").split()
|
||||
for native_platform in platforms:
|
||||
if not native_platform in config.platforms:
|
||||
continue
|
||||
|
||||
root_dir = base_dir + "/" + native_platform + "/" + branding + "/osign"
|
||||
|
||||
if base.get_env("DESTDIR_BUILD_OVERRIDE") != "":
|
||||
return
|
||||
|
||||
if (base.is_dir(root_dir)):
|
||||
base.delete_dir(root_dir)
|
||||
base.create_dir(root_dir)
|
||||
|
||||
qt_dir = base.qt_setup(native_platform)
|
||||
platform = native_platform
|
||||
|
||||
core_build_dir = core_dir + "/build"
|
||||
if ("" != config.option("branding")):
|
||||
core_build_dir += ("/" + config.option("branding"))
|
||||
|
||||
platform_postfix = platform + base.qt_dst_postfix()
|
||||
|
||||
# x2t
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "osign")
|
||||
|
||||
# correct ios frameworks
|
||||
if ("ios" == platform):
|
||||
base.for_each_framework(root_dir, "ios", callbacks=[base.generate_plist, base.generate_xcprivacy])
|
||||
|
||||
for native_platform in platforms:
|
||||
if native_platform == "android":
|
||||
# make full version
|
||||
root_dir = base_dir + "/android/" + branding + "/osign"
|
||||
if (base.is_dir(root_dir)):
|
||||
base.delete_dir(root_dir)
|
||||
base.create_dir(root_dir)
|
||||
libs_dir = root_dir + "/lib"
|
||||
base.create_dir(libs_dir + "/arm64-v8a")
|
||||
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/osign/*.so", libs_dir + "/arm64-v8a")
|
||||
base.create_dir(libs_dir + "/armeabi-v7a")
|
||||
base.copy_files(base_dir + "/android_armv7/" + branding + "/osign/*.so", libs_dir + "/armeabi-v7a")
|
||||
base.create_dir(libs_dir + "/x86")
|
||||
base.copy_files(base_dir + "/android_x86/" + branding + "/osign/*.so", libs_dir + "/x86")
|
||||
base.create_dir(libs_dir + "/x86_64")
|
||||
base.copy_files(base_dir + "/android_x86_64/" + branding + "/osign/*.so", libs_dir + "/x86_64")
|
||||
break
|
||||
|
||||
return
|
||||
@ -5,6 +5,7 @@ import base
|
||||
|
||||
import re
|
||||
import shutil
|
||||
import glob
|
||||
from tempfile import mkstemp
|
||||
|
||||
def make():
|
||||
@ -40,24 +41,32 @@ def make():
|
||||
|
||||
build_server_dir = root_dir + '/server'
|
||||
server_dir = base.get_script_dir() + "/../../server"
|
||||
bin_server_dir = server_dir + "/build/server"
|
||||
server_admin_panel_dir = base.get_script_dir() + "/../../server-admin-panel"
|
||||
|
||||
base.create_dir(build_server_dir + '/DocService')
|
||||
|
||||
base.copy_dir(bin_server_dir + '/Common/config', build_server_dir + '/Common/config')
|
||||
base.copy_dir(server_dir + '/Common/config', build_server_dir + '/Common/config')
|
||||
|
||||
base.create_dir(build_server_dir + '/DocService')
|
||||
base.copy_exe(bin_server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
|
||||
base.copy_exe(server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
|
||||
|
||||
base.create_dir(build_server_dir + '/FileConverter')
|
||||
base.copy_exe(bin_server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
|
||||
base.copy_exe(server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
|
||||
|
||||
base.create_dir(build_server_dir + '/Metrics')
|
||||
base.copy_exe(bin_server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
|
||||
base.copy_dir(bin_server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
|
||||
base.copy_exe(server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
|
||||
base.copy_dir(server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
|
||||
base.create_dir(build_server_dir + '/Metrics/node_modules/modern-syslog/build/Release')
|
||||
base.copy_file(bin_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
|
||||
base.copy_file(server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
|
||||
|
||||
if "server-admin-panel" in base.get_server_addons() and base.is_exist(server_admin_panel_dir):
|
||||
# AdminPanel server part
|
||||
base.create_dir(build_server_dir + '/AdminPanel/server')
|
||||
base.copy_exe(server_admin_panel_dir + "/server", build_server_dir + '/AdminPanel/server', "adminpanel")
|
||||
|
||||
# AdminPanel client part
|
||||
base.create_dir(build_server_dir + '/AdminPanel/client/build')
|
||||
base.copy_dir(server_admin_panel_dir + '/client/build', build_server_dir + '/AdminPanel/client/build')
|
||||
|
||||
qt_dir = base.qt_setup(native_platform)
|
||||
platform = native_platform
|
||||
@ -75,74 +84,81 @@ def make():
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "OFDFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "IWorkFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HWPFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DocxRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "StarMathConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "ooxmlsignature")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", converter_dir + "/cmap.bin")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "x2t")
|
||||
|
||||
if (native_platform == "linux_64"):
|
||||
base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
|
||||
#if (native_platform == "linux_64"):
|
||||
# base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
|
||||
|
||||
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server")
|
||||
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server", "", "../../../dictionaries")
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", converter_dir + "/icudt58.dll")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", converter_dir + "/icuuc58.dll")
|
||||
base.deploy_icu(core_dir, converter_dir, platform)
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", converter_dir + "/libicudata.so.58")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", converter_dir + "/libicuuc.so.58")
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", converter_dir + "/libicudata.58.dylib")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", converter_dir + "/libicuuc.58.dylib")
|
||||
|
||||
base.copy_v8_files(core_dir, converter_dir, platform)
|
||||
|
||||
# builder
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "docbuilder")
|
||||
base.copy_dir(git_dir + "/document-templates/new/en-US", converter_dir + "/empty")
|
||||
|
||||
# correct mac frameworks
|
||||
if (0 == platform.find("mac")):
|
||||
base.for_each_framework(converter_dir, "mac", callbacks=[base.generate_plist], max_depth=1)
|
||||
|
||||
# js
|
||||
js_dir = root_dir
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
|
||||
|
||||
for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
|
||||
+ glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
|
||||
base.delete_file(file)
|
||||
|
||||
base.create_x2t_js_cache(converter_dir, "server", platform)
|
||||
|
||||
# add embed worker code
|
||||
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
|
||||
|
||||
# plugins
|
||||
base.create_dir(js_dir + "/sdkjs-plugins")
|
||||
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
|
||||
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
|
||||
base.copy_marketplace_plugin(js_dir + "/sdkjs-plugins", False, True)
|
||||
if ("1" == config.option("preinstalled-plugins")):
|
||||
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
|
||||
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
|
||||
else:
|
||||
base.generate_sdkjs_plugin_list(js_dir + "/sdkjs-plugins/plugin-list-default.json")
|
||||
base.create_dir(js_dir + "/sdkjs-plugins/v1")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", js_dir + "/sdkjs-plugins/v1/plugins.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", js_dir + "/sdkjs-plugins/v1/plugins-ui.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", js_dir + "/sdkjs-plugins/v1/plugins.css")
|
||||
base.support_old_versions_plugins(js_dir + "/sdkjs-plugins")
|
||||
|
||||
base.clone_marketplace_plugin(root_dir + "/sdkjs-plugins")
|
||||
|
||||
# tools
|
||||
tools_dir = root_dir + "/server/tools"
|
||||
base.create_dir(tools_dir)
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allfontsgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allthemesgen")
|
||||
if ("1" != config.option("preinstalled-plugins")):
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "pluginsmanager")
|
||||
|
||||
branding_dir = server_dir + "/branding"
|
||||
if("" != config.option("branding") and "onlyoffice" != config.option("branding")):
|
||||
branding_dir = git_dir + '/' + config.option("branding") + '/server'
|
||||
|
||||
#dictionaries
|
||||
spellchecker_dictionaries = root_dir + '/dictionaries'
|
||||
spellchecker_dictionaries_files = server_dir + '/../dictionaries/*_*'
|
||||
base.create_dir(spellchecker_dictionaries)
|
||||
base.copy_files(spellchecker_dictionaries_files, spellchecker_dictionaries)
|
||||
base.copy_dictionaries(server_dir + "/../dictionaries", root_dir + "/dictionaries")
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
exec_ext = '.exe'
|
||||
@ -164,8 +180,14 @@ def make():
|
||||
#document-templates
|
||||
document_templates_files = server_dir + '/../document-templates'
|
||||
document_templates = build_server_dir + '/../document-templates'
|
||||
base.create_dir(document_templates)
|
||||
base.copy_dir_content(document_templates_files, document_templates, "", ".git")
|
||||
base.copy_dir(document_templates_files + '/new', document_templates + '/new')
|
||||
base.copy_dir(document_templates_files + '/sample', document_templates + '/sample')
|
||||
|
||||
#document-formats
|
||||
document_formats_files = server_dir + '/../document-formats'
|
||||
document_formats = build_server_dir + '/../document-formats'
|
||||
base.create_dir(document_formats)
|
||||
base.copy_file(document_formats_files + '/onlyoffice-docs-formats.json', document_formats + '/onlyoffice-docs-formats.json')
|
||||
|
||||
#license
|
||||
license_file1 = server_dir + '/LICENSE.txt'
|
||||
@ -175,6 +197,7 @@ def make():
|
||||
base.copy_file(license_file1, build_server_dir)
|
||||
base.copy_file(license_file2, build_server_dir)
|
||||
base.copy_dir(license_dir, license)
|
||||
base.copy_dir(server_dir + '/dictionaries', build_server_dir + '/dictionaries')
|
||||
|
||||
#branding
|
||||
welcome_files = branding_dir + '/welcome'
|
||||
@ -201,15 +224,15 @@ def make():
|
||||
base.delete_dir(root_dir_snap)
|
||||
base.create_dir(root_dir_snap)
|
||||
base.copy_dir(root_dir, root_dir_snap)
|
||||
base.copy_dir(bin_server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
|
||||
base.copy_dir(bin_server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
|
||||
base.copy_dir(bin_server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
|
||||
base.copy_dir(server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
|
||||
base.copy_dir(server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
|
||||
base.copy_dir(server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
|
||||
base.delete_file(root_dir_snap + '/server/DocService/docservice')
|
||||
base.copy_dir(bin_server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
|
||||
base.copy_dir(bin_server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
|
||||
base.copy_dir(server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
|
||||
base.copy_dir(server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
|
||||
base.delete_file(root_dir_snap + '/server/FileConverter/converter')
|
||||
base.copy_dir(bin_server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
|
||||
base.copy_dir(bin_server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
|
||||
base.copy_dir(server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
|
||||
base.copy_dir(server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
|
||||
if (base.is_dir(root_dir_snap_example)):
|
||||
base.delete_dir(root_dir_snap_example)
|
||||
base.create_dir(root_dir_snap_example)
|
||||
@ -217,4 +240,3 @@ def make():
|
||||
base.delete_file(root_dir_snap + '/example/nodejs/example')
|
||||
|
||||
return
|
||||
|
||||
|
||||
104
scripts/develop/build_lo_linux.py
Normal file
104
scripts/develop/build_lo_linux.py
Normal file
@ -0,0 +1,104 @@
|
||||
# This script was successfully executed on Ubuntu 22.04.5 LTS
|
||||
|
||||
# Before starting, make sure that:
|
||||
# 1. Python >= 3.9
|
||||
# 2. The current working folder with the script and its path do not contain spaces and use Latin characters.
|
||||
# 3. Antivirus is turned off
|
||||
# 4. There is enough free space on the disk (50GB Libre Office and during the unpacking of packages, it's recommended that you allocate at least 80 gigabytes of free space)
|
||||
# 5. The current working folder with the script and its path do not contain spaces and use Latin characters.
|
||||
|
||||
# If the error "You must put some 'source' URIs in your sources.list" occurs, you need to run the command:
|
||||
# software-properties-gtk
|
||||
# in the terminal, and then under the "Ubuntu Software" tab, click "Source code" if it's not turned on and submit
|
||||
|
||||
# after completion, the file will appear:
|
||||
# current_folder_with_script/libreoffice_build/instdir/soffice
|
||||
# debugging can be done via MVS 2022
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio
|
||||
# or via VS Code with c/c++ tools
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Visual_Studio_Code_(VSCode)
|
||||
# or via Qt Creator
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Qt_Creator
|
||||
# or via attatch to the soffice.bin process
|
||||
# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
|
||||
CONFIGURE_PARAMS = [
|
||||
"--enable-dbgutil",
|
||||
"--without-doxygen",
|
||||
"--enable-pch",
|
||||
"--disable-ccache",
|
||||
# "--with-visual-studio=2022",
|
||||
'--enable-symbols="all"'
|
||||
]
|
||||
|
||||
SUDO_DEPENDENCIES = [
|
||||
"git", "build-essential", "zip", "ccache", "junit4", "libkrb5-dev", "nasm", "graphviz", "python3",
|
||||
"python3-dev", "python3-setuptools", "qtbase5-dev", "libkf5coreaddons-dev", "libkf5i18n-dev",
|
||||
"libkf5config-dev", "libkf5windowsystem-dev", "libkf5kio-dev", "libqt5x11extras5-dev", "autoconf",
|
||||
"libcups2-dev", "libfontconfig1-dev", "gperf", "openjdk-17-jdk", "doxygen", "libxslt1-dev",
|
||||
"xsltproc", "libxml2-utils", "libxrandr-dev", "libx11-dev", "bison", "flex", "libgtk-3-dev",
|
||||
"libgstreamer-plugins-base1.0-dev", "libgstreamer1.0-dev", "ant", "ant-optional", "libnss3-dev",
|
||||
"libavahi-client-dev", "libxt-dev"
|
||||
]
|
||||
|
||||
DIR_NAME = "libreoffice"
|
||||
OFFICE_PATH = "instdir/program/soffice"
|
||||
|
||||
class bcolors:
    """ANSI escape sequences used to colorize terminal output."""

    OKBLUE = '\033[94m'   # bright blue
    OKCYAN = '\033[96m'   # bright cyan
    OKGREEN = '\033[92m'  # bright green
    FAIL = '\033[91m'     # bright red, used for error messages
    RESET = '\033[0m'     # restore the terminal's default attributes
|
||||
|
||||
def run_command(command, exit_on_error=True):
    """Run *command* through the shell, streaming its output to the console.

    Args:
        command: Shell command line to execute.
        exit_on_error: When True (the default), a non-zero exit status
            terminates the whole script with exit code 1; when False the
            failure is only reported and execution continues.
    """
    try:
        subprocess.run(command, shell=True, check=True)
    except subprocess.CalledProcessError as e:
        # Report the exit status too, so failures are easier to diagnose.
        print(f"{bcolors.FAIL}Error executing command (exit code {e.returncode}): {command}{bcolors.RESET}")
        if exit_on_error:
            sys.exit(1)
|
||||
|
||||
def install_dependencies():
    """Install every apt prerequisite needed to build LibreOffice on Ubuntu."""
    print("Updating package list...")
    run_command("sudo apt update")

    print("Adding PPA for GCC/G++ update...")
    run_command("sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test")
    run_command("sudo apt update")

    print("Installing dependencies for LibreOffice...")
    run_command("sudo apt-get build-dep -y libreoffice")
    package_list = " ".join(str(pkg) for pkg in SUDO_DEPENDENCIES)
    run_command("sudo apt-get install " + package_list)

    # A failure here is tolerated: the alternative may already be registered.
    print("Updating GCC/G++ to v12...")
    run_command("sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 60 --slave /usr/bin/g++ g++ /usr/bin/g++-12", exit_on_error=False)

    print(f"{bcolors.OKGREEN}All dependencies successfully installed!{bcolors.RESET}")
|
||||
|
||||
def build_libreoffice():
    """Clone, configure, and build LibreOffice from source under ./DIR_NAME."""
    # Re-cloning into an existing folder fails; that is tolerated so the
    # script can be re-run after a partial build.
    print("Cloning LibreOffice repository...")
    run_command("git clone https://git.libreoffice.org/core " + DIR_NAME, exit_on_error=False)

    print("Changing to build directory...")
    os.chdir("./" + DIR_NAME)

    print("Start configurator autogen.sh...")
    configure_flags = " ".join(str(flag) for flag in CONFIGURE_PARAMS)
    run_command("./autogen.sh " + configure_flags)

    print(f"{bcolors.OKCYAN}Starting libreoffice build, this may take up to 24 hours and takes up about 20 GB of drive space. You will also most likely need at least 8 GBs of RAM, otherwise the machine might fall into swap and appear to freeze up...{bcolors.RESET}")
    run_command("make")

    print(f"{bcolors.OKGREEN}LibreOffice build completed!{bcolors.RESET}")

    # Optional: launch the freshly built office after the build.
    # print(bcolors.OKCYAN + "Running LibreOffice..." + bcolors.RESET)
    # run_command(OFFICE_PATH, exit_on_error=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: install the apt prerequisites first, then clone and build.
    install_dependencies()
    build_libreoffice()
|
||||
202
scripts/develop/build_lo_windows.py
Normal file
202
scripts/develop/build_lo_windows.py
Normal file
@ -0,0 +1,202 @@
|
||||
# Before starting, make sure that:
|
||||
# 1. MVS 2022 is installed and the necessary individual components are in its installer
|
||||
# - Windows Universal C Runtime
# - .NET Framework 4.x SDK (.NET Framework 5.x SDK and later are currently not supported. These don't register their information to registry, don't have csc.exe and they use dotnet command with csc.dll instead for compiling.)
# - C++ 20xx Redistributable MSMs (only required to build MSI installer)
# - C++ Clang Compiler for Windows (x.x.x)
|
||||
# 2. Java JDK >= 17
|
||||
# 3. Antivirus is turned off
|
||||
# 4. There is enough free space on the disk (50 GB for LibreOffice, 50 GB for cygwin64)
|
||||
|
||||
# after completion, the files will appear:
|
||||
# {LO_BUILD_PATH}/sources/libo-core/instdir/program/soffice.exe
|
||||
# {LO_BUILD_PATH}/sources/libo-core/LibreOffice.sln
|
||||
# debugging can be done via MVS 2022
|
||||
# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio
|
||||
# or via attaching to the soffice.bin process
|
||||
# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb
|
||||
|
||||
import sys
|
||||
|
||||
sys.path.append('../../scripts')
|
||||
import threading
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import shutil
|
||||
import argparse
|
||||
import base
|
||||
|
||||
# Where to fetch the Cygwin installer from, and where to stage it locally.
CYGWIN_DOWNLOAD_URL = 'https://cygwin.com/setup-x86_64.exe'
CYGWIN_TEMP_PATH = './tmp'
CYGWIN_SETUP_FILENAME = 'setup-x86_64.exe'
# Command line for the Cygwin installer: each "-P <name>" selects a package
# required by the LibreOffice build (gawk is pinned to a known-good version).
CYGWIN_SETUP_PARAMS = [
    "-P", "autoconf",
    "-P", "automake",
    "-P", "bison",
    "-P", "cabextract",
    "-P", "doxygen",
    "-P", "flex",
    "-P", "gawk=5.2.2-1",
    "-P", "gcc-g++",
    "-P", "gettext-devel",
    "-P", "git",
    "-P", "gnupg",
    "-P", "gperf",
    "-P", "make",
    "-P", "mintty",
    "-P", "nasm",
    "-P", "openssh",
    "-P", "openssl",
    "-P", "patch",
    "-P", "perl",
    "-P", "python",
    "-P", "python3",
    "-P", "pkg-config",
    "-P", "rsync",
    "-P", "unzip",
    "-P", "vim",
    "-P", "wget",
    "-P", "zip",
    "-P", "perl-Archive-Zip",
    "-P", "perl-Font-TTF",
    "-P", "perl-IO-String",
    "--no-admin",
    "--quiet-mode"
]
# Entry point of the default Cygwin installation; assumes C:/cygwin64.
CYGWIN_BAT_PATH = 'C:/cygwin64/Cygwin.bat'
# Absolute build root, by default an "LO" folder three levels above the cwd.
# May be overridden by --lo_build_path in the __main__ block.
LO_BUILD_PATH = os.path.normpath(os.path.join(os.getcwd(), '../../../LO'))


# Flags for LibreOffice's ./autogen.sh.
# NOTE(review): the f-strings are evaluated here, at import time, with the
# default LO_BUILD_PATH — a later reassignment of LO_BUILD_PATH does not
# update these values.
CONFIGURE_PARAMS = [f'--with-external-tar="{LO_BUILD_PATH}/sources/lo-externalsrc"',
                    f'--with-junit="{LO_BUILD_PATH}/sources/junit-4.10.jar"',
                    f'--with-ant-home="{LO_BUILD_PATH}/sources/apache-ant-1.9.5"',
                    "--enable-pch",
                    "--disable-ccache",
                    "--with-visual-studio=2022",
                    "--enable-dbgutil",
                    '--enable-symbols="all"']
|
||||
|
||||
|
||||
def create_folder_safe(folder_path):
    """Create *folder_path* (including missing parents) if it does not exist.

    Errors are reported but never raised, so the caller keeps running.

    Args:
        folder_path: Path of the folder to create.
    """
    if not os.path.exists(folder_path):
        try:
            # os.makedirs (rather than os.mkdir) also creates missing
            # intermediate folders — the script passes nested paths such as
            # ".../sources/lo-externalsrc" before ".../sources" exists.
            os.makedirs(folder_path)
            print(f"Folder '{folder_path}' created successfully.")
        except Exception as e:
            print(f"Error creating folder: {e}")
    else:
        print(f"Folder '{folder_path}' already exists.")
|
||||
|
||||
|
||||
class CygwinRunner:
    """Drives a Cygwin bash session (via Cygwin.bat) to prepare and build
    LibreOffice: each helper pipes a list of shell commands into the shell's
    stdin while mirroring its stdout/stderr to this script's console."""

    @staticmethod
    def process_commands(commands: list[str]):
        """Execute *commands* sequentially inside a fresh Cygwin shell.

        Output is pumped to sys.stdout/sys.stderr on background threads so
        that the pipes never fill up and stall the child process.
        """
        proc = subprocess.Popen(
            [CYGWIN_BAT_PATH], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
            shell=True, creationflags=subprocess.CREATE_NEW_CONSOLE
        )

        def read_stdout():
            for line in iter(proc.stdout.readline, ''):
                sys.stdout.write(line)
            proc.stdout.close()

        def read_stderr():
            for line in iter(proc.stderr.readline, ''):
                sys.stderr.write(line)
            proc.stderr.close()

        stdout_thread = threading.Thread(target=read_stdout)
        stderr_thread = threading.Thread(target=read_stderr)

        stdout_thread.start()
        stderr_thread.start()

        try:
            for command in commands:
                proc.stdin.write(command + '\n')
                proc.stdin.flush()
        finally:
            # Close stdin *before* joining the reader threads: the EOF lets
            # the shell terminate even if a command list lacks a trailing
            # 'exit', so the joins below cannot deadlock.
            proc.stdin.close()

        stdout_thread.join()
        stderr_thread.join()

        proc.wait()

    @staticmethod
    def install_gnu_make():
        """Install the MSVC-flavoured GNU make into /opt/lo/bin inside Cygwin."""
        base.print_info("install_gnu_make")
        commands = ['mkdir -p /opt/lo/bin',
                    'cd /opt/lo/bin',
                    'wget https://dev-www.libreoffice.org/bin/cygwin/make-4.2.1-msvc.exe',
                    'cp make-4.2.1-msvc.exe make',
                    'chmod +x make',
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def install_ant_and_junit():
        """Download Apache Ant 1.9.5 and JUnit 4.10 into the sources folder."""
        base.print_info("install_ant_and_junit")
        commands = [f'mkdir -p {LO_BUILD_PATH}/sources',
                    f'cd {LO_BUILD_PATH}/sources',
                    'wget https://archive.apache.org/dist/ant/binaries/apache-ant-1.9.5-bin.tar.bz2',
                    'tar -xjvf apache-ant-1.9.5-bin.tar.bz2',
                    'wget http://downloads.sourceforge.net/project/junit/junit/4.10/junit-4.10.jar',
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def clone_lo():
        """Clone the LibreOffice core repository into sources/libo-core."""
        base.print_info("clone_lo")
        commands = [f'cd {LO_BUILD_PATH}/sources',
                    'git clone https://gerrit.libreoffice.org/core libo-core',
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def build_autogen():
        """Run ./autogen.sh with CONFIGURE_PARAMS in the cloned repo."""
        base.print_info("build_autogen")
        commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
                    f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}",
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def run_make_build():
        """Build LibreOffice with the dedicated GNU make installed above."""
        base.print_info("run_make")
        commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
                    f'/opt/lo/bin/make gb_COLOR=1',
                    "exit"]
        CygwinRunner.process_commands(commands)

    @staticmethod
    def build_vs_integration():
        """Generate the LibreOffice.sln Visual Studio solution."""
        # Previously logged "run_make" (copy-paste from run_make_build);
        # use the correct step name.
        base.print_info("build_vs_integration")
        commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
                    f'/opt/lo/bin/make gb_COLOR=1 vs-ide-integration',
                    "exit"]
        CygwinRunner.process_commands(commands)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="options")
    parser.add_argument("--lo_build_path", dest="build_path", default=f'../../../LO')
    parser.add_argument("--disable_sln", dest="disable_sln", action=argparse.BooleanOptionalAction)
    args = parser.parse_args()

    # Resolve the build root to an absolute path (a relative argument is
    # interpreted against the cwd), matching the module-level default; the
    # script later chdirs, so a raw relative path would silently break.
    LO_BUILD_PATH = os.path.normpath(os.path.join(os.getcwd(), args.build_path))
    DISABLE_SLN = args.disable_sln

    # CONFIGURE_PARAMS was computed at import time from the default build
    # root; rebuild it so --lo_build_path actually affects autogen.sh.
    CONFIGURE_PARAMS = [f'--with-external-tar="{LO_BUILD_PATH}/sources/lo-externalsrc"',
                        f'--with-junit="{LO_BUILD_PATH}/sources/junit-4.10.jar"',
                        f'--with-ant-home="{LO_BUILD_PATH}/sources/apache-ant-1.9.5"',
                        "--enable-pch",
                        "--disable-ccache",
                        "--with-visual-studio=2022",
                        "--enable-dbgutil",
                        '--enable-symbols="all"']

    create_folder_safe(f'{LO_BUILD_PATH}/sources/lo-externalsrc')

    # Download and run the Cygwin installer from a throw-away temp folder.
    create_folder_safe(CYGWIN_TEMP_PATH)
    os.chdir(CYGWIN_TEMP_PATH)
    base.download(CYGWIN_DOWNLOAD_URL, CYGWIN_SETUP_FILENAME)
    subprocess.run([CYGWIN_SETUP_FILENAME] + CYGWIN_SETUP_PARAMS)
    os.chdir('..')
    shutil.rmtree(CYGWIN_TEMP_PATH)

    # Build pipeline inside Cygwin: tooling, sources, configure, make,
    # and (unless disabled) the Visual Studio solution.
    CygwinRunner.install_gnu_make()
    CygwinRunner.install_ant_and_junit()
    CygwinRunner.clone_lo()
    CygwinRunner.build_autogen()
    CygwinRunner.run_make_build()
    if not DISABLE_SLN:
        CygwinRunner.build_vs_integration()
|
||||
@ -5,9 +5,6 @@ import base
|
||||
import os
|
||||
import json
|
||||
|
||||
def get_core_url(arch, branch):
|
||||
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
|
||||
|
||||
def make():
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
old_cur = os.getcwd()
|
||||
@ -18,16 +15,10 @@ def make():
|
||||
|
||||
os.chdir(work_dir)
|
||||
|
||||
arch = "x64"
|
||||
arch2 = "_64"
|
||||
if ("windows" == base.host_platform()) and not base.host_platform_is64():
|
||||
arch = "x86"
|
||||
arch2 = "_32"
|
||||
|
||||
url = get_core_url(arch, config.option("branch"))
|
||||
url = base.get_autobuild_version("core", "", config.option("branch"))
|
||||
data_url = base.get_file_last_modified_url(url)
|
||||
if (data_url == "" and config.option("branch") != "develop"):
|
||||
url = get_core_url(arch, "develop")
|
||||
url = base.get_autobuild_version("core", "", "develop")
|
||||
data_url = base.get_file_last_modified_url(url)
|
||||
|
||||
old_data_url = base.readFile("./core.7z.data")
|
||||
@ -49,22 +40,25 @@ def make():
|
||||
base.extract("./core.7z", "./")
|
||||
base.writeFile("./core.7z.data", data_url)
|
||||
|
||||
platform = ""
|
||||
if ("windows" == base.host_platform()):
|
||||
platform = "win" + arch2
|
||||
else:
|
||||
platform = base.host_platform() + arch2
|
||||
|
||||
base.copy_files("./core/*", "./")
|
||||
else:
|
||||
print("-----------------------------------------------------------")
|
||||
print("Core is up to date. ---------------------------------------")
|
||||
print("-----------------------------------------------------------")
|
||||
|
||||
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/")
|
||||
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
|
||||
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
|
||||
|
||||
base.clone_marketplace_plugin(git_dir + "/sdkjs-plugins")
|
||||
if not base.is_dir(git_dir + "/sdkjs-plugins"):
|
||||
base.create_dir(git_dir + "/sdkjs-plugins")
|
||||
|
||||
if not base.is_dir(git_dir + "/sdkjs-plugins/v1"):
|
||||
base.create_dir(git_dir + "/sdkjs-plugins/v1")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", git_dir + "/sdkjs-plugins/v1/plugins.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", git_dir + "/sdkjs-plugins/v1/plugins-ui.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", git_dir + "/sdkjs-plugins/v1/plugins.css")
|
||||
|
||||
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
|
||||
base.copy_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False)
|
||||
|
||||
if not base.is_dir(git_dir + "/fonts"):
|
||||
base.create_dir(git_dir + "/fonts")
|
||||
@ -98,7 +92,8 @@ def make():
|
||||
server_addons = []
|
||||
if (config.option("server-addons") != ""):
|
||||
server_addons = config.option("server-addons").rsplit(", ")
|
||||
if ("server-lockstorage" in server_addons):
|
||||
#server-lockstorage is private
|
||||
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
|
||||
server_config["editorDataStorage"] = "editorDataRedis"
|
||||
|
||||
sdkjs_addons = []
|
||||
@ -122,6 +117,8 @@ def make():
|
||||
sql["type"] = config.option("sql-type")
|
||||
if (config.option("db-port") != ""):
|
||||
sql["dbPort"] = config.option("db-port")
|
||||
if (config.option("db-name") != ""):
|
||||
sql["dbName"] = config.option("db-name")
|
||||
if (config.option("db-user") != ""):
|
||||
sql["dbUser"] = config.option("db-user")
|
||||
if (config.option("db-pass") != ""):
|
||||
@ -134,12 +131,17 @@ def make():
|
||||
|
||||
#site url
|
||||
example_config = {}
|
||||
example_config["port"] = 80
|
||||
if (base.host_platform() == "linux"):
|
||||
example_config["port"] = 3000
|
||||
else:
|
||||
example_config["port"] = 80
|
||||
example_config["siteUrl"] = "http://" + config.option("siteUrl") + ":8000/"
|
||||
example_config["apiUrl"] = "web-apps/apps/api/documents/api.js"
|
||||
example_config["preloaderUrl"] = "web-apps/apps/api/documents/cache-scripts.html"
|
||||
json_file = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/local-development-" + base.host_platform() + ".json"
|
||||
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
|
||||
json_dir = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/"
|
||||
json_file = json_dir + "/local-development-" + base.host_platform() + ".json"
|
||||
if base.is_exist(json_dir):
|
||||
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
@ -66,6 +66,28 @@ class CDependencies:
|
||||
res += ['--remove-path', item]
|
||||
return res
|
||||
|
||||
def check__docker_dependencies():
|
||||
if (host_platform == 'windows' and not check_vc_components()):
|
||||
return False
|
||||
if (host_platform == 'mac'):
|
||||
return True
|
||||
|
||||
checksResult = CDependencies()
|
||||
checksResult.append(check_nodejs())
|
||||
checksResult.append(check_7z())
|
||||
if (len(checksResult.install) > 0):
|
||||
install_args = ['install.py']
|
||||
install_args += checksResult.get_uninstall()
|
||||
install_args += checksResult.get_removepath()
|
||||
install_args += checksResult.get_install()
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
install_args[0] = './scripts/develop/' + install_args[0]
|
||||
if (host_platform == 'windows'):
|
||||
code = libwindows.sudo(unicode(sys.executable), install_args)
|
||||
elif (host_platform == 'linux'):
|
||||
get_updates()
|
||||
base.cmd_in_dir(base_dir + "/../../", 'python', install_args, False)
|
||||
|
||||
def check_dependencies():
|
||||
if (host_platform == 'windows' and not check_vc_components()):
|
||||
return False
|
||||
@ -89,7 +111,8 @@ def check_dependencies():
|
||||
if (host_platform == 'windows'):
|
||||
checksResult.append(check_nodejs())
|
||||
|
||||
if (config.option("sql-type") == 'mysql' and host_platform == 'windows'):
|
||||
sql_type = config.option("sql-type")
|
||||
if (sql_type == 'mysql' and host_platform == 'windows'):
|
||||
checksResult.append(check_mysqlServer())
|
||||
else:
|
||||
checksResult.append(check_postgreSQL())
|
||||
@ -168,21 +191,21 @@ def check_nodejs():
|
||||
nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
|
||||
nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
|
||||
print('Installed Node.js version: ' + nodejs_version[1:])
|
||||
nodejs_min_version = '14.14'
|
||||
nodejs_min_version = '18'
|
||||
nodejs_min_version_minor = 0
|
||||
major_minor_min_version = nodejs_min_version.split('.')
|
||||
nodejs_min_version_major = int(major_minor_min_version[0])
|
||||
if len(major_minor_min_version) > 1:
|
||||
nodejs_min_version_minor = int(major_minor_min_version[1])
|
||||
nodejs_max_version = '14'
|
||||
nodejs_max_version = ""
|
||||
nodejs_max_version_minor = float("inf")
|
||||
major_minor_max_version = nodejs_max_version.split('.')
|
||||
nodejs_max_version_major = int(major_minor_max_version[0])
|
||||
# nodejs_max_version_major = int(major_minor_max_version[0])
|
||||
nodejs_max_version_major = float("inf")
|
||||
if len(major_minor_max_version) > 1:
|
||||
nodejs_max_version_minor = int(major_minor_max_version[1])
|
||||
|
||||
if (nodejs_min_version_major > nodejs_cur_version_major or nodejs_cur_version_major > nodejs_max_version_major):
|
||||
print('Installed Node.js version must be 14.14 to 14.x')
|
||||
isNeedReinstall = True
|
||||
elif (nodejs_min_version_major == nodejs_cur_version_major):
|
||||
if (nodejs_min_version_minor > nodejs_cur_version_minor):
|
||||
@ -192,7 +215,7 @@ def check_nodejs():
|
||||
isNeedReinstall = True
|
||||
|
||||
if (True == isNeedReinstall):
|
||||
print('Installed Node.js version must be 14.14 to 14.x')
|
||||
print('Installed Node.js version must be 18 or higher.')
|
||||
if (host_platform == 'windows'):
|
||||
dependence.append_uninstall('Node.js')
|
||||
dependence.append_install('Node.js')
|
||||
@ -209,18 +232,24 @@ def check_java():
|
||||
dependence = CDependencies()
|
||||
|
||||
base.print_info('Check installed Java')
|
||||
java_version = base.run_command('java -version')['stderr']
|
||||
java_info = base.run_command('java -version')['stderr']
|
||||
|
||||
if (java_version.find('64-Bit') != -1):
|
||||
version_pos = java_info.find('version "')
|
||||
java_v = 0
|
||||
if (version_pos != -1):
|
||||
try:
|
||||
java_v = float(java_info[version_pos + len('version "'): version_pos + len('version "') + 2])
|
||||
except:
|
||||
pass
|
||||
|
||||
if (java_info.find('64-Bit') != -1 and java_v >= 11):
|
||||
print('Installed Java is valid')
|
||||
return dependence
|
||||
|
||||
if (java_version.find('32-Bit') != -1):
|
||||
print('Installed Java must be x64')
|
||||
else:
|
||||
print('Java not found')
|
||||
|
||||
dependence.append_install('Java')
|
||||
else:
|
||||
print('Requires Java version 11+ x64-bit')
|
||||
dependence.append_install('Java')
|
||||
if (version_pos != -1):
|
||||
dependence.append_uninstall('Java')
|
||||
|
||||
return dependence
|
||||
|
||||
def get_erlang_path_to_bin():
|
||||
@ -272,9 +301,21 @@ def check_rabbitmq():
|
||||
print('RabbitMQ is installed')
|
||||
return dependence
|
||||
elif (host_platform == 'linux'):
|
||||
result = base.run_command('service rabbitmq-server status')['stdout']
|
||||
if (result != ''):
|
||||
print('Installed RabbitMQ is valid')
|
||||
result = ''
|
||||
|
||||
# Prefer systemctl for systemd
|
||||
systemctl_result = base.run_command('systemctl status rabbitmq-server')
|
||||
if systemctl_result['returncode'] == 0 and systemctl_result['stdout']:
|
||||
result = systemctl_result['stdout']
|
||||
|
||||
# Fallback to service for SysV
|
||||
if result == '':
|
||||
command_result = base.run_command('service rabbitmq-server status')
|
||||
if command_result['returncode'] == 0 and command_result['stdout']:
|
||||
result = command_result['stdout']
|
||||
|
||||
if result != '':
|
||||
print('RabbitMQ is installed')
|
||||
return dependence
|
||||
|
||||
print('RabbitMQ not found')
|
||||
@ -455,8 +496,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
|
||||
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
|
||||
mysqlPath += 'bin'
|
||||
return mysqlPath
|
||||
def get_mysqlLoginSrting():
|
||||
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
|
||||
def get_mysqlLoginString():
|
||||
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
|
||||
def get_mysqlServersInfo():
|
||||
arrInfo = []
|
||||
|
||||
@ -483,14 +524,15 @@ def get_mysqlServersInfo():
|
||||
def check_mysqlServer():
|
||||
base.print_info('Check MySQL Server')
|
||||
dependence = CDependencies()
|
||||
mysqlLoginSrt = get_mysqlLoginSrting()
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
|
||||
|
||||
if (host_platform != 'windows'):
|
||||
result = os.system(mysqlLoginSrt + ' -e "exit"')
|
||||
if (result == 0):
|
||||
connectionResult = base.run_command(connectionString)['stdout']
|
||||
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
|
||||
connectionResult = base.run_command(connectionString)
|
||||
expected_port = config.option("db-port")
|
||||
if (connectionResult['stdout'].find('port') != -1 and connectionResult['stdout'].find(expected_port) != -1):
|
||||
print('MySQL configuration is valid')
|
||||
dependence.sqlPath = 'mysql'
|
||||
return dependence
|
||||
@ -505,13 +547,17 @@ def check_mysqlServer():
|
||||
continue
|
||||
|
||||
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
|
||||
|
||||
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
|
||||
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
|
||||
mysql_bin_path = get_mysql_path_to_bin(info['Location'])
|
||||
connectionResult = base.run_command_in_dir(mysql_bin_path, connectionString)
|
||||
|
||||
expected_port = config.option("db-port")
|
||||
if (connectionResult['stdout'].find('port') != -1 and connectionResult['stdout'].find(expected_port) != -1):
|
||||
print(mysql_full_name + 'configuration is valid')
|
||||
dependence.sqlPath = info['Location']
|
||||
return dependence
|
||||
print(mysql_full_name + 'configuration is not valid')
|
||||
# if path exists, then further removal and installation fails(according to startup statistics). it is better to fix issue manually.
|
||||
return dependence
|
||||
|
||||
print('Valid MySQL Server not found')
|
||||
dependence.append_uninstall('MySQL Server')
|
||||
@ -531,23 +577,43 @@ def check_mysqlServer():
|
||||
return dependence
|
||||
def check_MySQLConfig(mysqlPath = ''):
|
||||
result = True
|
||||
mysqlLoginSrt = get_mysqlLoginSrting()
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
|
||||
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
|
||||
print('Database onlyoffice not found')
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
|
||||
print('Database "' + config.option("db-name") + '" not found')
|
||||
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
|
||||
if (not result):
|
||||
return False
|
||||
print('Creating ' + config.option("db-name") + ' tables ...')
|
||||
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
|
||||
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
|
||||
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
|
||||
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
|
||||
print('Password encryption is not valid')
|
||||
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
|
||||
|
||||
return result
|
||||
def execMySQLScript(mysql_path_to_bin, scriptPath):
|
||||
print('Execution ' + scriptPath)
|
||||
mysqlLoginSrt = get_mysqlLoginSrting()
|
||||
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
print('CREATE DATABASE ' + dbName + ';')
|
||||
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
|
||||
print('failed CREATE DATABASE ' + dbName + ';')
|
||||
return False
|
||||
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
|
||||
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
|
||||
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
|
||||
# return False
|
||||
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
|
||||
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
|
||||
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
|
||||
# return False
|
||||
return True
|
||||
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
|
||||
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
|
||||
print('Execution ' + scriptPath)
|
||||
mysqlLoginSrt = get_mysqlLoginString()
|
||||
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
|
||||
if (code != 0):
|
||||
print('Execution failed!')
|
||||
return False
|
||||
@ -556,7 +622,7 @@ def execMySQLScript(mysql_path_to_bin, scriptPath):
|
||||
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
|
||||
print('Setting MySQL password encrypting...')
|
||||
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
|
||||
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
|
||||
if (code != 0):
|
||||
print('Setting password encryption failed!')
|
||||
return False
|
||||
@ -582,8 +648,8 @@ def get_postrgre_path_to_bin(postgrePath = ''):
|
||||
return postgrePath
|
||||
def get_postgreLoginSrting(userName):
|
||||
if (host_platform == 'windows'):
|
||||
return 'psql -U' + userName + ' '
|
||||
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
|
||||
return 'psql -U ' + userName + ' -h localhost '
|
||||
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U ' + userName + ' -h localhost '
|
||||
def get_postgreSQLInfoByFlag(flag):
|
||||
arrInfo = []
|
||||
|
||||
@ -617,9 +683,10 @@ def check_postgreSQL():
|
||||
|
||||
if (host_platform == 'linux'):
|
||||
result = os.system(postgreLoginSrt + ' -c "\q"')
|
||||
connectionResult = base.run_command(connectionString)['stdout']
|
||||
connectionResult = base.run_command(connectionString)
|
||||
expected_port = config.option("db-port")
|
||||
|
||||
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
|
||||
if (result != 0 or connectionResult['stdout'].find(expected_port) == -1):
|
||||
print('Valid PostgreSQL not found!')
|
||||
dependence.append_install('PostgreSQL')
|
||||
dependence.append_uninstall('PostgreSQL')
|
||||
@ -629,19 +696,21 @@ def check_postgreSQL():
|
||||
return dependence
|
||||
|
||||
arrInfo = get_postgreSQLInfo()
|
||||
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
|
||||
base.set_env('PGPASSWORD', config.option("db-pass"))
|
||||
|
||||
for info in arrInfo:
|
||||
if (base.is_dir(info['Location']) == False):
|
||||
continue
|
||||
|
||||
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
|
||||
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
|
||||
|
||||
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
|
||||
postgre_bin_path = get_postrgre_path_to_bin(info['Location'])
|
||||
connectionResult = base.run_command_in_dir(postgre_bin_path, connectionString)
|
||||
|
||||
expected_port = config.option("db-port")
|
||||
if (connectionResult['stdout'].find(expected_port) != -1):
|
||||
print(postgre_full_name + 'configuration is valid')
|
||||
dependence.sqlPath = info['Location']
|
||||
return dependence
|
||||
print(postgre_full_name + 'configuration is not valid')
|
||||
|
||||
print('Valid PostgreSQL not found')
|
||||
|
||||
@ -654,60 +723,87 @@ def check_postgreSQL():
|
||||
return dependence
|
||||
def check_postgreConfig(postgrePath = ''):
|
||||
result = True
|
||||
base.print_info('Checking PostgreSQL configuration')
|
||||
|
||||
if (host_platform == 'windows'):
|
||||
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
|
||||
base.set_env('PGPASSWORD', config.option("db-pass"))
|
||||
|
||||
rootUser = install_params['PostgreSQL']['root']
|
||||
dbUser = install_params['PostgreSQL']['dbUser']
|
||||
dbName = install_params['PostgreSQL']['dbName']
|
||||
dbPass = install_params['PostgreSQL']['dbPass']
|
||||
dbUser = config.option("db-user")
|
||||
dbName = config.option("db-name")
|
||||
dbPass = config.option("db-pass")
|
||||
|
||||
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
|
||||
postgreLoginRoot = get_postgreLoginSrting(rootUser)
|
||||
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
|
||||
creatdb_path = base.get_script_dir() + "/../../server/schema/postgresql/createdb.sql"
|
||||
|
||||
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "\du ' + dbUser + '"')['stdout'].find(dbUser) != -1):
|
||||
print('User ' + dbUser + ' is exist')
|
||||
# Check if user exists
|
||||
user_check_result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "\du ' + dbUser + '"')
|
||||
|
||||
if (user_check_result['stdout'].find(dbUser) != -1):
|
||||
# User exists, check password
|
||||
if (os.system(postgreLoginDbUser + '-c "\q"') != 0):
|
||||
print('Invalid user password!')
|
||||
base.print_info('Changing password...')
|
||||
print('Invalid user password, changing...')
|
||||
result = change_userPass(dbUser, dbPass, postgre_path_to_bin) and result
|
||||
else:
|
||||
print('User ' + dbUser + ' not exist!')
|
||||
base.print_info('Creating ' + dbName + ' user...')
|
||||
print('Creating user ' + dbUser + '...')
|
||||
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
|
||||
|
||||
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
|
||||
print('Database ' + dbName + ' not found')
|
||||
base.print_info('Creating ' + dbName + ' database...')
|
||||
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
|
||||
# Check if database exists
|
||||
db_check_result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')
|
||||
|
||||
if (db_check_result['stdout'].find(dbName) == -1):
|
||||
print('Creating database ' + dbName + '...')
|
||||
create_result = create_postgreDb(dbName, postgre_path_to_bin)
|
||||
|
||||
if create_result:
|
||||
# Grant privileges to user on database and schema
|
||||
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + dbUser + ';"')
|
||||
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-d ' + dbName + ' -c "GRANT ALL ON SCHEMA public TO ' + dbUser + ';"')
|
||||
|
||||
configure_result = configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
|
||||
result = create_result and configure_result
|
||||
else:
|
||||
result = False
|
||||
else:
|
||||
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "SELECT pg_size_pretty(pg_database_size(' + "'" + dbName + "'" + '));"')['stdout'].find('7559 kB') != -1):
|
||||
print('Database ' + dbName + ' not configured')
|
||||
base.print_info('Configuring ' + dbName + ' database...')
|
||||
result = configureDb(dbName, creatdb_path, postgre_path_to_bin) and result
|
||||
print('Database ' + dbName + ' is valid')
|
||||
|
||||
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "\l+ ' + dbName + '"')['stdout'].find(dbUser +'=CTc/' + rootUser) == -1):
|
||||
print('User ' + dbUser + ' has no database privileges!')
|
||||
base.print_info('Setting database privileges for user ' + dbUser + '...')
|
||||
result = set_dbPrivilegesForUser(dbUser, dbName, postgre_path_to_bin) and result
|
||||
print('User ' + dbUser + ' has database privileges')
|
||||
|
||||
# Database exists - check if tables need to be created
|
||||
table_count_result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "SELECT count(*) FROM information_schema.tables WHERE table_schema = \'public\';"')
|
||||
|
||||
needs_configure = False
|
||||
|
||||
if table_count_result['stdout'].find(' 0') != -1:
|
||||
# No tables - need to configure
|
||||
needs_configure = True
|
||||
|
||||
if needs_configure:
|
||||
# Grant privileges and configure
|
||||
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + dbUser + ';"')
|
||||
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-d ' + dbName + ' -c "GRANT ALL ON SCHEMA public TO ' + dbUser + ';"')
|
||||
|
||||
configure_result = configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
|
||||
result = configure_result and result
|
||||
|
||||
return result
|
||||
def create_postgreDb(dbName, postgre_path_to_bin = ''):
|
||||
postgreLoginUser = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
|
||||
if (base.exec_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "CREATE DATABASE ' + dbName +';"') != 0):
|
||||
result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "CREATE DATABASE ' + dbName +';"')
|
||||
if (result['returncode'] != 0):
|
||||
print('Database creation failed!')
|
||||
return False
|
||||
return True
|
||||
def set_dbPrivilegesForUser(userName, dbName, postgre_path_to_bin = ''):
|
||||
postgreLoginUser = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
|
||||
if (base.exec_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + userName + ';"') != 0):
|
||||
result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + userName + ';"')
|
||||
if (result['returncode'] != 0):
|
||||
print('Grant privileges failed!')
|
||||
return False
|
||||
return True
|
||||
def create_postgreUser(userName, userPass, postgre_path_to_bin = ''):
|
||||
postgreLoginRoot = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
|
||||
if (base.exec_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "CREATE USER ' + userName + ' WITH password ' + "'" + userPass + "'" + ';"') != 0):
|
||||
result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "CREATE USER ' + userName + ' WITH password ' + "'" + userPass + "'" + ';"')
|
||||
if (result['returncode'] != 0):
|
||||
print('User creation failed!')
|
||||
return False
|
||||
return True
|
||||
def change_userPass(userName, userPass, postgre_path_to_bin = ''):
|
||||
@ -716,13 +812,24 @@ def change_userPass(userName, userPass, postgre_path_to_bin = ''):
|
||||
return False
|
||||
return True
|
||||
def configureDb(userName, dbName, scriptPath, postgre_path_to_bin = ''):
|
||||
print('Execution ' + scriptPath)
|
||||
postgreLoginSrt = get_postgreLoginSrting(userName)
|
||||
|
||||
code = base.exec_command_in_dir(postgre_path_to_bin, postgreLoginSrt + ' -d ' + dbName + ' -f "' + scriptPath + '"')
|
||||
if (code != 0):
|
||||
print('Execution failed!')
|
||||
print('Executing ' + scriptPath)
|
||||
|
||||
if not base.is_file(scriptPath):
|
||||
print('ERROR: Script file does not exist!')
|
||||
return False
|
||||
|
||||
postgreLoginSrt = get_postgreLoginSrting(userName)
|
||||
full_command = postgreLoginSrt + ' -d ' + dbName + ' -f "' + scriptPath + '"'
|
||||
|
||||
# Use run_command_in_dir to capture output
|
||||
result = base.run_command_in_dir(postgre_path_to_bin, full_command)
|
||||
|
||||
if (result['returncode'] != 0):
|
||||
print('Execution failed!')
|
||||
if result['stderr']:
|
||||
print('Error: ' + result['stderr'])
|
||||
return False
|
||||
|
||||
print('Execution completed')
|
||||
return True
|
||||
def uninstall_postgresql():
|
||||
@ -825,6 +932,7 @@ def installProgram(sName):
|
||||
print(install_command)
|
||||
code = os.system(install_command)
|
||||
base.delete_file(file_name)
|
||||
|
||||
elif (host_platform == 'linux'):
|
||||
if (sName in install_special):
|
||||
code = install_special[sName]()
|
||||
@ -855,13 +963,13 @@ def install_gruntcli():
|
||||
|
||||
def install_mysqlserver():
|
||||
if (host_platform == 'windows'):
|
||||
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
|
||||
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
|
||||
elif (host_platform == 'linux'):
|
||||
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
|
||||
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
|
||||
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
|
||||
code = os.system('sudo apt-get -y install zsh htop') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
|
||||
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
|
||||
return os.system('yes | sudo apt install mysql-server') and code
|
||||
return 1
|
||||
|
||||
@ -883,7 +991,7 @@ def install_postgresql():
|
||||
file_name = "install.exe"
|
||||
base.download(download_url, file_name)
|
||||
base.print_info("Install PostgreSQL...")
|
||||
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
|
||||
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
|
||||
else:
|
||||
base.print_info("Install PostgreSQL...")
|
||||
install_command = 'sudo apt install postgresql -y'
|
||||
@ -894,12 +1002,12 @@ def install_postgresql():
|
||||
if (host_platform == 'windows'):
|
||||
base.delete_file(file_name)
|
||||
else:
|
||||
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
|
||||
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
|
||||
|
||||
return code
|
||||
|
||||
def install_nodejs():
|
||||
os.system('curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -')
|
||||
os.system('curl -sSL https://deb.nodesource.com/setup_18.x | sudo -E bash -')
|
||||
base.print_info("Install node.js...")
|
||||
install_command = 'yes | sudo apt install nodejs'
|
||||
print(install_command)
|
||||
@ -908,8 +1016,8 @@ def install_nodejs():
|
||||
downloads_list = {
|
||||
'Windows': {
|
||||
'Git': 'https://github.com/git-for-windows/git/releases/download/v2.29.0.windows.1/Git-2.29.0-64-bit.exe',
|
||||
'Node.js': 'https://nodejs.org/download/release/v14.17.2/node-v14.17.2-x64.msi',
|
||||
'Java': 'https://javadl.oracle.com/webapps/download/AutoDL?BundleId=242990_a4634525489241b9a9e1aa73d9e118e6',
|
||||
'Node.js': 'https://nodejs.org/dist/v18.17.1/node-v18.17.1-x64.msi',
|
||||
'Java': 'https://aka.ms/download-jdk/microsoft-jdk-11.0.18-windows-x64.msi',
|
||||
'RabbitMQ': 'https://github.com/rabbitmq/rabbitmq-server/releases/download/v3.8.9/rabbitmq-server-3.8.9.exe',
|
||||
'Erlang': 'http://erlang.org/download/otp_win64_23.1.exe',
|
||||
'VC2019x64': 'https://aka.ms/vs/17/release/vc_redist.x64.exe',
|
||||
@ -944,23 +1052,14 @@ uninstall_special = {
|
||||
install_params = {
|
||||
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
|
||||
'Git': '/VERYSILENT /NORESTART',
|
||||
'Java': '/s',
|
||||
'MySQLServer': {
|
||||
'port': '3306',
|
||||
'user': 'root',
|
||||
'pass': 'onlyoffice',
|
||||
'version': '8.0.21'
|
||||
},
|
||||
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
|
||||
'PostgreSQL': {
|
||||
'root': 'postgres',
|
||||
'dbPort': '5432',
|
||||
'dbName': 'onlyoffice',
|
||||
'dbUser': 'onlyoffice',
|
||||
'dbPass': 'onlyoffice'
|
||||
'root': 'postgres'
|
||||
}
|
||||
}
|
||||
uninstall_params = {
|
||||
'PostgreSQL': '--mode unattended --unattendedmodeui none'
|
||||
}
|
||||
|
||||
|
||||
@ -10,11 +10,15 @@ import config_server as develop_config_server
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
def make():
|
||||
if ("1" != config.option("develop")):
|
||||
return
|
||||
if not dependence.check_dependencies():
|
||||
exit(1)
|
||||
def build_docker_server():
|
||||
dependence.check__docker_dependencies()
|
||||
build_develop_server()
|
||||
|
||||
def build_docker_sdk_web_apps(dir):
|
||||
dependence.check__docker_dependencies()
|
||||
build_js.build_js_develop(dir)
|
||||
|
||||
def build_develop_server():
|
||||
build_server.build_server_develop()
|
||||
build_js.build_js_develop(base_dir + "/../../..")
|
||||
develop_config_server.make()
|
||||
@ -22,5 +26,12 @@ def make():
|
||||
branding_develop_script_dir = base_dir + "/../../../" + config.option("branding") + "/build_tools/scripts"
|
||||
if base.is_file(branding_develop_script_dir + "/develop.py"):
|
||||
base.cmd_in_dir(branding_develop_script_dir, "python", ["develop.py"], True)
|
||||
|
||||
def make():
|
||||
if ("1" != config.option("develop")):
|
||||
return
|
||||
if not dependence.check_dependencies():
|
||||
exit(1)
|
||||
build_develop_server()
|
||||
exit(0)
|
||||
|
||||
342
scripts/develop/git_operations.py
Normal file
342
scripts/develop/git_operations.py
Normal file
@ -0,0 +1,342 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Git Operations Script
|
||||
Provides functionality to clone repositories and create branches.
|
||||
Uses existing methods from base module and integrates with release.py patterns.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
# Add parent directory to path to import modules
|
||||
sys.path.append('../')
|
||||
import base
|
||||
import config
|
||||
import dependence
|
||||
|
||||
# Setup logging
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GitOperations:
|
||||
"""Class to handle git clone and branch creation using existing base module methods."""
|
||||
|
||||
def __init__(self, branding: str = "onlyoffice", base_branch: str = "develop",
|
||||
branding_url: str = "ONLYOFFICE/onlyoffice.git", branch_name: str = None,
|
||||
modules: str = "core desktop builder server mobile"):
|
||||
"""
|
||||
Initialize GitOperations with branding configuration and configure repositories.
|
||||
|
||||
Args:
|
||||
branding: Branding name (default: onlyoffice)
|
||||
base_branch: Base branch to work from (default: develop)
|
||||
branding_url: Relative path from git host base (default: ONLYOFFICE/onlyoffice.git)
|
||||
branch_name: Name of the branch to create (required for branch operations)
|
||||
modules: Modules to include (default: core desktop builder server mobile)
|
||||
"""
|
||||
self.branding = branding
|
||||
self.base_branch = base_branch
|
||||
self.branding_url = branding_url
|
||||
self.branch_name = branch_name
|
||||
self.modules = modules
|
||||
self.work_dir = None
|
||||
|
||||
# Configure repositories immediately
|
||||
self._configure()
|
||||
|
||||
# Update repositories after configuration
|
||||
repositories = self.get_configured_repositories()
|
||||
#base.update_repositories(repositories)
|
||||
|
||||
def create_branch(self, branch_name: str, repo_dir: str = None) -> bool:
|
||||
"""
|
||||
Create a new branch using base.cmd_in_dir.
|
||||
|
||||
Args:
|
||||
branch_name: Name of the new branch
|
||||
repo_dir: Repository directory (optional, uses current if not specified)
|
||||
from_branch: Branch to create from (optional, uses current if not specified)
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
work_dir = repo_dir or self.work_dir
|
||||
logger.info(f"Creating branch '{branch_name}' in {work_dir}")
|
||||
|
||||
try:
|
||||
# Create and checkout new branch
|
||||
base.cmd_in_dir(work_dir, "git", ["checkout", "-b", branch_name], True)
|
||||
logger.info(f"Successfully created branch: {branch_name}")
|
||||
return True
|
||||
except SystemExit:
|
||||
logger.error(f"Failed to create branch: {branch_name}")
|
||||
return False
|
||||
|
||||
def push_branch(self, branch_name: str, repo_dir: str = None, set_upstream: bool = True) -> bool:
|
||||
"""
|
||||
Push a branch to remote repository using base.cmd_in_dir.
|
||||
|
||||
Args:
|
||||
branch_name: Name of the branch to push
|
||||
repo_dir: Repository directory (optional, uses current if not specified)
|
||||
set_upstream: Whether to set upstream tracking (default: True)
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
work_dir = repo_dir or self.work_dir
|
||||
logger.info(f"Pushing branch '{branch_name}' in {work_dir}")
|
||||
|
||||
try:
|
||||
if set_upstream:
|
||||
# Push branch and set upstream tracking
|
||||
base.cmd_in_dir(work_dir, "git", ["push", "-u", "origin", branch_name], True)
|
||||
else:
|
||||
# Just push the branch
|
||||
base.cmd_in_dir(work_dir, "git", ["push", "origin", branch_name], True)
|
||||
|
||||
logger.info(f"Successfully pushed branch: {branch_name}")
|
||||
return True
|
||||
except SystemExit:
|
||||
logger.error(f"Failed to push branch: {branch_name}")
|
||||
return False
|
||||
|
||||
def _configure(self) -> bool:
|
||||
"""
|
||||
Configure repositories using existing configure.py pattern from release.py.
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
logger.info(f"Configuring and cloning repositories for branch: {self.base_branch}")
|
||||
|
||||
try:
|
||||
# Get build_tools origin and construct branding URL from git host base
|
||||
build_tools_origin = base.git_get_origin()
|
||||
# Extract git host base (everything up to the host)
|
||||
# For https://github.com/ORG/build_tools.git -> https://github.com/
|
||||
# For git@github.com:ORG/build_tools.git -> git@github.com:
|
||||
if '://' in build_tools_origin: # HTTPS
|
||||
host_base = build_tools_origin.split('/', 3)[0] + '//' + build_tools_origin.split('/', 3)[2] + '/'
|
||||
else: # SSH
|
||||
host_base = build_tools_origin.split(':', 1)[0] + ':'
|
||||
|
||||
branding_url = host_base + self.branding_url
|
||||
|
||||
logger.info(f"Build tools origin: {build_tools_origin}")
|
||||
logger.info(f"Git host base: {host_base}")
|
||||
logger.info(f"Using branding URL: {branding_url}")
|
||||
|
||||
# Check platform and dependencies like in release.py
|
||||
platform = base.host_platform()
|
||||
if platform == "windows":
|
||||
dependence.check_pythonPath()
|
||||
dependence.check_gitPath()
|
||||
|
||||
# Run configure.py like in release.py
|
||||
configure_args = [
|
||||
'configure.py',
|
||||
'--branding', self.branding,
|
||||
'--branding-url', branding_url,
|
||||
'--branch', self.base_branch,
|
||||
'--module', self.modules,
|
||||
'--update', '1',
|
||||
'--clean', '0'
|
||||
]
|
||||
|
||||
base.cmd_in_dir('../../', 'python', configure_args)
|
||||
|
||||
# Parse configuration like in release.py
|
||||
config.parse()
|
||||
|
||||
# Update build_tools repository
|
||||
base.git_update('build_tools')
|
||||
|
||||
# Update branding repository
|
||||
base.git_update(self.branding)
|
||||
|
||||
# Correct defaults (the branding repo is already updated)
|
||||
config.parse_defaults()
|
||||
|
||||
logger.info("Successfully configured")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to configure and clone: {e}")
|
||||
return False
|
||||
|
||||
def get_configured_repositories(self) -> Dict:
|
||||
"""Get repositories using existing base.get_repositories() pattern from release.py."""
|
||||
repositories = base.get_repositories()
|
||||
repositories['core-ext'] = [True, False]
|
||||
repositories['build_tools'] = [True, False]
|
||||
repositories[self.branding] = [True, False]
|
||||
return repositories
|
||||
|
||||
def _iterate_repositories(self, operation_func, operation_name: str) -> bool:
|
||||
"""
|
||||
Iterate over all repositories and apply the given operation function.
|
||||
|
||||
Args:
|
||||
operation_func: Function to apply to each repository (takes repo_name and repo_path)
|
||||
operation_name: Name of the operation for logging
|
||||
|
||||
Returns:
|
||||
bool: True if at least one operation succeeded, False otherwise
|
||||
"""
|
||||
repositories = self.get_configured_repositories()
|
||||
success_count = 0
|
||||
total_count = len(repositories)
|
||||
|
||||
for repo_name in repositories:
|
||||
current_dir = repositories[repo_name][1]
|
||||
repo_path = f"../../../{repo_name}" if current_dir == False else current_dir
|
||||
|
||||
if base.is_dir(repo_path):
|
||||
if operation_func(repo_name, repo_path):
|
||||
success_count += 1
|
||||
else:
|
||||
logger.warning(f"✗ Failed to {operation_name} in {repo_name}")
|
||||
else:
|
||||
logger.warning(f"Repository {repo_name} not found at {repo_path}")
|
||||
|
||||
logger.info(f"{operation_name.capitalize()} completed in {success_count}/{total_count} repositories")
|
||||
return success_count > 0
|
||||
|
||||
def delete_branch(self, branch_name: str, repo_dir: str = None, force: bool = False) -> bool:
|
||||
"""
|
||||
Delete a branch using base.cmd_in_dir.
|
||||
|
||||
Args:
|
||||
branch_name: Name of the branch to delete
|
||||
repo_dir: Repository directory (optional, uses current if not specified)
|
||||
force: Whether to force delete the branch (default: False)
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
work_dir = repo_dir or self.work_dir
|
||||
logger.info(f"Deleting branch '{branch_name}' in {work_dir}")
|
||||
|
||||
try:
|
||||
# Switch to base branch first to avoid deleting current branch
|
||||
base.cmd_in_dir(work_dir, "git", ["checkout", self.base_branch], True)
|
||||
|
||||
# Delete local branch
|
||||
delete_flag = "-D" if force else "-d"
|
||||
base.cmd_in_dir(work_dir, "git", ["branch", delete_flag, branch_name], True)
|
||||
logger.info(f"Successfully deleted local branch: {branch_name}")
|
||||
|
||||
# Delete remote branch
|
||||
try:
|
||||
base.cmd_in_dir(work_dir, "git", ["push", "origin", "--delete", branch_name], True)
|
||||
logger.info(f"Successfully deleted remote branch: {branch_name}")
|
||||
except SystemExit:
|
||||
logger.warning(f"Failed to delete remote branch: {branch_name} (may not exist)")
|
||||
|
||||
return True
|
||||
except SystemExit:
|
||||
logger.error(f"Failed to delete branch: {branch_name}")
|
||||
return False
|
||||
|
||||
def create_branches(self) -> bool:
|
||||
"""
|
||||
Create a branch with the given name in all repositories.
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
logger.info(f"Creating branch '{self.branch_name}' in all repositories")
|
||||
|
||||
def create_and_push_branch(repo_name: str, repo_path: str) -> bool:
|
||||
"""Create and push branch for a single repository."""
|
||||
if self.create_branch(self.branch_name, repo_path):
|
||||
logger.info(f"✓ Created branch '{self.branch_name}' in {repo_name}")
|
||||
# Push the created branch
|
||||
if self.push_branch(self.branch_name, repo_path):
|
||||
logger.info(f"✓ Pushed branch '{self.branch_name}' in {repo_name}")
|
||||
return True
|
||||
else:
|
||||
logger.warning(f"✗ Failed to push branch '{self.branch_name}' in {repo_name}")
|
||||
return False
|
||||
else:
|
||||
logger.warning(f"✗ Failed to create branch '{self.branch_name}' in {repo_name}")
|
||||
return False
|
||||
|
||||
try:
|
||||
return self._iterate_repositories(create_and_push_branch, f"create and push branch '{self.branch_name}'")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create branch in all repositories: {e}")
|
||||
return False
|
||||
|
||||
def remove_branches(self, force: bool = False) -> bool:
|
||||
"""
|
||||
Remove a branch with the given name from all repositories.
|
||||
|
||||
Args:
|
||||
force: Whether to force delete the branch (default: False)
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
logger.info(f"Removing branch '{self.branch_name}' from all repositories")
|
||||
|
||||
def delete_branch_operation(repo_name: str, repo_path: str) -> bool:
|
||||
"""Delete branch for a single repository."""
|
||||
if self.delete_branch(self.branch_name, repo_path, force):
|
||||
logger.info(f"✓ Removed branch '{self.branch_name}' from {repo_name}")
|
||||
return True
|
||||
else:
|
||||
logger.warning(f"✗ Failed to remove branch '{self.branch_name}' from {repo_name}")
|
||||
return False
|
||||
|
||||
try:
|
||||
return self._iterate_repositories(delete_branch_operation, f"remove branch '{self.branch_name}'")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to remove branch from all repositories: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
"""Main function to handle command line arguments."""
|
||||
parser = argparse.ArgumentParser(description='Git Operations Tool - Create and Remove Branches')
|
||||
subparsers = parser.add_subparsers(dest='command', help='Available commands')
|
||||
|
||||
# Create branch command (configure, clone and create branch in all repositories)
|
||||
branch_parser = subparsers.add_parser('create', help='Configure, clone and create branch in all repositories')
|
||||
branch_parser.add_argument('branch_name', help='Name of the branch to create')
|
||||
branch_parser.add_argument('--base-branch', default='develop', help='Base branch to work from (default: develop)')
|
||||
branch_parser.add_argument('--branding', default='onlyoffice', help='Branding name')
|
||||
branch_parser.add_argument('--branding-url', default='ONLYOFFICE/onlyoffice.git', help='Relative path from git host base (default: ONLYOFFICE/onlyoffice.git)')
|
||||
branch_parser.add_argument('--modules', default='core desktop builder server mobile', help='Modules to include')
|
||||
|
||||
# Remove branch command (configure, clone and remove branch from all repositories)
|
||||
remove_parser = subparsers.add_parser('remove', help='Configure, clone and remove branch from all repositories')
|
||||
remove_parser.add_argument('branch_name', help='Name of the branch to remove')
|
||||
remove_parser.add_argument('--base-branch', default='develop', help='Base branch to work from (default: develop)')
|
||||
remove_parser.add_argument('--branding', default='onlyoffice', help='Branding name')
|
||||
remove_parser.add_argument('--branding-url', default='ONLYOFFICE/onlyoffice.git', help='Relative path from git host base (default: ONLYOFFICE/onlyoffice.git)')
|
||||
remove_parser.add_argument('--modules', default='core desktop builder server mobile', help='Modules to include')
|
||||
remove_parser.add_argument('--force', action='store_true', help='Force delete the branch (equivalent to git branch -D)')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.command:
|
||||
parser.print_help()
|
||||
return
|
||||
|
||||
git_ops = GitOperations(args.branding, args.base_branch, args.branding_url, args.branch_name, args.modules)
|
||||
|
||||
if args.command == 'create':
|
||||
success = git_ops.create_branches()
|
||||
sys.exit(0 if success else 1)
|
||||
elif args.command == 'remove':
|
||||
success = git_ops.remove_branches(args.force)
|
||||
sys.exit(0 if success else 1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@ -6,6 +6,7 @@ import base
|
||||
import shutil
|
||||
import optparse
|
||||
import dependence
|
||||
import config
|
||||
|
||||
arguments = sys.argv[1:]
|
||||
|
||||
@ -17,6 +18,10 @@ parser.add_option("--remove-path", action="append", type="string", dest="remove-
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
configOptions = vars(options)
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
config.parse_defaults()
|
||||
|
||||
for item in configOptions["uninstall"]:
|
||||
dependence.uninstallProgram(item)
|
||||
for item in configOptions["remove-path"]:
|
||||
|
||||
@ -34,9 +34,6 @@ config.parse_defaults()
|
||||
|
||||
repositories = base.get_repositories()
|
||||
|
||||
# Add other plugins
|
||||
repositories.update(base.get_plugins('autocomplete, easybib, wordpress'))
|
||||
|
||||
# Add other repositories
|
||||
if config.check_option("module", "builder"):
|
||||
repositories['document-builder-package'] = [False, False]
|
||||
|
||||
@ -39,8 +39,6 @@ config.parse_defaults()
|
||||
|
||||
repositories = base.get_repositories()
|
||||
|
||||
# Add other plugins
|
||||
repositories.update(base.get_plugins('autocomplete, easybib, glavred, wordpress'))
|
||||
# Add other repositories
|
||||
repositories['core-ext'] = [True, False]
|
||||
|
||||
|
||||
@ -6,6 +6,16 @@ import os
|
||||
import base
|
||||
import dependence
|
||||
import traceback
|
||||
import develop
|
||||
|
||||
# if (sys.version_info[0] >= 3):
|
||||
# unicode = str
|
||||
|
||||
# host_platform = base.host_platform()
|
||||
# if (host_platform == 'windows'):
|
||||
# import libwindows
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
def install_module(path):
|
||||
base.print_info('Install: ' + path)
|
||||
@ -18,11 +28,18 @@ def find_rabbitmqctl(base_path):
|
||||
return base.find_file(os.path.join(base_path, 'RabbitMQ Server'), 'rabbitmqctl.bat')
|
||||
|
||||
def restart_win_rabbit():
|
||||
# todo maybe restarting is not relevant after many years and versions?
|
||||
base.print_info('restart RabbitMQ node to prevent "Erl.exe high CPU usage every Monday morning on Windows" https://groups.google.com/forum/#!topic/rabbitmq-users/myl74gsYyYg')
|
||||
rabbitmqctl = find_rabbitmqctl(os.environ['PROGRAMW6432']) or find_rabbitmqctl(os.environ['ProgramFiles(x86)'])
|
||||
if rabbitmqctl is not None:
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
|
||||
try:
|
||||
# code = libwindows.sudo(unicode(sys.executable), ['net', 'stop', 'rabbitmq'])
|
||||
# code = libwindows.sudo(unicode(sys.executable), ['net', 'start', 'rabbitmq'])
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
|
||||
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
|
||||
except SystemExit:
|
||||
base.print_error('Perhaps Erlang cookies are different: Replace %userprofile%/.erlang.cookie with %WINDIR%/System32/config/systemprofile/.erlang.cookie')
|
||||
raise
|
||||
else:
|
||||
base.print_info('Missing rabbitmqctl.bat')
|
||||
|
||||
@ -41,56 +58,98 @@ def start_linux_services():
|
||||
os.system('sudo service rabbitmq-server restart')
|
||||
|
||||
def run_integration_example():
|
||||
base.cmd_in_dir('../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
|
||||
if base.is_exist(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs'):
|
||||
base.cmd_in_dir(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
|
||||
|
||||
def start_linux_services():
|
||||
base.print_info('Restart MySQL Server')
|
||||
|
||||
|
||||
def update_config(args):
|
||||
platform = base.host_platform()
|
||||
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
|
||||
|
||||
if ("linux" == platform):
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
|
||||
else:
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
|
||||
|
||||
|
||||
def make_start():
|
||||
base.configure_common_apps()
|
||||
|
||||
platform = base.host_platform()
|
||||
if ("windows" == platform):
|
||||
dependence.check_pythonPath()
|
||||
dependence.check_gitPath()
|
||||
restart_win_rabbit()
|
||||
elif ("mac" == platform):
|
||||
start_mac_services()
|
||||
elif ("linux" == platform):
|
||||
start_linux_services()
|
||||
|
||||
def make_configure(args):
|
||||
base.print_info('Build modules')
|
||||
update_config(args)
|
||||
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
|
||||
def make_install():
|
||||
platform = base.host_platform()
|
||||
run_integration_example()
|
||||
|
||||
base.create_dir(base_dir + '/../../../server/App_Data')
|
||||
|
||||
install_module(base_dir + '/../../../server/DocService')
|
||||
install_module(base_dir + '/../../../server/Common')
|
||||
install_module(base_dir + '/../../../server/FileConverter')
|
||||
|
||||
def make_run():
|
||||
platform = base.host_platform()
|
||||
base.set_env('NODE_ENV', 'development-' + platform)
|
||||
base.set_env('NODE_CONFIG_DIR', '../Common/config')
|
||||
|
||||
if ("mac" == platform):
|
||||
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
|
||||
elif ("linux" == platform):
|
||||
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
|
||||
|
||||
run_module(base_dir + '/../../../server/DocService', ['sources/server.js'])
|
||||
#run_module(base_dir + '/../../../server/DocService', ['sources/gc.js'])
|
||||
run_module(base_dir + '/../../../server/FileConverter', ['sources/convertermaster.js'])
|
||||
#run_module(base_dir + '/../../../server/SpellChecker', ['sources/server.js'])
|
||||
|
||||
def run_docker_server(args = []):
|
||||
try:
|
||||
make_start()
|
||||
develop.build_docker_server()
|
||||
make_install()
|
||||
|
||||
except SystemExit:
|
||||
input("Ignoring SystemExit. Press Enter to continue...")
|
||||
exit(0)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
except:
|
||||
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
|
||||
|
||||
def run_docker_sdk_web_apps(dir):
|
||||
try:
|
||||
develop.build_docker_sdk_web_apps(dir)
|
||||
|
||||
except SystemExit:
|
||||
input("Ignoring SystemExit. Press Enter to continue...")
|
||||
exit(0)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
except:
|
||||
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
|
||||
|
||||
def make(args = []):
|
||||
try:
|
||||
base.configure_common_apps()
|
||||
|
||||
platform = base.host_platform()
|
||||
if ("windows" == platform):
|
||||
dependence.check_pythonPath()
|
||||
dependence.check_gitPath()
|
||||
restart_win_rabbit()
|
||||
elif ("mac" == platform):
|
||||
start_mac_services()
|
||||
elif ("linux" == platform):
|
||||
start_linux_services()
|
||||
|
||||
make_start()
|
||||
make_configure(args)
|
||||
make_install()
|
||||
make_run()
|
||||
|
||||
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
|
||||
|
||||
base.print_info('Build modules')
|
||||
if ("linux" == platform):
|
||||
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
|
||||
else:
|
||||
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
|
||||
|
||||
base.cmd_in_dir('../../', 'python', ['make.py'])
|
||||
|
||||
run_integration_example()
|
||||
|
||||
base.create_dir('../../../server/App_Data')
|
||||
|
||||
install_module('../../../server/DocService')
|
||||
install_module('../../../server/Common')
|
||||
install_module('../../../server/FileConverter')
|
||||
|
||||
base.set_env('NODE_ENV', 'development-' + platform)
|
||||
base.set_env('NODE_CONFIG_DIR', '../Common/config')
|
||||
|
||||
if ("mac" == platform):
|
||||
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
|
||||
elif ("linux" == platform):
|
||||
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
|
||||
|
||||
run_module('../../../server/DocService', ['sources/server.js'])
|
||||
# run_module('../../../server/DocService', ['sources/gc.js'])
|
||||
run_module('../../../server/FileConverter', ['sources/convertermaster.js'])
|
||||
# run_module('../../../server/SpellChecker', ['sources/server.js'])
|
||||
except SystemExit:
|
||||
input("Ignoring SystemExit. Press Enter to continue...")
|
||||
exit(0)
|
||||
|
||||
162
scripts/license_checker/Readme.md
Normal file
162
scripts/license_checker/Readme.md
Normal file
@ -0,0 +1,162 @@
|
||||
# license_checker
|
||||
|
||||
## Overview
|
||||
|
||||
**license_checker** allow you to automatically check
|
||||
licenses inside specified code files.
|
||||
|
||||
## How to use
|
||||
|
||||
### Running
|
||||
|
||||
**Note**: Pyhton 3.9 and above required
|
||||
(otherwise `TypeError: 'type' object is not subscriptable`)
|
||||
|
||||
* Linux
|
||||
|
||||
```bash
|
||||
python3 license_checker.py
|
||||
```
|
||||
|
||||
* Windows
|
||||
|
||||
```bash
|
||||
python license_checker.py
|
||||
```
|
||||
|
||||
## How to configure
|
||||
|
||||
The checker settings are specified in the `config.json`.
|
||||
The path to the license template is indicated there.
|
||||
|
||||
### How to specify a license template
|
||||
|
||||
The license template is a plain text
|
||||
file where the license text is indicated
|
||||
as you would like to see the license at
|
||||
the beginning of the file.
|
||||
|
||||
### How to configure `config.json`
|
||||
|
||||
#### Сonfig parameters
|
||||
|
||||
* `basePath` specifies which folder the
|
||||
paths will be relative to.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"basePath": "../../../"
|
||||
```
|
||||
|
||||
* `reportFolder` specifies in which folder to
|
||||
save text files with reports.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"reportFolder": "build_tools/scripts/license_checker/reports"
|
||||
```
|
||||
|
||||
* `printChecking` specifies whether to output
|
||||
information about which file is
|
||||
being checked to the console.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"printChecking": false
|
||||
```
|
||||
|
||||
* `printReports` specifies whether to output
|
||||
reports to the console.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"printReports": false
|
||||
```
|
||||
|
||||
* `fix` specifies which categories of reports
|
||||
should be repaired automatically.
|
||||
Possible array values:
|
||||
`"OUTDATED"`,
|
||||
`"NO_LICENSE"`,
|
||||
`"INVALID_LICENSE"`,
|
||||
`"LEN_MISMATCH"`.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"fix": ["OUTDATED", "NO_LICENSE"],
|
||||
```
|
||||
|
||||
Automatically repair files where the license is outdated or not found.
|
||||
|
||||
* `configs` license check and repair configurations.
|
||||
|
||||
* `dir` folder to check.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"dir": "sdkjs"
|
||||
```
|
||||
|
||||
* `fileExtensions` file extensions to check.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"fileExtensions": [".js"]
|
||||
```
|
||||
|
||||
* `licensePath` specifies the path to the license template.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"licensePath": "header.license"
|
||||
```
|
||||
|
||||
* `ignoreListDir` folder paths to ignore.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"ignoreListDir": [
|
||||
"sdkjs/deploy",
|
||||
"sdkjs/develop",
|
||||
"sdkjs/configs",
|
||||
"sdkjs/common/AllFonts.js",
|
||||
"sdkjs/slide/themes/themes.js"
|
||||
]
|
||||
```
|
||||
|
||||
* `ignoreListDirName` folder names to ignore.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
]
|
||||
```
|
||||
|
||||
* `ignoreListFile` file paths to ignore.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"ignoreListFile": [
|
||||
"sdkjs/develop/awesomeFileToIgnore.js",
|
||||
]
|
||||
```
|
||||
|
||||
* `allowListFile` file paths to allow. It is needed if you ignore the directory, but there is a file in it that needs to be checked.
|
||||
**For example:**
|
||||
|
||||
```json
|
||||
"ignoreListDir": [
|
||||
"sdkjs/develop"
|
||||
],
|
||||
"allowListFile": [
|
||||
"sdkjs/develop/awesomeFileToAllow.js",
|
||||
]
|
||||
```
|
||||
|
||||
Any number of configurations can be
|
||||
specified, they can overlap
|
||||
if we need to check
|
||||
files in the same folder in different ways.
|
||||
|
||||
208
scripts/license_checker/config.json
Normal file
208
scripts/license_checker/config.json
Normal file
@ -0,0 +1,208 @@
|
||||
{
|
||||
"basePath": "../../../",
|
||||
"reportFolder": "build_tools/scripts/license_checker/reports",
|
||||
"printChecking": false,
|
||||
"printReports": false,
|
||||
"fix": ["OUTDATED"],
|
||||
"configs": [
|
||||
{
|
||||
"dir": "core",
|
||||
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"core/build",
|
||||
"core/Common/cfcpp/test",
|
||||
"core/Common/js",
|
||||
"core/DesktopEditor/agg-2.4",
|
||||
"core/DesktopEditor/cximage",
|
||||
"core/DesktopEditor/freetype_names/freetype-2.5.3",
|
||||
"core/DesktopEditor/freetype-2.5.2",
|
||||
"core/DesktopEditor/freetype-2.10.4",
|
||||
"core/DesktopEditor/raster/JBig2",
|
||||
"core/DesktopEditor/raster/Jp2",
|
||||
"core/DesktopEditor/xml/libxml2",
|
||||
"core/DesktopEditor/xmlsec",
|
||||
"core/DjVuFile/libdjvu",
|
||||
"core/DjVuFile/wasm",
|
||||
"core/EpubFile",
|
||||
"core/OOXML/PPTXFormat/Limit/pri",
|
||||
"core/Fb2File",
|
||||
"core/HtmlFile2",
|
||||
"core/Apple",
|
||||
"core/HwpFile",
|
||||
"core/OdfFile/Common/utf8cpp",
|
||||
"core/OfficeUtils/js/emsdk",
|
||||
"core/OfficeUtils/src/zlib-1.2.11",
|
||||
"core/PdfFile/lib",
|
||||
"core/UnicodeConverter/icubuilds-mac",
|
||||
"core/UnicodeConverter/icubuilds-win32"
|
||||
],
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor",
|
||||
"3dParty"
|
||||
],
|
||||
"ignoreListFile": [
|
||||
"core/Test/CoAuthoring/settings.js",
|
||||
"core/OdfFile/Projects/Linux/precompiled.h",
|
||||
"core/MsBinaryFile/Projects/XlsFormatLib/Linux/precompiled.h"
|
||||
],
|
||||
"allowListFile": [
|
||||
"core/DesktopEditor/freetype_names/FontMaps/FontMaps.cpp",
|
||||
"core/Common/3dParty/openssl/test/main.cpp ",
|
||||
"core/Common/3dParty/openssl/common/common_openssl.h",
|
||||
"core/Common/3dParty/openssl/common/common_openssl.cpp"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "core-ext",
|
||||
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"core-ext/AutoTester",
|
||||
"core-ext/cell_android",
|
||||
"core-ext/cell_android",
|
||||
"core-ext/desktop-sdk-private",
|
||||
"core-ext/docbuilder",
|
||||
"core-ext/Registration",
|
||||
"core-ext/slide_android",
|
||||
"core-ext/test",
|
||||
"core-ext/word_android",
|
||||
"core-ext/word_ios"
|
||||
],
|
||||
"ignoreListFile": [
|
||||
"core-ext/native_base/json.hpp",
|
||||
"core-ext/native_base/android_base/libeditors/src/main/cpp/workaround/swab/swab.h"
|
||||
]
|
||||
|
||||
},
|
||||
{
|
||||
"dir": "sdkjs",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"sdkjs/deploy",
|
||||
"sdkjs/develop",
|
||||
"sdkjs/configs"
|
||||
],
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
],
|
||||
"ignoreListFile": [
|
||||
"sdkjs/common/externs/jquery-3.2.js",
|
||||
"sdkjs/common/externs/socket.io.js",
|
||||
"sdkjs/common/Native/jquery_native.js",
|
||||
"sdkjs/common/AllFonts.js",
|
||||
"sdkjs/slide/themes/themes.js"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "sdkjs-forms",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "sdkjs-ooxml",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "web-apps",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor",
|
||||
"search"
|
||||
],
|
||||
"ignoreListDir": [
|
||||
"web-apps/apps/common/mobile",
|
||||
"web-apps/apps/common/main/lib/mods",
|
||||
"web-apps/apps/documenteditor/mobile",
|
||||
"web-apps/apps/spreadsheeteditor/mobile",
|
||||
"web-apps/apps/presentationeditor/mobile",
|
||||
"web-apps/build/plugins/grunt-inline"
|
||||
],
|
||||
"ignoreListFile": [
|
||||
"web-apps/apps/api/documents/api.js",
|
||||
"web-apps/apps/common/main/lib/core/application.js",
|
||||
"web-apps/apps/common/main/lib/core/keymaster.js",
|
||||
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "web-apps-mobile",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules",
|
||||
"vendor"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "server",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDir": [
|
||||
"server/FileConverter/bin"
|
||||
],
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "server-lockstorage",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "server-license",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "server-license-key",
|
||||
"fileExtensions": [".js"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"node_modules"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "editors-ios",
|
||||
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
|
||||
"licensePath": "header.license",
|
||||
"ignoreListDirName": [
|
||||
"vendor",
|
||||
"Vendor",
|
||||
"3dParty"
|
||||
],
|
||||
"allowListFile": [
|
||||
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.h",
|
||||
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.m",
|
||||
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.h",
|
||||
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.m"
|
||||
]
|
||||
},
|
||||
{
|
||||
"dir": "editors-webview-ios",
|
||||
"fileExtensions": [".swift", ".xcconfig"],
|
||||
"licensePath": "header.license",
|
||||
}
|
||||
]
|
||||
}
|
||||
31
scripts/license_checker/header.license
Normal file
31
scripts/license_checker/header.license
Normal file
@ -0,0 +1,31 @@
|
||||
/*
|
||||
* (c) Copyright Ascensio System SIA 2010-2025
|
||||
*
|
||||
* This program is a free software product. You can redistribute it and/or
|
||||
* modify it under the terms of the GNU Affero General Public License (AGPL)
|
||||
* version 3 as published by the Free Software Foundation. In accordance with
|
||||
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
|
||||
* that Ascensio System SIA expressly excludes the warranty of non-infringement
|
||||
* of any third-party rights.
|
||||
*
|
||||
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
|
||||
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
|
||||
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
|
||||
*
|
||||
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
|
||||
* street, Riga, Latvia, EU, LV-1050.
|
||||
*
|
||||
* The interactive user interfaces in modified source and object code versions
|
||||
* of the Program must display Appropriate Legal Notices, as required under
|
||||
* Section 5 of the GNU AGPL version 3.
|
||||
*
|
||||
* Pursuant to Section 7(b) of the License you must retain the original Product
|
||||
* logo when distributing the program. Pursuant to Section 7(e) we decline to
|
||||
* grant you any rights under trademark law for use of our trademarks.
|
||||
*
|
||||
* All the Product's GUI elements, including illustrations and icon sets, as
|
||||
* well as technical writing content are licensed under the terms of the
|
||||
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
|
||||
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
|
||||
*
|
||||
*/
|
||||
339
scripts/license_checker/license_checker.py
Normal file
339
scripts/license_checker/license_checker.py
Normal file
@ -0,0 +1,339 @@
|
||||
import os
|
||||
import re
|
||||
import enum
|
||||
import json
|
||||
import codecs
|
||||
|
||||
CONFIG_PATH = 'config.json'
|
||||
|
||||
class ErrorType(enum.Enum):
|
||||
INVALID_LICENSE = 1
|
||||
NO_LICENSE = 2
|
||||
OUTDATED = 3
|
||||
LEN_MISMATCH = 4
|
||||
|
||||
FIX_TYPES = {
|
||||
'OUTDATED': ErrorType.OUTDATED,
|
||||
'NO_LICENSE': ErrorType.NO_LICENSE,
|
||||
'INVALID_LICENSE': ErrorType.INVALID_LICENSE,
|
||||
'LEN_MISMATCH': ErrorType.LEN_MISMATCH
|
||||
}
|
||||
|
||||
class Config(object):
|
||||
"""
|
||||
License checker configuration.
|
||||
Attributes:
|
||||
dir: Directory to check.
|
||||
fileExtensions: file extensions to check.
|
||||
ignoreListDir: Ignored folder paths.
|
||||
ignoreListDirName: Ignored folder names.
|
||||
ignoreListFile: Ignored file paths.
|
||||
allowListFile: allow file paths.
|
||||
"""
|
||||
def __init__(self,
|
||||
dir: str,
|
||||
fileExtensions: list[str],
|
||||
licensePath: str = 'header.license',
|
||||
allowListFile: list[str] = [],
|
||||
ignoreListDir: list[str] = [],
|
||||
ignoreListDirName: list[str] = [],
|
||||
ignoreListFile: list[str] = []) -> None:
|
||||
|
||||
self._dir = dir
|
||||
self._fileExtensions = fileExtensions
|
||||
self._allowListFile = allowListFile
|
||||
self._ignoreListDir = ignoreListDir
|
||||
self._ignoreListDirName = ignoreListDirName
|
||||
self._ignoreListFile = ignoreListFile
|
||||
"""Read license template."""
|
||||
with open(licensePath, 'r', encoding="utf8") as file:
|
||||
lines = file.readlines()
|
||||
if not lines:
|
||||
raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is not it empty?')
|
||||
non_empty_lines = [s for s in lines if not s.isspace()]
|
||||
self._startMultiComm = non_empty_lines[0]
|
||||
self._endMultiComm = non_empty_lines[-1]
|
||||
self._license_lines = lines
|
||||
|
||||
def getDir(self) -> str:
|
||||
return self._dir
|
||||
def getFileExtensions(self) -> list[str]:
|
||||
return self._fileExtensions
|
||||
def getStartMultiComm(self) -> str:
|
||||
return self._startMultiComm
|
||||
def getEndMultiComm(self) -> str:
|
||||
return self._endMultiComm
|
||||
def getLicense(self) -> list[str]:
|
||||
return self._license_lines
|
||||
def getAllowListFile(self) -> list[str]:
|
||||
return self._allowListFile
|
||||
def getIgnoreListDir(self) -> list[str]:
|
||||
return self._ignoreListDir
|
||||
def getIgnoreListDirName(self) -> list[str]:
|
||||
return self._ignoreListDirName
|
||||
def getIgnoreListFile(self) -> list[str]:
|
||||
return self._ignoreListFile
|
||||
|
||||
with open(CONFIG_PATH, 'r') as j:
|
||||
_json: dict = json.load(j)
|
||||
BASE_PATH: str = _json.get('basePath') or '../../../'
|
||||
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'
|
||||
if (_json.get('fix')):
|
||||
try:
|
||||
FIX: list[ErrorType] = list(map(lambda x: FIX_TYPES[x], _json.get('fix')))
|
||||
except KeyError:
|
||||
raise Exception(f'KeyError. "fix" cannot process value. It must be an array of strings. Check {CONFIG_PATH}. Possible array values: "OUTDATED", "NO_LICENSE", "INVALID_LICENSE", "LEN_MISMATCH"')
|
||||
else:
|
||||
FIX = False
|
||||
PRINT_CHECKING: bool = _json.get('printChecking')
|
||||
PRINT_REPORTS: bool = _json.get('printReports')
|
||||
CONFIGS: list[Config] = []
|
||||
for i in _json.get('configs'):
|
||||
CONFIGS.append(Config(**i))
|
||||
|
||||
os.chdir(BASE_PATH)
|
||||
|
||||
class Error(object):
|
||||
def __init__(self, errorType: ErrorType) -> None:
|
||||
self._errorType = errorType
|
||||
self._errorMessages = {
|
||||
ErrorType.INVALID_LICENSE: 'Detected license is invalid',
|
||||
ErrorType.NO_LICENSE: 'The license was not found',
|
||||
ErrorType.OUTDATED: 'Detected license is outdated',
|
||||
ErrorType.LEN_MISMATCH: 'Detected license length does not match pattern'
|
||||
}
|
||||
def getErrorType(self) -> ErrorType:
|
||||
return self._errorType
|
||||
def getErrorMessage(self) -> str:
|
||||
return self._errorMessages.get(self._errorType)
|
||||
|
||||
class Report(object):
|
||||
def __init__(self, pathToFile: str, error: Error, message:str = '') -> None:
|
||||
self._pathToFile = pathToFile
|
||||
self._error = error
|
||||
self._message = message
|
||||
def getPathToFile(self) -> str:
|
||||
return self._pathToFile
|
||||
def getError(self) -> Error:
|
||||
return self._error
|
||||
def getMessage(self) -> str:
|
||||
return self._message
|
||||
def report(self) -> str:
|
||||
return f'{self.getPathToFile()}: {self.getError().getErrorMessage()}. {self.getMessage()}.'
|
||||
|
||||
class Checker(object):
|
||||
def __init__(self, config: Config) -> None:
|
||||
self._config = config
|
||||
self._reports: list[Report] = []
|
||||
def getReports(self):
|
||||
return self._reports
|
||||
def _checkLine(self, line: str, prefix: str) -> bool:
|
||||
"""Checks if a line has a prefix."""
|
||||
"""Trim to catch invalid license without leading spaces"""
|
||||
prefix = prefix.lstrip()
|
||||
if (re.search(re.escape(prefix), line)):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
def findLicense(self, lines: list[str]) -> list[str]:
|
||||
"""Looks for consecutive comments in a list of strings."""
|
||||
result = []
|
||||
isStarted = False
|
||||
for line in lines:
|
||||
if line == '\n': continue
|
||||
if (self._checkLine(line=line, prefix=self._config.getStartMultiComm())):
|
||||
result.append(line)
|
||||
isStarted = True
|
||||
elif(self._checkLine(line=line, prefix=self._config.getEndMultiComm())):
|
||||
result.append(line)
|
||||
break
|
||||
elif (isStarted):
|
||||
result.append(line)
|
||||
else:
|
||||
break
|
||||
return result
|
||||
def _checkLicense(self, test: list[str], pathToFile: str) -> Report:
|
||||
license = self._config.getLicense()
|
||||
if len(license) != len(test):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.LEN_MISMATCH),
|
||||
message=f'Found {len(test)} lines, expected {len(license)}')
|
||||
invalidLinesCount = 0
|
||||
lastWrongLine = 0
|
||||
for i in range(len(license)):
|
||||
if (license[i] != test[i]):
|
||||
invalidLinesCount += 1
|
||||
lastWrongLine = i
|
||||
if (invalidLinesCount == 1):
|
||||
r = r'\d\d\d\d'
|
||||
testDate = re.findall(r, test[lastWrongLine])
|
||||
licenseDate = re.findall(r, license[lastWrongLine])
|
||||
|
||||
if not (testDate and licenseDate):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.INVALID_LICENSE),
|
||||
message=f'Something wrong...')
|
||||
|
||||
testLastYear = int(testDate[-1])
|
||||
licenseLastYear = int(licenseDate[-1])
|
||||
if (testLastYear < licenseLastYear):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.OUTDATED),
|
||||
message=f'Found date {testLastYear}, expected {licenseLastYear}')
|
||||
else:
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.INVALID_LICENSE),
|
||||
message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
|
||||
elif (invalidLinesCount > 0):
|
||||
return Report(pathToFile=pathToFile,
|
||||
error=Error(errorType=ErrorType.INVALID_LICENSE),
|
||||
message=f'Found {invalidLinesCount} wrong lines out of {len(license)}')
|
||||
def checkFile(self, pathToFile: str) -> None:
|
||||
"""Checks a file for a valid license."""
|
||||
with open(pathToFile, 'r', encoding="utf-8-sig") as file:
|
||||
test = self.findLicense(lines=file.readlines())
|
||||
if test:
|
||||
result = self._checkLicense(test=test, pathToFile=pathToFile)
|
||||
if result:
|
||||
self._reports.append(result)
|
||||
else:
|
||||
self._reports.append(Report(pathToFile=pathToFile, error=Error(errorType=ErrorType.NO_LICENSE)))
|
||||
return
|
||||
|
||||
class Walker(object):
|
||||
def __init__(self, config: Config) -> None:
|
||||
self._config = config
|
||||
self._checker = Checker(config=self._config)
|
||||
def getChecker(self):
|
||||
return self._checker
|
||||
def getConfig(self):
|
||||
return self._config
|
||||
def _getFiles(self) -> list[str]:
|
||||
result = []
|
||||
for address, dirs, files in os.walk(self._config.getDir()):
|
||||
for i in files:
|
||||
if (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getAllowListFile()))):
|
||||
filename, file_extension = os.path.splitext(i)
|
||||
if file_extension in self._config.getFileExtensions():
|
||||
result.append(os.path.join(address, i))
|
||||
else:
|
||||
for i in self._config.getIgnoreListDirName():
|
||||
if(re.search(re.escape(i), address)):
|
||||
break
|
||||
else:
|
||||
for i in self._config.getIgnoreListDir():
|
||||
if(re.search(re.escape(os.path.normpath(i)), address)):
|
||||
break
|
||||
else:
|
||||
for i in files:
|
||||
if not (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getIgnoreListFile()))):
|
||||
filename, file_extension = os.path.splitext(i)
|
||||
if file_extension in self._config.getFileExtensions():
|
||||
result.append(os.path.join(address, i))
|
||||
return result
|
||||
def checkFiles(self) -> list[Report]:
|
||||
files = self._getFiles()
|
||||
for file in files:
|
||||
if (PRINT_CHECKING):
|
||||
print(f'Checking {file}...')
|
||||
# self._checker.checkFile(file)
|
||||
try:
|
||||
self._checker.checkFile(file)
|
||||
except Exception as e:
|
||||
print(file)
|
||||
print(e)
|
||||
return self._checker.getReports()
|
||||
|
||||
class Fixer(object):
|
||||
def __init__(self, walker: Walker) -> int:
|
||||
self._walker = walker
|
||||
self._checker = self._walker.getChecker()
|
||||
self._config = self._walker.getConfig()
|
||||
def fix(self):
|
||||
count = 0
|
||||
for report in self._checker.getReports():
|
||||
if ((not FIX and report.getError().getErrorType() == ErrorType.NO_LICENSE) or (report.getError().getErrorType() == ErrorType.NO_LICENSE and report.getError().getErrorType() in FIX)):
|
||||
self._addLicense(report.getPathToFile())
|
||||
count += 1
|
||||
elif ((not FIX and report.getError().getErrorType() != ErrorType.NO_LICENSE) or (report.getError().getErrorType() != ErrorType.NO_LICENSE and report.getError().getErrorType() in FIX)):
|
||||
self._fixLicense(report.getPathToFile())
|
||||
count += 1
|
||||
return count
|
||||
def _addLicense(self, pathToFile: str):
|
||||
buffer = []
|
||||
with open(pathToFile, 'r', encoding="utf8") as file:
|
||||
buffer = file.readlines()
|
||||
with open(pathToFile, 'w', encoding="utf8") as file:
|
||||
license = self._config.getLicense()
|
||||
file.writelines(license)
|
||||
file.write('\n')
|
||||
file.writelines(buffer)
|
||||
return
|
||||
def _fixLicense(self, pathToFile: str):
|
||||
buffer = []
|
||||
writeEncoding = "utf8"
|
||||
with open(pathToFile, 'r', encoding="utf8") as file:
|
||||
buffer = file.readlines()
|
||||
if buffer and buffer[0].startswith(codecs.decode(codecs.BOM_UTF8)):
|
||||
writeEncoding = "utf-8-sig"
|
||||
oldLicense = self._checker.findLicense(buffer)
|
||||
for i in oldLicense:
|
||||
buffer.remove(i)
|
||||
with open(pathToFile, 'w', encoding=writeEncoding) as file:
|
||||
license = self._config.getLicense()
|
||||
file.writelines(license)
|
||||
file.writelines(buffer)
|
||||
return
|
||||
|
||||
|
||||
walkers: list[Walker] = []
|
||||
reports: list[Report] = []
|
||||
|
||||
def fix(walkers):
|
||||
count = 0
|
||||
if FIX:
|
||||
print(f'Fixing selected files...')
|
||||
else:
|
||||
print(f'Fixing all {len(reports)} files...')
|
||||
for walker in walkers:
|
||||
fixer = Fixer(walker=walker)
|
||||
count += fixer.fix()
|
||||
print(f'Fixed {count} files.')
|
||||
|
||||
def writeReports(reports: list[Report]) -> None:
|
||||
files: dict[str, list[Report]] = dict()
|
||||
for i in ErrorType:
|
||||
files[i.name] = []
|
||||
for i in reports:
|
||||
files[i.getError().getErrorType().name].append(i)
|
||||
for i in ErrorType:
|
||||
with open(f'{REPORT_FOLDER}/{i.name}.txt', 'w', encoding="utf8") as f:
|
||||
f.writelines(map(lambda x: "".join([x.report(), '\n']), files.get(i.name)))
|
||||
|
||||
for config in CONFIGS:
|
||||
walkers.append(Walker(config=config))
|
||||
|
||||
print('Checking files...')
|
||||
|
||||
for walker in walkers:
|
||||
reports = reports + walker.checkFiles()
|
||||
|
||||
if reports:
|
||||
if not os.path.exists(REPORT_FOLDER):
|
||||
os.mkdir(REPORT_FOLDER)
|
||||
if PRINT_REPORTS:
|
||||
print('\n'.join(map(lambda report: report.report(), reports)))
|
||||
print(f'{len(reports)} invalid licenses were found.')
|
||||
print(f'Saving reports in {REPORT_FOLDER}')
|
||||
writeReports(reports=reports)
|
||||
if FIX:
|
||||
fix(walkers=walkers)
|
||||
# else:
|
||||
# choice = str(input(f'Fix it automatically? [Y/N] ')).lower()
|
||||
# if choice == 'y':
|
||||
# fix(walkers=walkers)
|
||||
else:
|
||||
print('All licenses are ok.')
|
||||
|
||||
# os.system('pause')
|
||||
|
||||
32
scripts/min.py
Normal file
32
scripts/min.py
Normal file
@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../build_tools/scripts')
|
||||
import base
|
||||
import os
|
||||
|
||||
args = sys.argv[1:]
|
||||
|
||||
if (1 > len(args)):
|
||||
print("Please use min.py PATH_TO_SCRIPT.js")
|
||||
exit(0)
|
||||
|
||||
script_path = args[0]
|
||||
script_path = os.path.abspath(script_path)
|
||||
script_dir = os.path.dirname(script_path)
|
||||
|
||||
script_name = os.path.splitext(os.path.basename(script_path))[0]
|
||||
script_path_min = os.path.join(script_dir, script_name + ".min.js")
|
||||
|
||||
#compilation_level = "WHITESPACE_ONLY"
|
||||
compilation_level = "SIMPLE_OPTIMIZATIONS"
|
||||
base.cmd("java", ["-jar", "../../sdkjs/build/node_modules/google-closure-compiler-java/compiler.jar",
|
||||
"--compilation_level", compilation_level,
|
||||
"--js_output_file", script_path_min,
|
||||
"--js", script_path])
|
||||
|
||||
dev_content = base.readFile(script_path)
|
||||
license = dev_content[0:dev_content.find("*/")+2]
|
||||
min_content = base.readFile(script_path_min)
|
||||
base.delete_file(script_path_min)
|
||||
base.writeFile(script_path_min, license + "\n\n" + min_content)
|
||||
@ -1,12 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import package_desktop
|
||||
import package_server
|
||||
import package_builder
|
||||
|
||||
def make(product):
|
||||
if product == 'desktop': package_desktop.make()
|
||||
elif product == 'server': package_server.make()
|
||||
elif product == 'builder': package_builder.make()
|
||||
else: exit(1)
|
||||
return
|
||||
@ -1,44 +1,71 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from package_utils import *
|
||||
import package_utils as utils
|
||||
|
||||
onlyoffice = True
|
||||
company_name = 'ONLYOFFICE'
|
||||
company_name = "ONLYOFFICE"
|
||||
company_name_l = company_name.lower()
|
||||
publisher_name = 'Ascensio System SIA'
|
||||
cert_name = 'Ascensio System SIA'
|
||||
publisher_name = "Ascensio System SIA"
|
||||
cert_name = "Ascensio System SIA"
|
||||
|
||||
if product == 'desktop':
|
||||
s3_bucket = "repo-doc-onlyoffice-com"
|
||||
s3_region = "eu-west-1"
|
||||
s3_base_url = "https://s3.eu-west-1.amazonaws.com/repo-doc-onlyoffice-com"
|
||||
|
||||
if system == 'windows':
|
||||
build_dir = get_path("desktop-apps/win-linux/package/windows")
|
||||
# branding_dir = get_path(branding, build_dir)
|
||||
product_name = 'Desktop Editors'
|
||||
product_name_s = product_name.replace(' ','')
|
||||
package_name = company_name + '_' + product_name_s
|
||||
vcredist_list = ['2022', '2013']
|
||||
update_changes_list = {
|
||||
'en': "changes",
|
||||
'ru': "changes_ru"
|
||||
if utils.is_windows():
|
||||
desktop_product_name = "Desktop Editors"
|
||||
desktop_product_name_s = desktop_product_name.replace(" ","")
|
||||
desktop_package_name = company_name + "-" + desktop_product_name_s
|
||||
desktop_changes_dir = "desktop-apps/win-linux/package/windows/update/changes"
|
||||
|
||||
if utils.is_macos():
|
||||
desktop_package_name = "ONLYOFFICE"
|
||||
desktop_build_dir = "desktop-apps/macos"
|
||||
desktop_branding_dir = "desktop-apps/macos"
|
||||
desktop_updates_dir = "build/update"
|
||||
desktop_changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
|
||||
sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
|
||||
|
||||
builder_product_name = "Document Builder"
|
||||
|
||||
if utils.is_linux():
|
||||
desktop_make_targets = [
|
||||
{
|
||||
"make": "tar",
|
||||
"src": "tar/*.tar*",
|
||||
"dst": "desktop/linux/generic/"
|
||||
},
|
||||
{
|
||||
"make": "deb",
|
||||
"src": "deb/*.deb",
|
||||
"dst": "desktop/linux/debian/"
|
||||
},
|
||||
{
|
||||
"make": "rpm",
|
||||
"src": "rpm/build/RPMS/*/*.rpm",
|
||||
"dst": "desktop/linux/rhel/"
|
||||
},
|
||||
{
|
||||
"make": "rpm-suse",
|
||||
"src": "rpm-suse/build/RPMS/*/*.rpm",
|
||||
"dst": "desktop/linux/suse/"
|
||||
}
|
||||
|
||||
elif system == 'darwin':
|
||||
build_dir = "desktop-apps/macos"
|
||||
branding_build_dir = "desktop-apps/macos"
|
||||
package_name = company_name
|
||||
updates_dir = "build/update"
|
||||
changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
|
||||
update_changes_list = {
|
||||
'en': "ReleaseNotes",
|
||||
'ru': "ReleaseNotesRU"
|
||||
]
|
||||
server_make_targets = [
|
||||
{
|
||||
"make": "deb",
|
||||
"src": "deb/*.deb",
|
||||
"dst": "server/linux/debian/"
|
||||
},
|
||||
{
|
||||
"make": "rpm",
|
||||
"src": "rpm/builddir/RPMS/*/*.rpm",
|
||||
"dst": "server/linux/rhel/"
|
||||
},
|
||||
{
|
||||
"make": "tar",
|
||||
"src": "*.tar*",
|
||||
"dst": "server/linux/snap/"
|
||||
}
|
||||
sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
|
||||
|
||||
if product == 'builder':
|
||||
|
||||
if system == 'windows':
|
||||
build_dir = "document-builder-package"
|
||||
product_name = 'Document Builder'
|
||||
product_name_s = product_name.replace(' ','')
|
||||
package_name = company_name + '_' + product_name_s
|
||||
]
|
||||
|
||||
@ -1,101 +1,235 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from package_utils import *
|
||||
from package_branding import *
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
|
||||
if system == 'windows':
|
||||
utils.log_h1("BUILDER")
|
||||
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
if common.deploy:
|
||||
make_archive()
|
||||
if utils.is_windows():
|
||||
make_windows()
|
||||
elif system == 'linux':
|
||||
if 'packages' in targets:
|
||||
set_cwd(build_dir)
|
||||
log("Clean")
|
||||
cmd("make", ["clean"])
|
||||
log("Build packages")
|
||||
cmd("make", ["packages"])
|
||||
else:
|
||||
exit(1)
|
||||
elif utils.is_macos():
|
||||
make_macos_linux()
|
||||
elif utils.is_linux():
|
||||
make_macos_linux()
|
||||
return
|
||||
|
||||
#
|
||||
# Windows
|
||||
#
|
||||
def s3_upload(files, dst):
|
||||
if not files: return False
|
||||
ret = True
|
||||
for f in files:
|
||||
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
ret &= upload
|
||||
return ret
|
||||
|
||||
def make_archive():
|
||||
utils.set_cwd(utils.get_path(
|
||||
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
|
||||
|
||||
utils.log_h2("builder archive build")
|
||||
utils.delete_file("builder.7z")
|
||||
args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"]
|
||||
if utils.is_windows():
|
||||
ret = utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = utils.sh(" ".join(args), verbose=True)
|
||||
utils.set_summary("builder archive build", ret)
|
||||
|
||||
utils.log_h2("builder archive deploy")
|
||||
dest = "builder-" + common.prefix.replace("_","-") + ".7z"
|
||||
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
|
||||
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
|
||||
ret = utils.s3_upload(
|
||||
"builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
|
||||
utils.set_summary("builder archive deploy", ret)
|
||||
if ret:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
|
||||
utils.s3_copy(
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version,
|
||||
"s3://" + branding.s3_bucket + "/" + dest_latest)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_windows():
|
||||
global package_version, sign, machine, arch, source_dir, base_dir, \
|
||||
innosetup_file, portable_zip_file, isxdl_file
|
||||
base_dir = "base"
|
||||
isxdl_file = "exe/scripts/isxdl/isxdl.dll"
|
||||
global package_version, arch
|
||||
utils.set_cwd("document-builder-package")
|
||||
|
||||
set_cwd(get_abspath(git_dir, build_dir))
|
||||
package_version = common.version + "." + common.build
|
||||
arch = {
|
||||
"windows_x64": "x64",
|
||||
"windows_x86": "x86"
|
||||
}[common.platform]
|
||||
|
||||
if 'clean' in targets:
|
||||
log("\n=== Clean\n")
|
||||
delete_dir(base_dir)
|
||||
delete_files(isxdl_file)
|
||||
delete_files("exe/*.exe")
|
||||
delete_files("zip/*.zip")
|
||||
if common.clean:
|
||||
utils.log_h2("builder clean")
|
||||
utils.delete_dir("build")
|
||||
utils.delete_dir("zip")
|
||||
|
||||
package_version = version + '.' + build
|
||||
sign = 'sign' in targets
|
||||
if make_prepare():
|
||||
make_zip()
|
||||
make_wheel()
|
||||
else:
|
||||
utils.set_summary("builder zip build", False)
|
||||
utils.set_summary("builder python wheel build", False)
|
||||
|
||||
for target in targets:
|
||||
if not (target.startswith('innosetup') or target.startswith('portable')):
|
||||
continue
|
||||
|
||||
machine = get_platform(target)['machine']
|
||||
arch = get_platform(target)['arch']
|
||||
suffix = arch
|
||||
source_prefix = "win_" + machine
|
||||
source_dir = get_path("%s/%s/%s/%s" % (out_dir, source_prefix, company_name_l, product_name_s))
|
||||
|
||||
log("\n=== Copy arifacts\n")
|
||||
create_dir(base_dir)
|
||||
copy_dir_content(source_dir, base_dir + '\\')
|
||||
|
||||
if target.startswith('innosetup'):
|
||||
download_isxdl()
|
||||
innosetup_file = "exe/%s_%s_%s.exe" % (package_name, package_version, suffix)
|
||||
make_innosetup()
|
||||
|
||||
if target.startswith('portable'):
|
||||
portable_zip_file = "zip/%s_%s_%s.zip" % (package_name, package_version, suffix)
|
||||
make_win_portable()
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def download_isxdl():
|
||||
log("\n=== Download isxdl\n")
|
||||
log("--- " + isxdl_file)
|
||||
if is_file(isxdl_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
create_dir(get_dirname(isxdl_file))
|
||||
download_file(isxdl_link, isxdl_file)
|
||||
def make_prepare():
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
if common.sign:
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("builder prepare")
|
||||
ret = utils.ps1("make.ps1", args, verbose=True)
|
||||
utils.set_summary("builder prepare", ret)
|
||||
return ret
|
||||
|
||||
def make_zip():
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch
|
||||
]
|
||||
# if common.sign:
|
||||
# args += ["-Sign"]
|
||||
|
||||
utils.log_h2("builder zip build")
|
||||
ret = utils.ps1("make_zip.ps1", args, verbose=True)
|
||||
utils.set_summary("builder zip build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("builder zip deploy")
|
||||
ret = s3_upload(utils.glob_path("zip/*.zip"), "builder/win/generic/")
|
||||
utils.set_summary("builder zip deploy", ret)
|
||||
return
|
||||
|
||||
def make_innosetup():
|
||||
log("\n=== Build innosetup project\n")
|
||||
iscc_args = ["/DVERSION=" + package_version]
|
||||
if not onlyoffice:
|
||||
iscc_args.append("/DBRANDING_DIR=" + get_abspath(git_dir, branding, build_dir, "exe"))
|
||||
if sign:
|
||||
iscc_args.append("/DSIGN")
|
||||
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
|
||||
log("--- " + innosetup_file)
|
||||
if is_file(innosetup_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
set_cwd("exe")
|
||||
cmd("iscc", iscc_args + ["builder.iss"])
|
||||
set_cwd("..")
|
||||
def make_macos_linux():
|
||||
utils.set_cwd("document-builder-package")
|
||||
|
||||
make_tar()
|
||||
make_wheel()
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_win_portable():
|
||||
log("\n=== Build portable\n")
|
||||
log("--- " + portable_zip_file)
|
||||
if is_file(portable_zip_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
cmd("7z", ["a", "-y", portable_zip_file, get_path(base_dir, "*")])
|
||||
def make_tar():
|
||||
utils.log_h2("builder tar build")
|
||||
make_args = ["tar"]
|
||||
if common.platform == "darwin_arm64":
|
||||
make_args += ["-e", "UNAME_M=arm64"]
|
||||
if common.platform == "darwin_x86_64":
|
||||
make_args += ["-e", "UNAME_M=x86_64"]
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"]
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("builder tar build", ret)
|
||||
|
||||
if common.deploy:
|
||||
utils.log_h2("builder tar deploy")
|
||||
if utils.is_macos():
|
||||
s3_dest = "builder/mac/generic/"
|
||||
elif utils.is_linux():
|
||||
s3_dest = "builder/linux/generic/"
|
||||
ret = s3_upload(utils.glob_path("tar/*.tar.xz"), s3_dest)
|
||||
utils.set_summary("builder tar deploy", ret)
|
||||
return
|
||||
|
||||
def make_wheel():
|
||||
platform_tags = {
|
||||
"windows_x64": "win_amd64",
|
||||
"windows_x86": "win32",
|
||||
"darwin_arm64": "macosx_11_0_arm64",
|
||||
"darwin_x86_64": "macosx_10_9_x86_64",
|
||||
"linux_x86_64": "manylinux_2_23_x86_64",
|
||||
"linux_aarch64": "manylinux_2_23_aarch64"
|
||||
}
|
||||
|
||||
if not common.platform in platform_tags: return
|
||||
|
||||
utils.log_h2("builder python wheel build")
|
||||
|
||||
builder_dir = "build"
|
||||
if utils.is_linux():
|
||||
builder_dir = "build/opt/onlyoffice/documentbuilder"
|
||||
|
||||
utils.delete_dir("python")
|
||||
utils.copy_dir("../onlyoffice/build_tools/packaging/docbuilder/resources", "python")
|
||||
utils.copy_dir(builder_dir, "python/docbuilder/lib", True, True)
|
||||
|
||||
desktop_dir = "../desktop-apps/macos/build/ONLYOFFICE.app/Contents/Resources/converter"
|
||||
if utils.is_macos() and "desktop" in common.targets and utils.is_exist(desktop_dir):
|
||||
for f in utils.glob_path(desktop_dir + "/*.dylib") + [desktop_dir + "/x2t"]:
|
||||
utils.copy_file(f, builder_dir + "/" + utils.get_basename(f))
|
||||
|
||||
old_cwd = utils.get_cwd()
|
||||
utils.set_cwd("python/docbuilder")
|
||||
|
||||
if not utils.is_file("docbuilder.py"):
|
||||
utils.copy_file("lib/docbuilder.py", "docbuilder.py")
|
||||
# fix docbuilder.py
|
||||
content = ""
|
||||
with open("docbuilder.py", "r") as file:
|
||||
content = file.read()
|
||||
old_line = "builder_path = os.path.dirname(os.path.realpath(__file__))"
|
||||
new_line = "builder_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"lib\")"
|
||||
content = content.replace(old_line, new_line)
|
||||
with open("docbuilder.py", "w") as file:
|
||||
file.write(content)
|
||||
|
||||
# remove unnecessary files
|
||||
utils.set_cwd("lib")
|
||||
utils.delete_dir("include")
|
||||
utils.delete_file("build.date")
|
||||
utils.delete_file("docbuilder.jar")
|
||||
utils.delete_file("docbuilder.py")
|
||||
if utils.is_windows():
|
||||
utils.delete_file("doctrenderer.lib")
|
||||
utils.delete_file("docbuilder.com.dll")
|
||||
utils.delete_file("docbuilder.net.dll")
|
||||
utils.delete_file("docbuilder.jni.dll")
|
||||
elif utils.is_macos():
|
||||
if (utils.is_file("libdocbuilder.jni.dylib")):
|
||||
utils.delete_file("libdocbuilder.jni.dylib")
|
||||
if (utils.is_dir("docbuilder.jni.framework")):
|
||||
utils.delete_file("docbuilder.jni.framework")
|
||||
utils.remove_all_symlinks(".")
|
||||
elif utils.is_linux():
|
||||
utils.delete_file("libdocbuilder.jni.so")
|
||||
|
||||
utils.set_env("DOCBUILDER_VERSION", common.version + "." + common.build)
|
||||
platform = "linux_64"
|
||||
utils.set_cwd("../..")
|
||||
plat_name = platform_tags[common.platform]
|
||||
ret = utils.sh("python setup.py bdist_wheel --plat-name " + plat_name + " --python-tag py2.py3", verbose=True)
|
||||
utils.set_summary("builder python wheel build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("builder python wheel deploy")
|
||||
if utils.is_windows():
|
||||
s3_dest = "builder/win/python/"
|
||||
elif utils.is_macos():
|
||||
s3_dest = "builder/mac/python/"
|
||||
elif utils.is_linux():
|
||||
s3_dest = "builder/linux/python/"
|
||||
ret = s3_upload(utils.glob_path("dist/*.whl"), s3_dest)
|
||||
utils.set_summary("builder python wheel deploy", ret)
|
||||
|
||||
utils.set_cwd(old_cwd)
|
||||
|
||||
return
|
||||
|
||||
17
scripts/package_common.py
Normal file
17
scripts/package_common.py
Normal file
@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
platformPrefixes = {
|
||||
"windows_x64": "win_64",
|
||||
"windows_x86": "win_32",
|
||||
"windows_arm64": "win_arm64",
|
||||
"windows_x64_xp": "win_64_xp",
|
||||
"windows_x86_xp": "win_32_xp",
|
||||
"darwin_arm64": "mac_arm64",
|
||||
"darwin_x86_64": "mac_64",
|
||||
"darwin_x86_64_v8": "mac_64",
|
||||
"linux_x86_64": "linux_64",
|
||||
"linux_aarch64": "linux_arm64",
|
||||
}
|
||||
|
||||
out_dir = "build_tools/out"
|
||||
tsa_server = "http://timestamp.digicert.com"
|
||||
93
scripts/package_core.py
Normal file
93
scripts/package_core.py
Normal file
@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
|
||||
utils.log_h1("CORE")
|
||||
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
if common.deploy:
|
||||
make_archive()
|
||||
return
|
||||
|
||||
def make_archive():
|
||||
utils.set_cwd(utils.get_path(
|
||||
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
|
||||
|
||||
utils.log_h2("core archive build")
|
||||
utils.delete_file("core.7z")
|
||||
args = ["7z", "a", "-y", "core.7z", "./core/*"]
|
||||
if utils.is_windows():
|
||||
ret = utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = utils.sh(" ".join(args), verbose=True)
|
||||
utils.set_summary("core archive build", ret)
|
||||
|
||||
utils.log_h2("core archive deploy")
|
||||
dest = "core-" + common.prefix.replace("_","-") + ".7z"
|
||||
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
|
||||
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
|
||||
ret = utils.s3_upload(
|
||||
"core.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
|
||||
utils.set_summary("core archive deploy", ret)
|
||||
if ret:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
|
||||
utils.s3_copy(
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version,
|
||||
"s3://" + branding.s3_bucket + "/" + dest_latest)
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def deploy_closuremaps_sdkjs(license):
|
||||
if not common.deploy: return
|
||||
utils.log_h1("SDKJS CLOSURE MAPS")
|
||||
|
||||
maps = utils.glob_path("sdkjs/build/maps/*.js.map")
|
||||
if maps:
|
||||
for m in maps: utils.log("- " + m)
|
||||
else:
|
||||
utils.log_err("files do not exist")
|
||||
utils.set_summary("sdkjs closure maps %s deploy" % license, False)
|
||||
return
|
||||
|
||||
utils.log_h2("sdkjs closure maps %s deploy" % license)
|
||||
ret = True
|
||||
for f in maps:
|
||||
base = utils.get_basename(f)
|
||||
key = "closure-maps/sdkjs/%s/%s/%s/%s" % (license, common.version, common.build, base)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
ret &= upload
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.set_summary("sdkjs closure maps %s deploy" % license, ret)
|
||||
return
|
||||
|
||||
def deploy_closuremaps_webapps(license):
|
||||
if not common.deploy: return
|
||||
utils.log_h1("WEB-APPS CLOSURE MAPS")
|
||||
|
||||
maps = utils.glob_path("web-apps/deploy/web-apps/apps/*/*/*.js.map") \
|
||||
+ utils.glob_path("web-apps/deploy/web-apps/apps/*/mobile/dist/js/*.js.map")
|
||||
if maps:
|
||||
for m in maps: utils.log("- " + m)
|
||||
else:
|
||||
utils.log_err("files do not exist")
|
||||
utils.set_summary("web-apps closure maps %s deploy" % license, False)
|
||||
return
|
||||
|
||||
utils.log_h2("web-apps closure maps %s deploy" % license)
|
||||
ret = True
|
||||
for f in maps:
|
||||
base = utils.get_relpath(f, "web-apps/deploy/web-apps/apps").replace("/", "_")
|
||||
key = "closure-maps/web-apps/%s/%s/%s/%s" % (license, common.version, common.build, base)
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
ret &= upload
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.set_summary("web-apps closure maps %s deploy" % license, ret)
|
||||
return
|
||||
@ -2,272 +2,171 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
from package_utils import *
|
||||
from package_branding import *
|
||||
import re
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
|
||||
if system == 'windows':
|
||||
utils.log_h1("DESKTOP")
|
||||
if utils.is_windows():
|
||||
make_windows()
|
||||
elif system == 'darwin':
|
||||
elif utils.is_macos():
|
||||
make_macos()
|
||||
elif system == 'linux':
|
||||
if 'packages' in targets:
|
||||
set_cwd(build_dir)
|
||||
log("Clean")
|
||||
cmd("make", ["clean"])
|
||||
log("Build packages")
|
||||
cmd("make", ["packages"])
|
||||
elif utils.is_linux():
|
||||
make_linux()
|
||||
else:
|
||||
exit(1)
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
|
||||
def s3_upload(files, dst):
|
||||
if not files: return False
|
||||
ret = True
|
||||
for f in files:
|
||||
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
ret &= upload
|
||||
return ret
|
||||
|
||||
#
|
||||
# Windows
|
||||
#
|
||||
|
||||
def make_windows():
|
||||
global package_version, sign, machine, arch, xp, iscc_args, source_dir, \
|
||||
innosetup_file, innosetup_update_file, advinst_file, portable_zip_file
|
||||
global package_name, package_version, arch, xp
|
||||
utils.set_cwd("desktop-apps\\package")
|
||||
|
||||
set_cwd(get_abspath(git_dir, build_dir))
|
||||
package_name = branding.desktop_package_name
|
||||
package_version = common.version + "." + common.build
|
||||
arch = {
|
||||
"windows_x64": "x64",
|
||||
"windows_x64_xp": "x64",
|
||||
"windows_x86": "x86",
|
||||
"windows_x86_xp": "x86",
|
||||
"windows_arm64": "arm64"
|
||||
}[common.platform]
|
||||
xp = common.platform.endswith("_xp")
|
||||
|
||||
if 'clean' in targets:
|
||||
log("\n=== Clean\n")
|
||||
delete_dir(get_path("data/vcredist"))
|
||||
delete_dir("DesktopEditors-cache")
|
||||
delete_files("*.exe")
|
||||
delete_files("*.msi")
|
||||
delete_files("*.aic")
|
||||
delete_files("*.tmp")
|
||||
delete_files("*.zip")
|
||||
delete_files(get_path("update/*.exe"))
|
||||
delete_files(get_path("update/*.xml"))
|
||||
delete_files(get_path("update/*.html"))
|
||||
if common.clean:
|
||||
utils.log_h2("desktop clean")
|
||||
utils.delete_dir("build")
|
||||
utils.delete_files("inno\\package.config")
|
||||
utils.delete_files("inno\\*.exe")
|
||||
utils.delete_dir("advinst\\DesktopEditors-cache")
|
||||
utils.delete_files("advinst\\package.config")
|
||||
utils.delete_files("advinst\\*.msi")
|
||||
utils.delete_files("advinst\\*.aic")
|
||||
utils.delete_dir("zip")
|
||||
|
||||
package_version = version + '.' + build
|
||||
sign = 'sign' in targets
|
||||
|
||||
for target in targets:
|
||||
if not (target.startswith('innosetup') or target.startswith('advinst') or
|
||||
target.startswith('portable')):
|
||||
continue
|
||||
|
||||
machine = get_platform(target)['machine']
|
||||
arch = get_platform(target)['arch']
|
||||
xp = get_platform(target)['xp']
|
||||
suffix = arch + ("_xp" if xp else "")
|
||||
source_prefix = "win_" + machine + ("_xp" if xp else "")
|
||||
source_dir = get_path("%s/%s/%s/%s" % (out_dir, source_prefix, company_name_l, product_name_s))
|
||||
|
||||
if target.startswith('innosetup'):
|
||||
for year in vcredist_list:
|
||||
download_vcredist(year)
|
||||
|
||||
innosetup_file = "%s_%s_%s.exe" % (package_name, package_version, suffix)
|
||||
make_innosetup()
|
||||
|
||||
if 'winsparkle-update' in targets:
|
||||
innosetup_update_file = get_path("update/editors_update_%s.exe" % suffix)
|
||||
make_innosetup_update()
|
||||
|
||||
if 'winsparkle-files' in targets:
|
||||
make_winsparkle_files()
|
||||
|
||||
if target.startswith('advinst'):
|
||||
advinst_file = "%s_%s_%s.msi" % (package_name, package_version, suffix)
|
||||
if not xp:
|
||||
make_prepare()
|
||||
make_zip()
|
||||
if branding.onlyoffice:
|
||||
make_inno()
|
||||
make_inno("standalone")
|
||||
make_advinst()
|
||||
|
||||
if target.startswith('portable'):
|
||||
portable_zip_file = "%s_%s_%s.zip" % (package_name, package_version, suffix)
|
||||
make_win_portable()
|
||||
|
||||
return
|
||||
|
||||
def download_vcredist(year):
|
||||
log("\n=== Download vcredist " + year + "\n")
|
||||
vcredist = get_path("data/vcredist/vcredist_%s_%s.exe" % (year, arch))
|
||||
log("--- " + vcredist)
|
||||
if is_file(vcredist):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
create_dir(get_dirname(vcredist))
|
||||
download_file(vcredist_links[year][machine], vcredist)
|
||||
return
|
||||
|
||||
def make_innosetup():
|
||||
log("\n=== Build innosetup project\n")
|
||||
global iscc_args
|
||||
iscc_args = [
|
||||
"/Qp",
|
||||
"/DsAppVersion=" + package_version,
|
||||
"/DDEPLOY_PATH=" + source_dir,
|
||||
"/D_ARCH=" + machine
|
||||
]
|
||||
if onlyoffice:
|
||||
iscc_args.append("/D_ONLYOFFICE=1")
|
||||
make_prepare("commercial")
|
||||
make_zip("commercial")
|
||||
make_inno("commercial")
|
||||
make_advinst("commercial")
|
||||
else:
|
||||
iscc_args.append("/DsBrandingFolder=" + get_abspath(git_dir, branding_dir))
|
||||
make_prepare("xp")
|
||||
make_zip("xp")
|
||||
make_inno("xp")
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_prepare(edition = "opensource"):
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch,
|
||||
"-Target", edition,
|
||||
"-CompanyName", branding.company_name
|
||||
]
|
||||
if common.sign:
|
||||
args += ["-Sign"]
|
||||
|
||||
utils.log_h2("desktop prepare " + edition)
|
||||
ret = utils.ps1("make.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop prepare " + edition, ret)
|
||||
return
|
||||
|
||||
def make_zip(edition = "opensource"):
|
||||
if edition == "commercial": zip_file = "%s-Enterprise-%s-%s.zip"
|
||||
elif edition == "xp": zip_file = "%s-XP-%s-%s.zip"
|
||||
else: zip_file = "%s-%s-%s.zip"
|
||||
zip_file = "zip\\" + zip_file % (package_name, package_version, arch)
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch,
|
||||
"-Target", edition,
|
||||
"-CompanyName", branding.company_name
|
||||
]
|
||||
# if common.sign:
|
||||
# args += ["-Sign"]
|
||||
|
||||
utils.log_h2("desktop zip " + edition + " build")
|
||||
ret = utils.ps1("make_zip.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop zip " + edition + " build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop zip " + edition + " deploy")
|
||||
ret = s3_upload([zip_file], "desktop/win/generic/")
|
||||
utils.set_summary("desktop zip " + edition + " deploy", ret)
|
||||
return
|
||||
|
||||
def make_inno(edition = "opensource"):
|
||||
if edition == "commercial": inno_file = "%s-Enterprise-%s-%s.exe"
|
||||
elif edition == "standalone": inno_file = "%s-Standalone-%s-%s.exe"
|
||||
elif edition == "update": inno_file = "%s-Update-%s-%s.exe"
|
||||
elif edition == "xp": inno_file = "%s-XP-%s-%s.exe"
|
||||
else: inno_file = "%s-%s-%s.exe"
|
||||
inno_file = "inno\\" + inno_file % (package_name, package_version, arch)
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch,
|
||||
"-Target", edition
|
||||
]
|
||||
if common.sign:
|
||||
args += ["-Sign"]
|
||||
|
||||
if xp:
|
||||
iscc_args.append("/D_WIN_XP=1")
|
||||
if sign:
|
||||
iscc_args.append("/DENABLE_SIGNING=1")
|
||||
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
|
||||
log("--- " + innosetup_file)
|
||||
if is_file(innosetup_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
cmd("iscc", iscc_args + ["common.iss"])
|
||||
args += ["-TimestampServer", "http://timestamp.comodoca.com/authenticode"]
|
||||
|
||||
utils.log_h2("desktop inno " + edition + " build")
|
||||
ret = utils.ps1("make_inno.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop inno " + edition + " build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop inno " + edition + " deploy")
|
||||
ret = s3_upload([inno_file], "desktop/win/inno/")
|
||||
utils.set_summary("desktop inno " + edition + " deploy", ret)
|
||||
return
|
||||
|
||||
def make_innosetup_update():
|
||||
log("\n=== Build innosetup update project\n")
|
||||
log("--- " + innosetup_update_file)
|
||||
if is_file(innosetup_update_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
cmd("iscc", iscc_args + ["/DTARGET_NAME=" + innosetup_file, "update_common.iss"])
|
||||
return
|
||||
|
||||
def make_winsparkle_files():
|
||||
log("\n=== Build winsparkle files\n")
|
||||
|
||||
awk_branding = "update/branding.awk"
|
||||
if not onlyoffice:
|
||||
build_branding_dir = get_abspath(git_dir, branding_dir, "win-linux/package/windows")
|
||||
else:
|
||||
build_branding_dir = get_path(".")
|
||||
awk_args = [
|
||||
"-v", "Version=" + version,
|
||||
"-v", "Build=" + build,
|
||||
"-v", "Branch=" + get_env("RELEASE_BRANCH"),
|
||||
"-v", "Timestamp=" + timestamp,
|
||||
"-i", get_path(build_branding_dir, awk_branding)
|
||||
def make_advinst(edition = "opensource"):
|
||||
if edition == "commercial": advinst_file = "%s-Enterprise-%s-%s.msi"
|
||||
else: advinst_file = "%s-%s-%s.msi"
|
||||
advinst_file = "advinst\\" + advinst_file % (package_name, package_version, arch)
|
||||
args = [
|
||||
"-Version", package_version,
|
||||
"-Arch", arch,
|
||||
"-Target", edition
|
||||
]
|
||||
if common.sign:
|
||||
args += ["-Sign"]
|
||||
|
||||
appcast = get_path("update/appcast.xml")
|
||||
log("--- " + appcast)
|
||||
if is_file(appcast):
|
||||
log("! file exist, skip")
|
||||
else:
|
||||
command = "env LANG=en_US.UTF-8 awk " + \
|
||||
' '.join(awk_args) + " -f update/appcast.xml.awk"
|
||||
appcast_result = proc_open(command)
|
||||
if appcast_result['stderr'] != "":
|
||||
log("! error: " + appcast_result['stderr'])
|
||||
write_file(appcast, appcast_result['stdout'])
|
||||
utils.log_h2("desktop advinst " + edition + " build")
|
||||
ret = utils.ps1("make_advinst.ps1", args, verbose=True)
|
||||
utils.set_summary("desktop advinst " + edition + " build", ret)
|
||||
|
||||
appcast_prod = get_path("update/appcast-prod.xml")
|
||||
log("--- " + appcast_prod)
|
||||
if is_file(appcast_prod):
|
||||
log("! file exist, skip")
|
||||
else:
|
||||
command = "env LANG=en_US.UTF-8 awk -v Prod=1 " + \
|
||||
' '.join(awk_args) + " -f update/appcast.xml.awk"
|
||||
appcast_result = proc_open(command)
|
||||
if appcast_result['stderr'] != "":
|
||||
log("! error: " + appcast_result['stderr'])
|
||||
write_file(appcast_prod, appcast_result['stdout'])
|
||||
|
||||
changes_dir = get_path(build_branding_dir, "update/changes", version)
|
||||
for lang, base in update_changes_list.items():
|
||||
changes = get_path("update/" + base + ".html")
|
||||
if lang == 'en': encoding = 'en_US.UTF-8'
|
||||
elif lang == 'ru': encoding = 'ru_RU.UTF-8'
|
||||
log("--- " + changes)
|
||||
if is_file(changes):
|
||||
log("! file exist, skip")
|
||||
else:
|
||||
command = "env LANG=" + encoding + " awk " + ' '.join(awk_args) + \
|
||||
" -f update\\changes.html.awk " + changes_dir + "\\" + lang + ".html"
|
||||
changes_result = proc_open(command)
|
||||
if changes_result['stderr'] != "":
|
||||
log("! error: " + changes_result['stderr'])
|
||||
write_file(changes, changes_result['stdout'])
|
||||
return
|
||||
|
||||
def make_advinst():
|
||||
log("\n=== Build advanced installer project\n")
|
||||
log("--- " + advinst_file)
|
||||
if is_file(advinst_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
if not onlyoffice:
|
||||
branding_path = get_abspath(git_dir, branding_dir)
|
||||
copy_dir_content(
|
||||
branding_path + "\\win-linux\\package\\windows\\data", "data", ".bmp")
|
||||
copy_dir_content(
|
||||
branding_path + "\\win-linux\\package\\windows\\data", "data", ".png")
|
||||
copy_dir_content(
|
||||
branding_path + "\\win-linux\\extras\\projicons\\res",
|
||||
"..\\..\\extras\\projicons\\res", ".ico")
|
||||
copy_file(
|
||||
branding_path + "\\win-linux\\package\\windows\\dictionary.ail",
|
||||
"dictionary.ail")
|
||||
copy_file(
|
||||
branding_path + "\\common\\package\\license\\eula_" + branding + ".rtf",
|
||||
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf")
|
||||
copy_file(
|
||||
branding_path + "\\..\\multimedia\\videoplayer\\icons\\" + branding + ".ico",
|
||||
"..\\..\\extras\\projicons\\res\\media.ico")
|
||||
copy_file(
|
||||
branding_path + "\\..\\multimedia\\imageviewer\\icons\\ico\\" + branding + ".ico",
|
||||
"..\\..\\extras\\projicons\\res\\gallery.ico")
|
||||
aic_content = [";aic"]
|
||||
if not sign:
|
||||
aic_content += [
|
||||
"ResetSig"
|
||||
]
|
||||
if machine == '32':
|
||||
aic_content += [
|
||||
"SetPackageType x86",
|
||||
"SetAppdir -buildname DefaultBuild -path [ProgramFilesFolder][MANUFACTURER_INSTALL_FOLDER]\\[PRODUCT_INSTALL_FOLDER]",
|
||||
'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x64)"',
|
||||
'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x64)"'
|
||||
]
|
||||
if machine == '64':
|
||||
aic_content += [
|
||||
'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x86)"',
|
||||
'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x86)"'
|
||||
]
|
||||
if onlyoffice:
|
||||
aic_content += [
|
||||
"DelFolder CUSTOM_PATH"
|
||||
]
|
||||
else:
|
||||
aic_content += [
|
||||
"DelLanguage 1029 -buildname DefaultBuild",
|
||||
"DelLanguage 1031 -buildname DefaultBuild",
|
||||
"DelLanguage 1041 -buildname DefaultBuild",
|
||||
"DelLanguage 1046 -buildname DefaultBuild",
|
||||
"DelLanguage 2070 -buildname DefaultBuild",
|
||||
"DelLanguage 1060 -buildname DefaultBuild",
|
||||
"DelLanguage 1036 -buildname DefaultBuild",
|
||||
"DelLanguage 3082 -buildname DefaultBuild",
|
||||
"DelLanguage 1033 -buildname DefaultBuild",
|
||||
"NewSync CUSTOM_PATH " + source_dir + "\\..\\MediaViewer",
|
||||
"UpdateFile CUSTOM_PATH\\ImageViewer.exe " + source_dir + "\\..\\MediaViewer\\ImageViewer.exe",
|
||||
"UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + source_dir + "\\..\\MediaViewer\\VideoPlayer.exe"
|
||||
]
|
||||
aic_content += [
|
||||
"AddOsLc -buildname DefaultBuild -arch " + arch,
|
||||
"NewSync APPDIR " + source_dir,
|
||||
"UpdateFile APPDIR\\DesktopEditors.exe " + source_dir + "\\DesktopEditors.exe",
|
||||
"SetVersion " + package_version,
|
||||
"SetPackageName " + advinst_file + " -buildname DefaultBuild",
|
||||
"Rebuild -buildslist DefaultBuild"
|
||||
]
|
||||
write_file("DesktopEditors.aic", "\r\n".join(aic_content), 'utf-8-sig')
|
||||
cmd("AdvancedInstaller.com",
|
||||
["/execute", "DesktopEditors.aip", "DesktopEditors.aic"])
|
||||
return
|
||||
|
||||
def make_win_portable():
|
||||
log("\n=== Build portable\n")
|
||||
log("--- " + portable_zip_file)
|
||||
if is_file(portable_zip_file):
|
||||
log("! file exist, skip")
|
||||
return
|
||||
cmd("7z", ["a", "-y", portable_zip_file, get_path(source_dir, "*")])
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop advinst " + edition + " deploy")
|
||||
ret = s3_upload([advinst_file], "desktop/win/advinst/")
|
||||
utils.set_summary("desktop advinst " + edition + " deploy", ret)
|
||||
return
|
||||
|
||||
#
|
||||
@ -275,88 +174,171 @@ def make_win_portable():
|
||||
#
|
||||
|
||||
def make_macos():
|
||||
global suffix, lane, scheme
|
||||
global package_name, build_dir, branding_dir, updates_dir, changes_dir, \
|
||||
suffix, lane, scheme, source_dir, released_updates_dir
|
||||
package_name = branding.desktop_package_name
|
||||
build_dir = branding.desktop_build_dir
|
||||
branding_dir = branding.desktop_branding_dir
|
||||
updates_dir = branding.desktop_updates_dir
|
||||
changes_dir = branding.desktop_changes_dir
|
||||
suffix = {
|
||||
"darwin_x86_64": "x86_64",
|
||||
"darwin_x86_64_v8": "v8",
|
||||
"darwin_arm64": "arm"
|
||||
}[common.platform]
|
||||
lane = "release_" + suffix
|
||||
scheme = package_name + "-" + suffix
|
||||
sparkle_updates = False
|
||||
|
||||
set_cwd(git_dir + "/" + branding_build_dir)
|
||||
utils.set_cwd(branding_dir)
|
||||
|
||||
for target in targets:
|
||||
if not target.startswith('diskimage'):
|
||||
continue
|
||||
if common.clean:
|
||||
utils.log_h2("clean")
|
||||
utils.delete_dir(utils.get_env("HOME") + "/Library/Developer/Xcode/Archives")
|
||||
utils.delete_dir(utils.get_env("HOME") + "/Library/Caches/Sparkle_generate_appcast")
|
||||
|
||||
if target.startswith('diskimage'):
|
||||
if (target == 'diskimage-x86_64'): suffix = 'x86_64'
|
||||
elif (target == 'diskimage-x86_64-v8'): suffix = 'v8'
|
||||
elif (target == 'diskimage-arm64'): suffix = 'arm'
|
||||
else: exit(1)
|
||||
lane = "release_" + suffix
|
||||
scheme = package_name + '-' + suffix
|
||||
utils.log_h2("build")
|
||||
source_dir = "%s/build_tools/out/%s/%s" \
|
||||
% (common.workspace_dir, common.prefix, branding.company_name)
|
||||
if branding.onlyoffice:
|
||||
for path in utils.glob_path(source_dir \
|
||||
+ "/desktopeditors/editors/web-apps/apps/*/main/resources/help"):
|
||||
utils.delete_dir(path)
|
||||
|
||||
make_diskimage(target)
|
||||
if utils.get_env("ARCHIVES_DIR"):
|
||||
sparkle_updates = True
|
||||
released_updates_dir = "%s/%s/_updates" % (utils.get_env("ARCHIVES_DIR"), scheme)
|
||||
plistbuddy = "/usr/libexec/PlistBuddy"
|
||||
plist_path = "%s/%s/ONLYOFFICE/Resources/%s-%s/Info.plist" \
|
||||
% (common.workspace_dir, branding_dir, package_name, suffix)
|
||||
|
||||
if ('sparkle-updates' in targets):
|
||||
make_sparkle_updates()
|
||||
appcast = utils.sh_output('%s -c "Print :SUFeedURL" %s' \
|
||||
% (plistbuddy, plist_path), verbose=True).rstrip()
|
||||
appcast = released_updates_dir + "/" + appcast[appcast.rfind("/")+1:]
|
||||
|
||||
release_version_string = utils.sh_output(
|
||||
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:shortVersionString\']/text()" ' + appcast,
|
||||
verbose=True).rstrip()
|
||||
release_version = utils.sh_output(
|
||||
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:version\']/text()" ' + appcast,
|
||||
verbose=True).rstrip()
|
||||
bundle_version = str(int(release_version) + 1)
|
||||
help_url = "https://download.onlyoffice.com/install/desktop/editors/help/v" + common.version + "/apps"
|
||||
|
||||
utils.sh('%s -c "Set :CFBundleShortVersionString %s" %s' \
|
||||
% (plistbuddy, common.version, plist_path), verbose=True)
|
||||
utils.sh('%s -c "Set :CFBundleVersion %s" %s' \
|
||||
% (plistbuddy, bundle_version, plist_path), verbose=True)
|
||||
utils.sh('%s -c "Set :ASCBundleBuildNumber %s" %s' \
|
||||
% (plistbuddy, common.build, plist_path), verbose=True)
|
||||
utils.sh('%s -c "Add :ASCWebappsHelpUrl string %s" %s' \
|
||||
% (plistbuddy, help_url, plist_path), verbose=True)
|
||||
|
||||
utils.log("RELEASE=" + release_version_string + "(" + release_version + ")" \
|
||||
+ "\nCURRENT=" + common.version + "(" + bundle_version + ")")
|
||||
|
||||
dmg = make_dmg()
|
||||
if dmg and sparkle_updates:
|
||||
make_sparkle_updates()
|
||||
if common.platform != "darwin_x86_64_v8":
|
||||
make_dmg("commercial")
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_diskimage(target):
|
||||
log("\n=== Build package " + scheme + "\n")
|
||||
log("--- build/" + package_name + ".app")
|
||||
cmd("bundler", ["exec", "fastlane", lane, "skip_git_bump:true"])
|
||||
return
|
||||
def make_dmg(target = "opensource"):
|
||||
utils.log_h2("desktop dmg " + target + " build")
|
||||
utils.log_h3("build/" + package_name + ".app")
|
||||
args = ["bundler", "exec", "fastlane", lane, "skip_git_bump:true"]
|
||||
if target == "commercial":
|
||||
args += ["edition:Enterprise"]
|
||||
dmg = utils.sh(" ".join(args), verbose=True)
|
||||
utils.set_summary("desktop dmg " + target + " build", dmg)
|
||||
|
||||
if common.deploy and dmg:
|
||||
utils.log_h2("desktop dmg " + target + " deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("build/*.dmg"),
|
||||
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
|
||||
utils.set_summary("desktop dmg deploy", ret)
|
||||
|
||||
if common.deploy and dmg and target != "commercial":
|
||||
utils.log_h2("desktop zip " + target + " deploy")
|
||||
ret = s3_upload(
|
||||
["build/%s-%s.zip" % (scheme, common.version)],
|
||||
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
|
||||
utils.set_summary("desktop zip " + target + " deploy", ret)
|
||||
return dmg
|
||||
|
||||
def make_sparkle_updates():
|
||||
log("\n=== Build sparkle updates\n")
|
||||
utils.log_h2("desktop sparkle files build")
|
||||
|
||||
app_version = proc_open("/usr/libexec/PlistBuddy \
|
||||
-c 'print :CFBundleShortVersionString' \
|
||||
build/" + package_name + ".app/Contents/Info.plist")['stdout']
|
||||
zip_filename = scheme + '-' + app_version
|
||||
zip_filename = scheme + '-' + common.version
|
||||
macos_zip = "build/" + zip_filename + ".zip"
|
||||
updates_storage_dir = "%s/%s/_updates" % (get_env('ARCHIVES_DIR'), scheme)
|
||||
create_dir(updates_dir)
|
||||
copy_dir_content(updates_storage_dir, updates_dir, ".zip")
|
||||
copy_dir_content(updates_storage_dir, updates_dir, ".html")
|
||||
copy_file(macos_zip, updates_dir)
|
||||
utils.create_dir(updates_dir)
|
||||
utils.copy_file(macos_zip, updates_dir)
|
||||
utils.sh(
|
||||
"ls -1t " + released_updates_dir + "/*.zip" \
|
||||
+ " | head -n 3" \
|
||||
+ " | while read f; do cp -fv \"$f\" " + updates_dir + "/; done",
|
||||
verbose=True)
|
||||
|
||||
for lang, base in update_changes_list.items():
|
||||
notes_src = "%s/%s/%s.html" % (changes_dir, app_version, base)
|
||||
notes_dst = "%s/%s.html" % (updates_dir, zip_filename)
|
||||
if lang == 'en':
|
||||
encoding = 'en_US.UTF-8'
|
||||
cur_date = sh_output("env LC_ALL=" + encoding + " date -u \"+%B %e, %Y\"", verbose=True)
|
||||
elif lang == 'ru':
|
||||
encoding = 'ru_RU.UTF-8'
|
||||
cur_date = sh_output("env LC_ALL=" + encoding + " date -u \"+%e %B %Y\"", verbose=True)
|
||||
if is_file(notes_src):
|
||||
copy_file(notes_src, notes_dst)
|
||||
replace_in_file(notes_dst,
|
||||
r"(<span class=\"releasedate\">).+(</span>)",
|
||||
"\\1 - " + cur_date + "\\2")
|
||||
# else:
|
||||
# write_file(notes_dst, "placeholder\n")
|
||||
cmd(git_dir + "/" + build_dir + "/Vendor/Sparkle/bin/generate_appcast", [updates_dir])
|
||||
for ext in [".html", ".ru.html"]:
|
||||
changes_src = changes_dir + "/" + common.version + "/changes" + ext
|
||||
changes_dst = updates_dir + "/" + zip_filename + ext
|
||||
if not utils.copy_file(changes_src, changes_dst):
|
||||
utils.write_file(changes_dst, "<!DOCTYPE html>placeholder")
|
||||
|
||||
log("\n=== Edit Sparkle appcast links\n")
|
||||
appcast_url = sparkle_base_url + "/" + suffix
|
||||
appcast = "%s/%s.xml" % (updates_dir, package_name.lower())
|
||||
sparkle_base_url = "%s/%s/updates/" % (branding.sparkle_base_url, suffix)
|
||||
ret = utils.sh(
|
||||
common.workspace_dir \
|
||||
+ "/desktop-apps/macos/Vendor/Sparkle/bin/generate_appcast " \
|
||||
+ updates_dir \
|
||||
+ " --download-url-prefix " + sparkle_base_url \
|
||||
+ " --release-notes-url-prefix " + sparkle_base_url,
|
||||
verbose=True
|
||||
)
|
||||
utils.set_summary("desktop sparkle files build", ret)
|
||||
|
||||
for lang, base in update_changes_list.items():
|
||||
if base == "ReleaseNotes":
|
||||
replace_in_file(appcast,
|
||||
r"(<sparkle:releaseNotesLink>)(?:.+" + package_name + \
|
||||
"-(?:x86|x86_64|v8|arm)-([0-9.]+)\..+)(</sparkle:releaseNotesLink>)",
|
||||
"\\1" + appcast_url + "/updates/changes/\\2/" + base + ".html\\3")
|
||||
else:
|
||||
replace_in_file(appcast,
|
||||
r"(<sparkle:releaseNotesLink xml:lang=\"" + lang + "\">)(?:" + package_name + \
|
||||
"-(?:x86|x86_64|v8|arm)-([0-9.]+)\..+)(</sparkle:releaseNotesLink>)",
|
||||
"\\1" + appcast_url + "/updates/changes/\\2/" + base + ".html\\3")
|
||||
replace_in_file(appcast,
|
||||
r"(url=\")(?:.+/)(" + package_name + ".+\")",
|
||||
"\\1" + appcast_url + "/updates/\\2")
|
||||
|
||||
log("\n=== Delete unnecessary files\n")
|
||||
for file in os.listdir(updates_dir):
|
||||
if (-1 == file.find(app_version)) and (file.endswith(".zip") or
|
||||
file.endswith(".html")):
|
||||
delete_file(updates_dir + '/' + file)
|
||||
if common.deploy:
|
||||
utils.log_h2("desktop sparkle files deploy")
|
||||
ret = s3_upload(
|
||||
utils.glob_path("build/update/*.delta") \
|
||||
+ utils.glob_path("build/update/*.xml") \
|
||||
+ utils.glob_path("build/update/*.html"),
|
||||
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
|
||||
utils.set_summary("desktop sparkle files deploy", ret)
|
||||
return
|
||||
|
||||
#
|
||||
# Linux
|
||||
#
|
||||
|
||||
def make_linux():
|
||||
utils.set_cwd("desktop-apps/package")
|
||||
|
||||
for edition in ["opensource", "commercial"]:
|
||||
utils.log_h2("desktop " + edition + " build")
|
||||
make_args = [t["make"] for t in branding.desktop_make_targets]
|
||||
if edition == "commercial":
|
||||
make_args += ["-e", "PACKAGE_EDITION=commercial"]
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../../" + common.branding + "/desktop-apps/package"]
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("desktop " + edition + " build", ret)
|
||||
|
||||
if common.deploy:
|
||||
for t in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop " + edition + " " + t["make"] + " deploy")
|
||||
uploads = []
|
||||
for f in utils.glob_path(t["src"]):
|
||||
if "help" in f and not \
|
||||
("x86_64" in common.platform and edition == "opensource"): continue
|
||||
uploads.append(f)
|
||||
ret = s3_upload(uploads, t["dst"])
|
||||
utils.set_summary("desktop " + edition + " " + t["make"] + " deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
38
scripts/package_mobile.py
Normal file
38
scripts/package_mobile.py
Normal file
@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
|
||||
utils.log_h1("MOBILE")
|
||||
if not utils.is_linux():
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
make_mobile()
|
||||
return
|
||||
|
||||
def make_mobile():
|
||||
utils.set_cwd("build_tools/out")
|
||||
|
||||
zip_file = "build-" + common.version + "-" + common.build + ".zip"
|
||||
|
||||
if common.clean:
|
||||
utils.log_h2("mobile clean")
|
||||
utils.sh("rm -rfv *.zip", verbose=True)
|
||||
|
||||
utils.log_h2("mobile build")
|
||||
ret = utils.sh("zip -r " + zip_file + " ./android ./ios", verbose=True)
|
||||
utils.set_summary("mobile build", ret)
|
||||
|
||||
if common.deploy:
|
||||
if ret:
|
||||
utils.log_h2("mobile deploy")
|
||||
key = "mobile/android/" + zip_file
|
||||
ret = utils.s3_upload(zip_file, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if ret:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
utils.set_summary("mobile deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
@ -1,25 +1,82 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
|
||||
import base
|
||||
import os
|
||||
|
||||
def make(platform, targets):
|
||||
base_dir = base.get_script_dir() + "/../out"
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
package_dir = os.path.abspath(git_dir + "/document-server-package")
|
||||
|
||||
if ("windows" == platform) or ("linux" == platform):
|
||||
|
||||
if ("packages" in targets):
|
||||
|
||||
print("Make clean")
|
||||
base.cmd_in_dir(package_dir, "make", ["clean"])
|
||||
|
||||
print("Make packages")
|
||||
base.cmd_in_dir(package_dir, "make", ["packages"])
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make(edition):
|
||||
utils.log_h1("SERVER (" + edition.upper() + ")")
|
||||
if utils.is_windows():
|
||||
make_windows(edition)
|
||||
elif utils.is_linux():
|
||||
make_linux(edition)
|
||||
else:
|
||||
|
||||
exit(1)
|
||||
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
|
||||
def s3_upload(files, dst):
|
||||
if not files: return False
|
||||
ret = True
|
||||
for f in files:
|
||||
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
|
||||
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
|
||||
if upload:
|
||||
utils.log("URL: " + branding.s3_base_url + "/" + key)
|
||||
ret &= upload
|
||||
return ret
|
||||
|
||||
def make_windows(edition):
|
||||
if edition == "enterprise":
|
||||
product_name = "DocumentServer-EE"
|
||||
elif edition == "developer":
|
||||
product_name = "DocumentServer-DE"
|
||||
else:
|
||||
product_name = "DocumentServer"
|
||||
utils.set_cwd("document-server-package")
|
||||
|
||||
utils.log_h2("server " + edition + " build")
|
||||
ret = utils.cmd("make", "clean", verbose=True)
|
||||
if edition == "prerequisites":
|
||||
make_args = ["exe-pr"]
|
||||
else:
|
||||
make_args = ["exe", "-e", "PRODUCT_NAME=" + product_name]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
|
||||
ret &= utils.cmd("make", *make_args, verbose=True)
|
||||
utils.set_summary("server " + edition + " build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("server " + edition + " inno deploy")
|
||||
ret = s3_upload(utils.glob_path("exe/*.exe"), "server/win/inno/")
|
||||
utils.set_summary("server " + edition + " inno deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_linux(edition):
|
||||
if edition == "enterprise":
|
||||
product_name = "documentserver-ee"
|
||||
elif edition == "developer":
|
||||
product_name = "documentserver-de"
|
||||
else:
|
||||
product_name = "documentserver"
|
||||
utils.set_cwd("document-server-package")
|
||||
|
||||
utils.log_h2("server " + edition + " build")
|
||||
make_args = [t["make"] for t in branding.server_make_targets]
|
||||
make_args += ["-e", "PRODUCT_NAME=" + product_name]
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("server " + edition + " build", ret)
|
||||
|
||||
if common.deploy:
|
||||
for t in branding.server_make_targets:
|
||||
utils.log_h2("server " + edition + " " + t["make"] + " deploy")
|
||||
ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
|
||||
utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import glob
|
||||
import hashlib
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
@ -11,71 +11,84 @@ import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import base
|
||||
|
||||
def parse():
|
||||
parser = argparse.ArgumentParser(description="Build packages.")
|
||||
parser.add_argument('-P', '--product', dest='product', type=str,
|
||||
action='store', help="Defines product")
|
||||
parser.add_argument('-S', '--system', dest='system', type=str,
|
||||
action='store', help="Defines system")
|
||||
parser.add_argument('-R', '--branding', dest='branding', type=str,
|
||||
action='store', help="Provides branding path")
|
||||
parser.add_argument('-V', '--version', dest='version', type=str,
|
||||
action='store', help="Defines version")
|
||||
parser.add_argument('-B', '--build', dest='build', type=str,
|
||||
action='store', help="Defines build")
|
||||
parser.add_argument('-T', '--targets', dest='targets', type=str, nargs='+',
|
||||
action='store', help="Defines targets")
|
||||
args = parser.parse_args()
|
||||
|
||||
global product, system, targets, version, build, branding, sign, clean
|
||||
product = args.product
|
||||
system = args.system if (args.system is not None) else host_platform()
|
||||
targets = args.targets
|
||||
version = args.version if (args.version is not None) else get_env('PRODUCT_VERSION', '0.0.0')
|
||||
build = args.build if (args.build is not None) else get_env('BUILD_NUMBER', '0')
|
||||
branding = args.branding
|
||||
return
|
||||
import package_common as common
|
||||
|
||||
def host_platform():
|
||||
return platform.system().lower()
|
||||
|
||||
def log(string, end='\n', bold=False):
|
||||
if bold:
|
||||
out = '\033[1m' + string + '\033[0m' + end
|
||||
else:
|
||||
out = string + end
|
||||
sys.stdout.write(out)
|
||||
def is_windows():
|
||||
return host_platform() == "windows"
|
||||
|
||||
def is_macos():
|
||||
return host_platform() == "darwin"
|
||||
|
||||
def is_linux():
|
||||
return host_platform() == "linux"
|
||||
|
||||
def log(string, end='\n'):
|
||||
sys.stdout.write(string + end)
|
||||
sys.stdout.flush()
|
||||
return
|
||||
|
||||
def get_env(name, default=''):
|
||||
return os.getenv(name, default)
|
||||
|
||||
def set_env(name, value):
|
||||
os.environ[name] = value
|
||||
def log_h1(string):
|
||||
line = "#" * (len(string) + 8)
|
||||
log("\n" + line + "\n### " + string + " ###\n" + line + "\n")
|
||||
return
|
||||
|
||||
def set_cwd(dir):
|
||||
log("- change working dir: " + dir)
|
||||
os.chdir(dir)
|
||||
def log_h2(string):
|
||||
log("\n### " + string + "\n")
|
||||
return
|
||||
|
||||
def get_path(*paths):
|
||||
arr = []
|
||||
for path in paths:
|
||||
if host_platform() == 'windows':
|
||||
arr += path.split('/')
|
||||
else:
|
||||
arr += [path]
|
||||
return os.path.join(*arr)
|
||||
def log_h3(string):
|
||||
log("# " + string)
|
||||
return
|
||||
|
||||
def get_abspath(*paths):
|
||||
arr = []
|
||||
for path in paths:
|
||||
arr += path.split('/')
|
||||
return os.path.abspath(os.path.join(*arr))
|
||||
def log_err(string):
|
||||
log("!!! " + string)
|
||||
return
|
||||
|
||||
def get_timestamp():
|
||||
return "%.f" % time.time()
|
||||
|
||||
def get_env(key, default=None):
|
||||
return os.getenv(key, default)
|
||||
|
||||
def set_env(key, value):
|
||||
os.environ[key] = value
|
||||
return
|
||||
|
||||
def get_cwd():
|
||||
return os.getcwd()
|
||||
|
||||
def set_cwd(path, verbose=True):
|
||||
if verbose:
|
||||
log("- change working dir:")
|
||||
log(" path: " + path)
|
||||
os.chdir(path)
|
||||
return
|
||||
|
||||
def get_path(path):
|
||||
if is_windows():
|
||||
return path.replace("/", "\\")
|
||||
return path
|
||||
|
||||
def get_relpath(path, rel_path):
|
||||
return os.path.relpath(get_path(path), get_path(rel_path))
|
||||
|
||||
def get_abspath(path):
|
||||
return os.path.abspath(get_path(path))
|
||||
|
||||
def get_basename(path):
|
||||
return os.path.basename(path)
|
||||
|
||||
def get_dirname(path):
|
||||
return os.path.dirname(path)
|
||||
|
||||
def get_file_size(path):
|
||||
return os.path.getsize(path)
|
||||
|
||||
def get_script_dir(path):
|
||||
return get_dirname(os.path.realpath(path))
|
||||
|
||||
def is_file(path):
|
||||
return os.path.isfile(path)
|
||||
@ -88,200 +101,309 @@ def is_exist(path):
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_dirname(path):
|
||||
return os.path.dirname(path)
|
||||
def glob_path(path):
|
||||
return glob.glob(path)
|
||||
|
||||
def create_dir(path):
|
||||
log("- create dir: " + path)
|
||||
def glob_file(path):
|
||||
if glob.glob(path) and is_file(glob.glob(path)[0]):
|
||||
return glob.glob(path)[0]
|
||||
return
|
||||
|
||||
def get_hash_sha256(path):
|
||||
if os.path.exists(path):
|
||||
h = hashlib.sha256()
|
||||
h.update(open(path, "rb").read())
|
||||
return h.hexdigest()
|
||||
return
|
||||
|
||||
def get_hash_sha1(path):
|
||||
if os.path.exists(path):
|
||||
h = hashlib.sha1()
|
||||
h.update(open(path, "rb").read())
|
||||
return h.hexdigest()
|
||||
return
|
||||
|
||||
def get_hash_md5(path):
|
||||
if os.path.exists(path):
|
||||
h = hashlib.md5()
|
||||
h.update(open(path, "rb").read())
|
||||
return h.hexdigest()
|
||||
return
|
||||
|
||||
def create_dir(path, verbose=True):
|
||||
if verbose:
|
||||
log("- create_dir:")
|
||||
log(" path: " + path)
|
||||
if not is_exist(path):
|
||||
os.makedirs(path)
|
||||
else:
|
||||
log("! dir exist")
|
||||
log_err("dir exist")
|
||||
return
|
||||
|
||||
def write_file(path, data, encoding='utf-8'):
|
||||
def write_file(path, data, encoding='utf-8', verbose=True):
|
||||
if is_file(path):
|
||||
delete_file(path)
|
||||
log("- write file: " + path)
|
||||
if verbose:
|
||||
log("- write_file:")
|
||||
log(" path: " + path)
|
||||
log(" encoding: " + encoding)
|
||||
log(" data: |\n" + data)
|
||||
with codecs.open(path, 'w', encoding) as file:
|
||||
file.write(data)
|
||||
return
|
||||
|
||||
def write_template(src, dst, encoding='utf-8', **kwargs):
|
||||
template = Template(open(src).read())
|
||||
if is_file(dst):
|
||||
os.remove(dst)
|
||||
log("- write template: " + dst + " < " + src)
|
||||
with codecs.open(dst, 'w', encoding) as file:
|
||||
file.write(template.render(**kwargs))
|
||||
return
|
||||
|
||||
def replace_in_file(path, pattern, textReplace, encoding='utf-8'):
|
||||
log("- replace in file: " + path + \
|
||||
"\n pattern: " + pattern + \
|
||||
"\n replace: " + textReplace)
|
||||
filedata = ""
|
||||
def replace_in_file(path, pattern, text_replace, encoding='utf-8', verbose=True):
|
||||
if verbose:
|
||||
log("- replace_in_file:")
|
||||
log(" path: " + path)
|
||||
log(" pattern: " + pattern)
|
||||
log(" replace: " + text_replace)
|
||||
log(" encoding: " + encoding)
|
||||
file_data = ""
|
||||
with codecs.open(get_path(path), "r", encoding) as file:
|
||||
filedata = file.read()
|
||||
filedata = re.sub(pattern, textReplace, filedata)
|
||||
file_data = file.read()
|
||||
file_data = re.sub(pattern, text_replace, file_data)
|
||||
delete_file(path)
|
||||
with codecs.open(get_path(path), "w", encoding) as file:
|
||||
file.write(filedata)
|
||||
file.write(file_data)
|
||||
return
|
||||
|
||||
def copy_file(src, dst):
|
||||
log("- copy file: " + dst + " < " + src)
|
||||
def copy_file(src, dst, verbose=True):
|
||||
if verbose:
|
||||
log("- copy_file:")
|
||||
log(" src: " + src)
|
||||
log(" dst: " + dst)
|
||||
if is_file(dst):
|
||||
delete_file(dst)
|
||||
delete_file(dst, False)
|
||||
if not is_file(src):
|
||||
log("! file not exist: " + src)
|
||||
log_err("file not exist: " + src)
|
||||
return
|
||||
return shutil.copy2(get_path(src), get_path(dst))
|
||||
|
||||
def copy_files(src, dst, override=True):
|
||||
log("- copy files: " + dst + " < " + src)
|
||||
def copy_files(src, dst, override=True, verbose=True):
|
||||
if verbose:
|
||||
log("- copy_files:")
|
||||
log(" src: " + src)
|
||||
log(" dst: " + dst)
|
||||
log(" override: " + str(override))
|
||||
for file in glob.glob(src):
|
||||
file_name = os.path.basename(file)
|
||||
if is_file(file):
|
||||
if override and is_file(dst + "/" + file_name):
|
||||
delete_file(dst + "/" + file_name)
|
||||
if not is_file(dst + "/" + file_name):
|
||||
copy_file(file, dst)
|
||||
if verbose:
|
||||
log(file + " : " + get_path(dst))
|
||||
shutil.copy2(file, get_path(dst))
|
||||
elif is_dir(file):
|
||||
if not is_dir(dst + "/" + file_name):
|
||||
create_dir(dst + "/" + file_name)
|
||||
copy_files(file + "/*", dst + "/" + file_name, override)
|
||||
return
|
||||
|
||||
def copy_dir(src, dst):
|
||||
if is_dir(dst):
|
||||
delete_dir(dst)
|
||||
try:
|
||||
shutil.copytree(get_path(src), get_path(dst))
|
||||
except OSError as e:
|
||||
log('! Directory not copied. Error: %s' % e)
|
||||
def copy_dir(src, dst, verbose=True, symlinks=False):
|
||||
if verbose:
|
||||
log("- copy_dir:")
|
||||
log(" src: " + src)
|
||||
log(" dst: " + dst)
|
||||
shutil.copytree(src, dst, symlinks=symlinks)
|
||||
return
|
||||
|
||||
def copy_dir_content(src, dst, filterInclude = "", filterExclude = ""):
|
||||
log("- copy dir content: " + src + " " + dst + " " + filterInclude + " " + filterExclude)
|
||||
src_folder = src
|
||||
if ("/" != src[-1:]):
|
||||
src_folder += "/"
|
||||
src_folder += "*"
|
||||
for file in glob.glob(src_folder):
|
||||
basename = os.path.basename(file)
|
||||
if ("" != filterInclude) and (-1 == basename.find(filterInclude)):
|
||||
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
|
||||
if verbose:
|
||||
log("- copy_dir_content:")
|
||||
log(" src: " + src)
|
||||
log(" dst: " + dst)
|
||||
log(" include: " + filter_include)
|
||||
log(" exclude: " + filter_exclude)
|
||||
for item in os.listdir(src):
|
||||
s = os.path.join(src, item)
|
||||
d = os.path.join(dst, item)
|
||||
if ("" != filter_include) and (-1 == item.find(filter_include)):
|
||||
continue
|
||||
if ("" != filterExclude) and (-1 != basename.find(filterExclude)):
|
||||
if ("" != filter_exclude) and (-1 != item.find(filter_exclude)):
|
||||
continue
|
||||
if is_file(file):
|
||||
copy_file(file, dst)
|
||||
elif is_dir(file):
|
||||
copy_dir(file, dst + "/" + basename)
|
||||
if os.path.isdir(s):
|
||||
shutil.copytree(s, d)
|
||||
else:
|
||||
shutil.copy2(s, d)
|
||||
log(item)
|
||||
return
|
||||
|
||||
def delete_file(path):
|
||||
log("- delete file: " + path)
|
||||
def delete_file(path, verbose=True):
|
||||
if verbose:
|
||||
log("- delete_file:")
|
||||
log(" path: " + path)
|
||||
if not is_file(path):
|
||||
log("! file not exist")
|
||||
log_err("file not exist")
|
||||
return
|
||||
return os.remove(path)
|
||||
|
||||
def delete_dir(path):
|
||||
log("- delete dir: " + path)
|
||||
def delete_dir(path, verbose=True):
|
||||
if verbose:
|
||||
log("- delete_dir:")
|
||||
log(" path: " + path)
|
||||
if not is_dir(path):
|
||||
log("! dir not exist")
|
||||
log_err("dir not exist")
|
||||
return
|
||||
shutil.rmtree(path, ignore_errors=True)
|
||||
return
|
||||
|
||||
def delete_files(src):
|
||||
def delete_files(src, verbose=True):
|
||||
if verbose:
|
||||
log("- delete_files:")
|
||||
log(" pattern: " + src)
|
||||
for path in glob.glob(src):
|
||||
if verbose:
|
||||
log(path)
|
||||
if is_file(path):
|
||||
delete_file(path)
|
||||
os.remove(path)
|
||||
elif is_dir(path):
|
||||
delete_dir(path)
|
||||
shutil.rmtree(path, ignore_errors=True)
|
||||
return
|
||||
|
||||
def download_file(url, path):
|
||||
log("- download file: " + path + " < " + url)
|
||||
if is_file(path):
|
||||
os.remove(path)
|
||||
powershell(["Invoke-WebRequest", url, "-OutFile", path])
|
||||
def remove_all_symlinks(dir):
|
||||
for root, dirs, files in os.walk(dir, topdown=True, followlinks=False):
|
||||
for name in files:
|
||||
path = os.path.join(root, name)
|
||||
if os.path.islink(path):
|
||||
os.unlink(path)
|
||||
|
||||
for name in list(dirs):
|
||||
path = os.path.join(root, name)
|
||||
if os.path.islink(path):
|
||||
os.unlink(path)
|
||||
dirs.remove(name)
|
||||
return
|
||||
|
||||
def proc_open(command):
|
||||
log("- open process: " + command)
|
||||
popen = subprocess.Popen(command, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE, shell=True)
|
||||
ret = {'stdout' : '', 'stderr' : ''}
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
ret['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
|
||||
ret['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
def set_summary(target, status):
|
||||
common.summary.append({target: status})
|
||||
return
|
||||
|
||||
def cmd(*args, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- cmd:")
|
||||
log(" command: " + " ".join(args))
|
||||
if kwargs.get("chdir"):
|
||||
log(" chdir: " + kwargs["chdir"])
|
||||
if kwargs.get("creates"):
|
||||
log(" creates: " + kwargs["creates"])
|
||||
if kwargs.get("creates") and is_exist(kwargs["creates"]):
|
||||
log_err("creates exist")
|
||||
return False
|
||||
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
|
||||
oldcwd = get_cwd()
|
||||
set_cwd(kwargs["chdir"], verbose=False)
|
||||
ret = subprocess.call(
|
||||
[i for i in args], stderr=subprocess.STDOUT, shell=True
|
||||
) == 0
|
||||
if kwargs.get("chdir") and oldcwd:
|
||||
set_cwd(oldcwd, verbose=False)
|
||||
return ret
|
||||
|
||||
def cmd(prog, args=[], is_no_errors=False):
|
||||
log("- cmd: " + prog + " " + ' '.join(args))
|
||||
ret = 0
|
||||
if host_platform() == 'windows':
|
||||
sub_args = args[:]
|
||||
sub_args.insert(0, get_path(prog))
|
||||
ret = subprocess.call(sub_args, stderr=subprocess.STDOUT, shell=True)
|
||||
else:
|
||||
command = prog
|
||||
for arg in args:
|
||||
command += (" \"%s\"" % arg)
|
||||
ret = subprocess.call(command, stderr=subprocess.STDOUT, shell=True)
|
||||
if ret != 0 and True != is_no_errors:
|
||||
sys.exit("! error (" + prog + "): " + str(ret))
|
||||
def cmd_output(*args, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- cmd_output:")
|
||||
log(" command: " + " ".join(args))
|
||||
return subprocess.check_output(
|
||||
[i for i in args], stderr=subprocess.STDOUT, shell=True
|
||||
).decode("utf-8")
|
||||
|
||||
def powershell(*args, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- powershell:")
|
||||
log(" command: " + " ".join(args))
|
||||
if kwargs.get("chdir"):
|
||||
log(" chdir: " + kwargs["chdir"])
|
||||
if kwargs.get("creates"):
|
||||
log(" creates: " + kwargs["creates"])
|
||||
if kwargs.get("creates") and is_exist(kwargs["creates"]):
|
||||
return False
|
||||
args = ["powershell", "-Command"] + [i for i in args]
|
||||
ret = subprocess.call(
|
||||
args, stderr=subprocess.STDOUT, shell=True
|
||||
) == 0
|
||||
return ret
|
||||
|
||||
def powershell(cmd):
|
||||
log("- pwsh: " + ' '.join(cmd))
|
||||
ret = subprocess.call(['powershell', '-Command'] + cmd,
|
||||
stderr=subprocess.STDOUT, shell=True)
|
||||
if ret != 0:
|
||||
sys.exit("! error: " + str(ret))
|
||||
def ps1(file, args=[], **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- ps1: " + file + " " + " ".join(args))
|
||||
if kwargs.get("creates") and is_exist(kwargs["creates"]):
|
||||
return True
|
||||
ret = subprocess.call(
|
||||
["powershell", "-ExecutionPolicy", "ByPass", "-File", file] + args,
|
||||
stderr=subprocess.STDOUT, shell=True
|
||||
) == 0
|
||||
return ret
|
||||
|
||||
def sh(command, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- sh:")
|
||||
log(" command: " + command)
|
||||
if kwargs.get("chdir"):
|
||||
log(" chdir: " + kwargs["chdir"])
|
||||
if kwargs.get("creates"):
|
||||
log(" creates: " + kwargs["creates"])
|
||||
if kwargs.get("creates") and is_exist(kwargs["creates"]):
|
||||
log_err("creates exist")
|
||||
return False
|
||||
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
|
||||
oldcwd = get_cwd()
|
||||
set_cwd(kwargs["chdir"], verbose=False)
|
||||
ret = subprocess.call(
|
||||
command, stderr=subprocess.STDOUT, shell=True
|
||||
) == 0
|
||||
if kwargs.get("chdir") and oldcwd:
|
||||
set_cwd(oldcwd, verbose=False)
|
||||
return ret
|
||||
|
||||
def sh_output(command, **kwargs):
|
||||
if kwargs.get("verbose"):
|
||||
log("- sh output: " + command)
|
||||
log("- sh_output:")
|
||||
log(" command: " + command)
|
||||
if kwargs.get("chdir"):
|
||||
log(" chdir: " + kwargs["chdir"])
|
||||
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
|
||||
oldcwd = get_cwd()
|
||||
set_cwd(kwargs["chdir"], verbose=False)
|
||||
ret = subprocess.check_output(
|
||||
command, stderr=subprocess.STDOUT, shell=True
|
||||
)
|
||||
return ret.decode("utf-8").strip()
|
||||
).decode("utf-8")
|
||||
log(ret)
|
||||
if kwargs.get("chdir") and oldcwd:
|
||||
set_cwd(oldcwd, verbose=False)
|
||||
return ret
|
||||
|
||||
def get_platform(target):
|
||||
xp = (-1 != target.find('-xp'))
|
||||
if (-1 != target.find('-x64')):
|
||||
return {'machine': "64", 'arch': "x64", 'xp': xp}
|
||||
elif (-1 != target.find('-x86')):
|
||||
return {'machine': "32", 'arch': "x86", 'xp': xp}
|
||||
return
|
||||
def s3_upload(src, dst, **kwargs):
|
||||
if not is_file(src):
|
||||
log_err("file not exist: " + src)
|
||||
return False
|
||||
metadata = "sha256=" + get_hash_sha256(src) \
|
||||
+ ",sha1=" + get_hash_sha1(src) \
|
||||
+ ",md5=" + get_hash_md5(src)
|
||||
args = ["aws"]
|
||||
if kwargs.get("endpoint_url"):
|
||||
args += ["--endpoint-url", kwargs["endpoint_url"]]
|
||||
args += ["s3", "cp", "--no-progress"]
|
||||
if kwargs.get("acl"):
|
||||
args += ["--acl", kwargs["acl"]]
|
||||
args += ["--metadata", metadata, src, dst]
|
||||
if is_windows():
|
||||
ret = cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = sh(" ".join(args), verbose=True)
|
||||
return ret
|
||||
|
||||
global git_dir, out_dir, tsa_server, vcredist_links
|
||||
git_dir = get_abspath(get_dirname(__file__), '../..')
|
||||
out_dir = get_abspath(get_dirname(__file__), '../out')
|
||||
timestamp = "%.f" % time.time()
|
||||
tsa_server = "http://timestamp.digicert.com"
|
||||
vcredist_links = {
|
||||
'2022': {
|
||||
'64': "https://aka.ms/vs/17/release/vc_redist.x64.exe",
|
||||
'32': "https://aka.ms/vs/17/release/vc_redist.x86.exe"
|
||||
},
|
||||
'2015': {
|
||||
'64': "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x64.exe",
|
||||
'32': "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x86.exe"
|
||||
},
|
||||
'2013': {
|
||||
'64': "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x64.exe",
|
||||
'32': "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x86.exe"
|
||||
}
|
||||
}
|
||||
isxdl_link = "https://raw.githubusercontent.com/jrsoftware/ispack/is-5_6_1/isxdlfiles/isxdl.dll"
|
||||
def s3_copy(src, dst, **kwargs):
|
||||
args = ["aws"]
|
||||
if kwargs.get("endpoint_url"):
|
||||
args += ["--endpoint-url", kwargs["endpoint_url"]]
|
||||
args += ["s3", "cp", "--no-progress"]
|
||||
if kwargs.get("acl"):
|
||||
args += ["--acl", kwargs["acl"]]
|
||||
args += [src, dst]
|
||||
if is_windows():
|
||||
ret = cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = sh(" ".join(args), verbose=True)
|
||||
return ret
|
||||
|
||||
190
scripts/qmake.py
Normal file
190
scripts/qmake.py
Normal file
@ -0,0 +1,190 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
__dir__name__ = os.path.dirname(__file__)
|
||||
sys.path.append(__dir__name__ + '/core_common/modules/android')
|
||||
|
||||
import base
|
||||
import config
|
||||
import android_ndk
|
||||
import multiprocessing
|
||||
|
||||
def get_make_file_suffix(platform):
|
||||
suffix = platform
|
||||
if config.check_option("config", "debug"):
|
||||
suffix += "_debug_"
|
||||
suffix += config.option("branding")
|
||||
return suffix
|
||||
|
||||
def get_j_num():
|
||||
if ("0" != config.option("multiprocess")):
|
||||
return ["-j" + str(multiprocessing.cpu_count())]
|
||||
return []
|
||||
|
||||
def check_support_platform(platform):
|
||||
qt_dir = base.qt_setup(platform)
|
||||
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe") and not base.is_file(qt_dir + "/bin/qmake.bat"):
|
||||
return False
|
||||
return True
|
||||
|
||||
def make(platform, project, qmake_config_addon="", is_no_errors=False):
|
||||
# check platform
|
||||
if not check_support_platform(platform):
|
||||
print("THIS PLATFORM IS NOT SUPPORTED")
|
||||
return
|
||||
|
||||
old_env = dict(os.environ)
|
||||
|
||||
# qt
|
||||
qt_dir = base.qt_setup(platform)
|
||||
base.set_env("OS_DEPLOY", platform)
|
||||
|
||||
# pro & makefile
|
||||
file_pro = os.path.abspath(project)
|
||||
|
||||
pro_dir = os.path.dirname(file_pro)
|
||||
if (pro_dir.endswith("/.")):
|
||||
pro_dir = pro_dir[:-2]
|
||||
if (pro_dir.endswith("/")):
|
||||
pro_dir = pro_dir[:-1]
|
||||
|
||||
makefile_name = "Makefile." + get_make_file_suffix(platform)
|
||||
makefile = pro_dir + "/" + makefile_name
|
||||
stash_file = pro_dir + "/.qmake.stash"
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(pro_dir)
|
||||
|
||||
if (base.is_file(stash_file)):
|
||||
base.delete_file(stash_file)
|
||||
if (base.is_file(makefile)):
|
||||
base.delete_file(makefile)
|
||||
|
||||
base.set_env("DEST_MAKEFILE_NAME", "./" + makefile_name)
|
||||
|
||||
# setup android env
|
||||
if (-1 != platform.find("android")):
|
||||
base.set_env("ANDROID_NDK_HOST", android_ndk.host["arch"])
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-" + android_ndk.get_sdk_api())
|
||||
base.set_env("PATH", qt_dir + "/bin:" + android_ndk.toolchain_dir() + "/bin:" + base.get_env("PATH"))
|
||||
|
||||
# setup ios env
|
||||
if (-1 != platform.find("ios")):
|
||||
base.hack_xcode_ios()
|
||||
sdk_name = "iphoneos"
|
||||
if qmake_config_addon.find("ios_simulator") != -1:
|
||||
sdk_name = "iphonesimulator"
|
||||
base.set_env("SDK_PATH", base.find_ios_sdk(sdk_name))
|
||||
base.set_env("XCODE_TOOLCHAIN_PATH", base.find_xcode_toolchain(sdk_name))
|
||||
|
||||
if base.is_file(makefile):
|
||||
base.delete_file(makefile)
|
||||
|
||||
config_param = base.qt_config(platform)
|
||||
if ("" != qmake_config_addon):
|
||||
config_param += (" " + qmake_config_addon)
|
||||
|
||||
# qmake ADDON
|
||||
qmake_addon = []
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon = config.option("qmake_addon").split()
|
||||
|
||||
clean_params = ["clean", "-f", makefile]
|
||||
distclean_params = ["distclean", "-f", makefile]
|
||||
build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon
|
||||
|
||||
qmake_app = qt_dir + "/bin/qmake"
|
||||
|
||||
# non windows platform
|
||||
if not base.is_windows():
|
||||
if base.is_file(qt_dir + "/onlyoffice_qt.conf"):
|
||||
build_params.append("-qtconf")
|
||||
build_params.append(qt_dir + "/onlyoffice_qt.conf")
|
||||
if "1" == config.option("use-clang"):
|
||||
build_params.append("-spec")
|
||||
build_params.append("linux-clang-libc++")
|
||||
|
||||
if "" != config.option("sysroot"):
|
||||
sysroot_path = config.option("sysroot_" + platform)
|
||||
os.environ['QMAKE_CUSTOM_SYSROOT'] = sysroot_path
|
||||
os.environ['QMAKE_CUSTOM_SYSROOT_BIN'] = config.get_custom_sysroot_bin(platform)
|
||||
os.environ['PKG_CONFIG_PATH'] = config.get_custom_sysroot_lib(platform, True) + "/pkgconfig"
|
||||
os.environ['PKG_CONFIG_SYSROOT_DIR'] = sysroot_path
|
||||
|
||||
base.cmd_exe(qmake_app, build_params)
|
||||
|
||||
if "" != config.option("sysroot"):
|
||||
base.set_sysroot_env(platform)
|
||||
|
||||
base.correct_makefile_after_qmake(platform, makefile)
|
||||
if ("1" == config.option("clean")):
|
||||
base.cmd_and_return_cwd("make", clean_params, True)
|
||||
base.cmd_and_return_cwd("make", distclean_params, True)
|
||||
|
||||
if "" != config.option("sysroot"):
|
||||
base.restore_sysroot_env()
|
||||
base.cmd(qmake_app, build_params)
|
||||
if "" != config.option("sysroot"):
|
||||
base.set_sysroot_env(platform)
|
||||
|
||||
base.correct_makefile_after_qmake(platform, makefile)
|
||||
base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num(), is_no_errors)
|
||||
|
||||
if "" != config.option("sysroot"):
|
||||
base.restore_sysroot_env()
|
||||
else:
|
||||
config_params_array = base.qt_config_as_param(config_param)
|
||||
config_params_string = ""
|
||||
for item in config_params_array:
|
||||
config_params_string += (" \"" + item + "\"")
|
||||
qmake_addon_string = " ".join(qmake_addon)
|
||||
if ("" != qmake_addon_string):
|
||||
qmake_addon_string = " " + qmake_addon_string
|
||||
|
||||
vcvarsall_arch = "x64"
|
||||
if base.platform_is_32(platform):
|
||||
vcvarsall_arch = "x86"
|
||||
if (platform == "win_arm64"):
|
||||
vcvarsall_arch = "x64_arm64"
|
||||
|
||||
qmake_env_addon = base.get_env("QT_QMAKE_ADDON")
|
||||
if (qmake_env_addon != ""):
|
||||
qmake_env_addon += " "
|
||||
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + vcvarsall_arch)
|
||||
qmake_addon_string = ""
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
|
||||
qmake_bat.append("call \"" + qmake_app + "\" -nocache " + qmake_env_addon + file_pro + config_params_string + qmake_addon_string)
|
||||
if ("1" == config.option("clean")):
|
||||
qmake_bat.append("call nmake " + " ".join(clean_params))
|
||||
qmake_bat.append("call nmake " + " ".join(distclean_params))
|
||||
qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
qmake_bat.append("set CL=/MP")
|
||||
qmake_bat.append("call nmake -f " + makefile)
|
||||
base.run_as_bat(qmake_bat, is_no_errors)
|
||||
|
||||
if (base.is_file(stash_file)):
|
||||
base.delete_file(stash_file)
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
def make_all_platforms(project, qmake_config_addon=""):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
continue
|
||||
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
make(platform, project, qmake_config_addon)
|
||||
return
|
||||
@ -1,9 +1,11 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import shutil
|
||||
import re
|
||||
import argparse
|
||||
|
||||
def readFile(path):
|
||||
with open(path, "r") as file:
|
||||
with open(path, "r", errors='replace') as file:
|
||||
filedata = file.read()
|
||||
return filedata
|
||||
|
||||
@ -11,7 +13,7 @@ def writeFile(path, content):
|
||||
if (os.path.isfile(path)):
|
||||
os.remove(path)
|
||||
|
||||
with open(path, "w") as file:
|
||||
with open(path, "w", encoding='utf-8') as file:
|
||||
file.write(content)
|
||||
return
|
||||
|
||||
@ -46,12 +48,12 @@ class EditorApi(object):
|
||||
if -1 != retParam.find("[]"):
|
||||
isArray = True
|
||||
retParam = retParam.replace("[]", "")
|
||||
retType = retParam.replace("|", " ").split(" ")[0]
|
||||
retType = retParam.replace("|", " ").replace(".", " ").split(" ")[0]
|
||||
retTypeLower = retType.lower()
|
||||
retValue = ""
|
||||
if -1 != retType.find("\""):
|
||||
retValue = "\"\""
|
||||
elif "bool" == retTypeLower:
|
||||
elif "boolean" == retTypeLower or "bool" == retTypeLower:
|
||||
retValue = "true"
|
||||
elif "string" == retTypeLower:
|
||||
retValue = "\"\""
|
||||
@ -61,6 +63,12 @@ class EditorApi(object):
|
||||
retValue = "undefined"
|
||||
elif "null" == retTypeLower:
|
||||
retValue = "null"
|
||||
elif "array" == retTypeLower:
|
||||
retValue = "[]"
|
||||
elif "base64img" == retTypeLower:
|
||||
retValue = "base64img"
|
||||
elif "error" == retTypeLower:
|
||||
retValue = "undefined"
|
||||
else:
|
||||
retValue = "new " + retType + "()"
|
||||
if isArray:
|
||||
@ -72,30 +80,42 @@ class EditorApi(object):
|
||||
rec = rec.replace("\t", "")
|
||||
rec = rec.replace('\n ', '\n')
|
||||
indexEndDecoration = rec.find("*/")
|
||||
|
||||
indexOfStartPropName = rec.find('Object.defineProperty(')
|
||||
if indexOfStartPropName != -1:
|
||||
propName = re.search(r'"([^\"]*)"', rec[indexOfStartPropName:])[0]
|
||||
else:
|
||||
propName = None
|
||||
|
||||
decoration = "/**" + rec[0:indexEndDecoration + 2]
|
||||
decoration = decoration.replace("Api\n", "ApiInterface\n")
|
||||
decoration = decoration.replace("Api ", "ApiInterface ")
|
||||
decoration = decoration.replace("{Api}", "{ApiInterface}")
|
||||
decoration = decoration.replace("@return ", "@returns ")
|
||||
decoration = decoration.replace("@returns {?", "@returns {")
|
||||
decoration = decoration.replace("?}", "}")
|
||||
if -1 != decoration.find("@name ApiInterface"):
|
||||
self.append_record(decoration, "var ApiInterface = function() {};\nvar Api = new ApiInterface();\n", True)
|
||||
return
|
||||
code = rec[indexEndDecoration + 2:]
|
||||
code = code.strip("\t\n\r ")
|
||||
code = code.replace("=\n", "= ").strip("\t\n\r ")
|
||||
lines = code.split("\n")
|
||||
codeCorrect = ""
|
||||
sFuncName = ""
|
||||
sMethodName = re.search(r'.prototype.(.*)=', code)
|
||||
|
||||
is_found_function = False
|
||||
addon_for_func = "{}"
|
||||
if -1 != decoration.find("@return"):
|
||||
addon_for_func = "{ return null; }"
|
||||
|
||||
for line in lines:
|
||||
line = line.strip("\t\n\r ")
|
||||
line = line.replace("{", "")
|
||||
line = line.replace("}", "")
|
||||
lineWithoutSpaces = line.replace(" ", "")
|
||||
if not is_found_function and 0 == line.find("function "):
|
||||
if -1 == decoration.find("@constructor"):
|
||||
return
|
||||
codeCorrect += (line + addon_for_func + "\n")
|
||||
is_found_function = True
|
||||
if not is_found_function and -1 != line.find(".prototype."):
|
||||
@ -107,6 +127,20 @@ class EditorApi(object):
|
||||
codeCorrect += (line + "\n")
|
||||
codeCorrect = codeCorrect.replace("Api.prototype", "ApiInterface.prototype")
|
||||
self.append_record(decoration, codeCorrect)
|
||||
className = codeCorrect[0:codeCorrect.find('.')]
|
||||
|
||||
# если свойство определено сразу под методом (без декорации)
|
||||
if propName is not None and sMethodName is not None:
|
||||
prop_define = f'{className}.prototype.{propName[1:-1]} = {className}.prototype.{sMethodName.group(1)}();\n'
|
||||
self.append_record(decoration, prop_define)
|
||||
#иначе
|
||||
elif propName is not None:
|
||||
className = re.search(r'.defineProperty\((.*).prototype', code).group(1).strip()
|
||||
returnValue = 'undefined' if decoration.find('@return') == -1 else self.getReturnValue(decoration)
|
||||
if (returnValue != 'undefined'):
|
||||
returnValue = re.search(r'{ return (.*); }', returnValue).group(1).strip()
|
||||
prop_define = f'{className}.prototype.{propName[1:-1]} = {returnValue};\n'
|
||||
self.append_record(decoration, prop_define)
|
||||
return
|
||||
|
||||
def append_record(self, decoration, code, init=False):
|
||||
@ -126,6 +160,12 @@ class EditorApi(object):
|
||||
editors_support = decoration[index_type_editors:index_type_editors_end]
|
||||
if -1 == editors_support.find(self.type):
|
||||
return
|
||||
|
||||
decoration = "\n".join(
|
||||
line for line in decoration.splitlines()
|
||||
if "@typeofeditors" not in line and "@see" not in line
|
||||
)
|
||||
|
||||
# optimizations for first file
|
||||
if 0 == self.numfile:
|
||||
self.records.append(decoration + "\n" + code + "\n")
|
||||
@ -147,7 +187,7 @@ class EditorApi(object):
|
||||
|
||||
def generate(self):
|
||||
for file in self.files:
|
||||
file_content = readFile(file)
|
||||
file_content = readFile(f'{sdkjs_dir}/{file}')
|
||||
arrRecords = file_content.split("/**")
|
||||
arrRecords = arrRecords[1:-1]
|
||||
for record in arrRecords:
|
||||
@ -155,8 +195,8 @@ class EditorApi(object):
|
||||
self.numfile += 1
|
||||
correctContent = ''.join(self.records)
|
||||
correctContent += "\n"
|
||||
os.mkdir('deploy/api_builder/' + self.folder)
|
||||
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
|
||||
os.mkdir(args.destination + self.folder)
|
||||
writeFile(args.destination + self.folder + "/api.js", correctContent)
|
||||
return
|
||||
|
||||
def convert_to_interface(arrFiles, sEditorType):
|
||||
@ -165,12 +205,27 @@ def convert_to_interface(arrFiles, sEditorType):
|
||||
editor.generate()
|
||||
return
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir("../../../sdkjs")
|
||||
if True == os.path.isdir('deploy/api_builder'):
|
||||
shutil.rmtree('deploy/api_builder', ignore_errors=True)
|
||||
os.mkdir('deploy/api_builder')
|
||||
convert_to_interface(["word/apiBuilder.js"], "word")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
|
||||
os.chdir(old_cur)
|
||||
sdkjs_dir = "../../../sdkjs"
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description="Generate documentation")
|
||||
parser.add_argument(
|
||||
"destination",
|
||||
type=str,
|
||||
help="Destination directory for the generated documentation",
|
||||
nargs='?', # Indicates the argument is optional
|
||||
default="../../../web-apps/vendor/monaco/libs/" # Default value
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
old_cur = os.getcwd()
|
||||
|
||||
if True == os.path.isdir(args.destination):
|
||||
shutil.rmtree(args.destination, ignore_errors=True)
|
||||
os.mkdir(args.destination)
|
||||
convert_to_interface(["word/apiBuilder.js", "../sdkjs-forms/apiBuilder.js"], "word")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
|
||||
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
|
||||
os.chdir(old_cur)
|
||||
|
||||
|
||||
|
||||
138
scripts/sdkjs_common/jsdoc/README.md
Normal file
138
scripts/sdkjs_common/jsdoc/README.md
Normal file
@ -0,0 +1,138 @@
|
||||
# Documentation Generation Guide
|
||||
|
||||
This guide explains how to generate documentation for Onlyoffice Builder
|
||||
and Plugins (Methods/Events) API using the following Python scripts:
|
||||
|
||||
- `office-api/generate_docs_json.py`
|
||||
- `office-api/generate_docs_md.py`
|
||||
- `plugins/generate_docs_methods_json.py`
|
||||
- `plugins/generate_docs_methods_md.py`
|
||||
- `plugins/generate_docs_events_json.py`
|
||||
- `plugins/generate_docs_events_md.py`
|
||||
|
||||
## Requirements
|
||||
|
||||
```bash
|
||||
Node.js v20 and above
|
||||
Python v3.12 and above
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
git clone https://github.com/ONLYOFFICE/build_tools.git
|
||||
cd build_tools/scripts/sdkjs_common/jsdoc
|
||||
npm install
|
||||
```
|
||||
|
||||
## Scripts Overview
|
||||
|
||||
### `office-api/generate_docs_json.py`
|
||||
|
||||
This script generates JSON documentation based on the `apiBuilder.js` files.
|
||||
|
||||
- **Usage**:
|
||||
|
||||
```bash
|
||||
python generate_docs_json.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the JSON documentation
|
||||
will be saved. If not specified, the default path is
|
||||
`../../../../office-js-api-declarations/office-js-api`.
|
||||
|
||||
### `office-api/generate_docs_md.py`
|
||||
|
||||
This script generates Markdown documentation from the `apiBuilder.js` files.
|
||||
|
||||
- **Usage**:
|
||||
|
||||
```bash
|
||||
python generate_docs_md.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the Markdown documentation
|
||||
will be saved. If not specified, the default path is
|
||||
`../../../../office-js-api/`.
|
||||
|
||||
### `plugins/generate_docs_methods_json.py`
|
||||
|
||||
This script generates JSON documentation based on the `api_plugins.js` files.
|
||||
|
||||
- **Usage**:
|
||||
|
||||
```bash
|
||||
python generate_docs_methods_json.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the JSON documentation
|
||||
will be saved. If not specified, the default path is
|
||||
`../../../../office-js-api-declarations/office-js-api-plugins`.
|
||||
|
||||
### `plugins/generate_docs_events_json.py`
|
||||
|
||||
This script generates JSON documentation based on the `plugin-events.js` files.
|
||||
|
||||
- **Usage**:
|
||||
|
||||
```bash
|
||||
python generate_docs_events_json.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the JSON documentation
|
||||
will be saved. If not specified, the default path is
|
||||
`../../../../office-js-api-declarations/office-js-api-plugins`.
|
||||
|
||||
### `plugins/generate_docs_methods_md.py`
|
||||
|
||||
This script generates Markdown documentation from the `api_plugins.js` files.
|
||||
|
||||
- **Usage**:
|
||||
|
||||
```bash
|
||||
python generate_docs_methods_md.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the Markdown documentation
|
||||
will be saved. If not specified, the default path is
|
||||
`../../../../office-js-api/`.
|
||||
|
||||
### `plugins/generate_docs_events_md.py`
|
||||
|
||||
This script generates Markdown documentation from the `plugin-events.js` files.
|
||||
|
||||
- **Usage**:
|
||||
|
||||
```bash
|
||||
python generate_docs_events_md.py output_path
|
||||
```
|
||||
|
||||
- **Parameters**:
|
||||
- `output_path` (optional): The directory where the Markdown documentation
|
||||
will be saved. If not specified, the default path is
|
||||
`../../../../office-js-api/`.
|
||||
|
||||
## Example
|
||||
|
||||
To generate JSON documentation with the default output path:
|
||||
|
||||
```bash
|
||||
python generate_docs_json.py /path/to/save/json
|
||||
```
|
||||
|
||||
To generate Markdown documentation and specify a custom output path:
|
||||
|
||||
```bash
|
||||
python generate_docs_md.py /path/to/save/markdown
|
||||
```
|
||||
|
||||
## Notes
|
||||
|
||||
- Make sure to have all necessary permissions to run these scripts and write
|
||||
to the specified directories.
|
||||
- The output directories will be created if they do not exist.
|
||||
39
scripts/sdkjs_common/jsdoc/get_latest_branch.py
Normal file
39
scripts/sdkjs_common/jsdoc/get_latest_branch.py
Normal file
@ -0,0 +1,39 @@
|
||||
import subprocess
|
||||
|
||||
def fetch_branches():
|
||||
#Fetch all branches without tags from the remote.
|
||||
subprocess.run(['git', 'fetch', '--no-tags', 'origin', '+refs/heads/*:refs/remotes/origin/*'], check=True)
|
||||
|
||||
def get_branches():
|
||||
#Get list of branches in the repository."""
|
||||
result = subprocess.run(['git', 'branch', '-r'], capture_output=True, text=True)
|
||||
return [line.strip() for line in result.stdout.splitlines()]
|
||||
|
||||
def parse_version(version_str):
|
||||
#Parse version string and return a tuple of integers (major, minor, patch).
|
||||
try:
|
||||
return tuple(map(int, version_str.lstrip('v').split('.')))
|
||||
except ValueError:
|
||||
return (0, 0, 0) # Default for non-parsable versions
|
||||
|
||||
def get_max_version_branch(branches):
|
||||
#Find the branch with the highest version.
|
||||
max_branch = None
|
||||
max_version = (0, 0, 0)
|
||||
|
||||
for branch in branches:
|
||||
parts = branch.split('/')
|
||||
if len(parts) >= 2 and (parts[1] == 'hotfix' or parts[1] == 'release'):
|
||||
version = parse_version(parts[2])
|
||||
if version > max_version:
|
||||
max_version = version
|
||||
max_branch = parts
|
||||
|
||||
return max_branch
|
||||
|
||||
if __name__ == "__main__":
|
||||
fetch_branches() # Fetch branches without tags
|
||||
branches = get_branches()
|
||||
max_version_branch = get_max_version_branch(branches)
|
||||
if max_version_branch:
|
||||
print('/'.join(max_version_branch[1:])) # Print only the branch name without origin
|
||||
16
scripts/sdkjs_common/jsdoc/office-api/config/cell.json
Normal file
16
scripts/sdkjs_common/jsdoc/office-api/config/cell.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/slide/apiBuilder.js", "../../../../../sdkjs/cell/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
231
scripts/sdkjs_common/jsdoc/office-api/config/correct_doclets.js
Normal file
231
scripts/sdkjs_common/jsdoc/office-api/config/correct_doclets.js
Normal file
@ -0,0 +1,231 @@
|
||||
exports.handlers = {
|
||||
processingComplete: function(e) {
|
||||
// array for filtered doclets
|
||||
let filteredDoclets = [];
|
||||
|
||||
const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
|
||||
|
||||
const classesDocletsMap = {}; // doclets for classes write at the end
|
||||
let passedClasses = []; // passed classes for current editor
|
||||
|
||||
// Remove dublicates doclets
|
||||
const latestDoclets = {};
|
||||
e.doclets.forEach(doclet => {
|
||||
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
|
||||
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
|
||||
|
||||
const shouldAddMethod =
|
||||
doclet.kind !== 'member' &&
|
||||
(!doclet.longname || doclet.longname.search('private') === -1) &&
|
||||
doclet.scope !== 'inner' && hasTypeofEditorsTag;
|
||||
|
||||
if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
|
||||
latestDoclets[doclet.longname] = doclet;
|
||||
}
|
||||
});
|
||||
e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));
|
||||
|
||||
// check available classess for current editor
|
||||
for (let i = 0; i < e.doclets.length; i++) {
|
||||
const doclet = e.doclets[i];
|
||||
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
|
||||
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
|
||||
|
||||
const shouldAdd =
|
||||
doclet.kind !== 'member' &&
|
||||
(!doclet.longname || doclet.longname.search('private') === -1) &&
|
||||
doclet.scope !== 'inner' &&
|
||||
(!isMethod || hasTypeofEditorsTag);
|
||||
|
||||
if (shouldAdd) {
|
||||
if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
|
||||
passedClasses.push(cleanName(doclet.memberof));
|
||||
}
|
||||
}
|
||||
else if (doclet.kind == 'class') {
|
||||
classesDocletsMap[cleanName(doclet.name)] = doclet;
|
||||
}
|
||||
}
|
||||
|
||||
// remove unavailave classes in current editor
|
||||
passedClasses = passedClasses.filter(className => {
|
||||
const doclet = classesDocletsMap[className];
|
||||
if (!doclet) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
|
||||
|
||||
// class is passes if there is no editor tag or the current editor is among the tags
|
||||
const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
|
||||
return isPassed;
|
||||
});
|
||||
|
||||
for (let i = 0; i < e.doclets.length; i++) {
|
||||
const doclet = e.doclets[i];
|
||||
const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
|
||||
const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
|
||||
|
||||
let shouldAddMethod =
|
||||
doclet.kind !== 'member' &&
|
||||
(!doclet.longname || doclet.longname.search('private') === -1) &&
|
||||
doclet.scope !== 'inner' && hasTypeofEditorsTag;
|
||||
|
||||
// class names may be the same between editors, we check against the inheritance tree
|
||||
if (doclet.inherits) {
|
||||
const parentClass = doclet.inherits.split('#')[0];
|
||||
const curClass = cleanName(doclet.memberof);
|
||||
|
||||
if (!classesDocletsMap[curClass].augments || !classesDocletsMap[curClass].augments.includes(parentClass)) {
|
||||
shouldAddMethod = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldAddMethod) {
|
||||
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
|
||||
if (false == passedClasses.includes(cleanName(doclet.memberof))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// We leave only the necessary fields
|
||||
doclet.memberof = cleanName(doclet.memberof);
|
||||
doclet.longname = cleanName(doclet.longname);
|
||||
doclet.name = cleanName(doclet.name);
|
||||
|
||||
// skip inherited methods if ovveriden in child class
|
||||
if (doclet.inherited && filteredDoclets.find((addedDoclet) => addedDoclet['name'] == doclet['name'] && addedDoclet['memberof'] == doclet['memberof'])) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
description: doclet.description,
|
||||
memberof: cleanName(doclet.memberof),
|
||||
|
||||
params: doclet.params ? doclet.params.map(param => ({
|
||||
type: param.type ? {
|
||||
names: param.type.names,
|
||||
parsedType: param.type.parsedType
|
||||
} : param.type,
|
||||
|
||||
name: param.name,
|
||||
description: param.description,
|
||||
optional: param.optional,
|
||||
defaultvalue: param.defaultvalue
|
||||
})) : doclet.params,
|
||||
|
||||
returns: doclet.returns ? doclet.returns.map(returnObj => ({
|
||||
type: {
|
||||
names: returnObj.type.names,
|
||||
parsedType: returnObj.type.parsedType
|
||||
}
|
||||
})) : doclet.returns,
|
||||
|
||||
name: doclet.name,
|
||||
longname: cleanName(doclet.longname),
|
||||
kind: doclet.kind,
|
||||
scope: doclet.scope,
|
||||
|
||||
type: doclet.type ? {
|
||||
names: doclet.type.names,
|
||||
parsedType: doclet.type.parsedType
|
||||
} : doclet.type,
|
||||
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
|
||||
see: doclet.see
|
||||
};
|
||||
|
||||
// Add the filtered doclet to the array
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
else if (doclet.kind == 'class') {
|
||||
// if the class is not in our map, then we deleted it ourselves -> not available in the editor
|
||||
if (false == passedClasses.includes(cleanName(doclet.name))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
description: doclet.description,
|
||||
name: cleanName(doclet.name),
|
||||
longname: cleanName(doclet.longname),
|
||||
kind: doclet.kind,
|
||||
scope: "global",
|
||||
augments: doclet.augments || undefined,
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
see: doclet.see || undefined
|
||||
};
|
||||
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
else if (doclet.kind == 'typedef') {
|
||||
const filteredDoclet = {
|
||||
comment: doclet.comment,
|
||||
description: doclet.description,
|
||||
name: cleanName(doclet.name),
|
||||
longname: cleanName(doclet.longname),
|
||||
kind: doclet.kind,
|
||||
scope: "global",
|
||||
|
||||
meta: doclet.meta ? {
|
||||
lineno: doclet.meta.lineno,
|
||||
columnno: doclet.meta.columnno
|
||||
} : doclet.meta,
|
||||
|
||||
properties: doclet.properties ? doclet.properties.map(property => ({
|
||||
type: property.type ? {
|
||||
names: property.type.names,
|
||||
parsedType: property.type.parsedType
|
||||
} : property.type,
|
||||
|
||||
name: property.name,
|
||||
description: property.description,
|
||||
optional: property.optional,
|
||||
defaultvalue: property.defaultvalue
|
||||
})) : doclet.properties,
|
||||
|
||||
see: doclet.see,
|
||||
type: doclet.type ? {
|
||||
names: doclet.type.names,
|
||||
parsedType: doclet.type.parsedType
|
||||
} : doclet.type
|
||||
};
|
||||
|
||||
filteredDoclets.push(filteredDoclet);
|
||||
}
|
||||
}
|
||||
|
||||
// Replace doclets with a filtered array
|
||||
e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
|
||||
}
|
||||
};
|
||||
16
scripts/sdkjs_common/jsdoc/office-api/config/forms.json
Normal file
16
scripts/sdkjs_common/jsdoc/office-api/config/forms.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs-forms/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/office-api/config/pdf.json
Normal file
16
scripts/sdkjs_common/jsdoc/office-api/config/pdf.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/pdf/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/office-api/config/slide.json
Normal file
16
scripts/sdkjs_common/jsdoc/office-api/config/slide.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/slide/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
16
scripts/sdkjs_common/jsdoc/office-api/config/word.json
Normal file
16
scripts/sdkjs_common/jsdoc/office-api/config/word.json
Normal file
@ -0,0 +1,16 @@
|
||||
{
|
||||
"source": {
|
||||
"include": ["../../../../../sdkjs/word/apiBuilder.js"]
|
||||
},
|
||||
"plugins": ["./correct_doclets.js"],
|
||||
"opts": {
|
||||
"destination": "./out",
|
||||
"recurse": true,
|
||||
"encoding": "utf8"
|
||||
},
|
||||
"templates": {
|
||||
"json": {
|
||||
"pretty": true
|
||||
}
|
||||
}
|
||||
}
|
||||
112
scripts/sdkjs_common/jsdoc/office-api/generate_docs_json.py
Normal file
112
scripts/sdkjs_common/jsdoc/office-api/generate_docs_json.py
Normal file
@ -0,0 +1,112 @@
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
import argparse
|
||||
import re
|
||||
import platform
|
||||
|
||||
script_path = os.path.abspath(__file__)
|
||||
root = os.path.abspath(os.path.join(os.path.dirname(script_path), '../../../../..'))
|
||||
|
||||
# Configuration files
|
||||
configs = [
|
||||
"./config/word.json",
|
||||
"./config/cell.json",
|
||||
"./config/slide.json",
|
||||
"./config/forms.json",
|
||||
"./config/pdf.json"
|
||||
]
|
||||
|
||||
editors_maps = {
|
||||
"word": "CDE",
|
||||
"cell": "CSE",
|
||||
"slide": "CPE",
|
||||
"forms": "CFE",
|
||||
"pdf": "PDFE"
|
||||
}
|
||||
|
||||
def generate(output_dir, md=False):
    """Run jsdoc over every editor config and post-process the JSON dumps.

    For each config in ``configs`` this shells out to ``npx jsdoc -c <config> -X``
    with the EDITOR environment variable set (the jsdoc plugin uses it to
    filter doclets per editor) and captures the doclet dump as
    ``<editor>.json`` inside *output_dir*.  A second pass inlines the example
    file referenced by each doclet's ``see`` entry:

    * ``md=True``  -> store the example as Markdown in ``doclet['example']``.
    * ``md=False`` -> append a "Try it" document-builder snippet to the
      doclet's description.
    """
    os.chdir(os.path.dirname(script_path))

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Generate the raw JSON documentation for every editor.
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Pass EDITOR through the subprocess environment instead of a shell
        # "set"/"export" prefix: identical on Windows and POSIX, and avoids
        # cmd.exe's "set VAR=value && ..." behavior that appends a trailing
        # space to the value.
        env = dict(os.environ, EDITOR=editors_maps[editor_name])
        command = f"npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True, env=env)

    # Append examples to the generated JSON documentation.
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")

        # Read the JSON file produced above.
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)

        # Modify JSON data in place.
        for doclet in data:
            # Truthiness check also skips an empty 'see' list, which the
            # doclet['see'][0] access below would otherwise crash on.
            if not doclet.get('see'):
                continue

            # Forms shares the Word example tree; other editors use their own.
            editor_title = 'Word' if editor_name == 'forms' else editor_name.title()
            doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_title)

            file_path = f'{root}/' + doclet['see'][0]
            if not os.path.exists(file_path):
                continue

            with open(file_path, 'r', encoding='utf-8') as see_file:
                example_content = see_file.read()

            # The example's optional first line is a human-readable comment.
            lines = example_content.split('\n')
            if lines[0].startswith('//'):
                comment = lines[0] + '\n'
                code_content = '\n'.join(lines[1:])
            else:
                comment = ''
                code_content = example_content

            if md:
                doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
            else:
                doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name}"}}\n{code_content}\n```'

        # Write the modified JSON file back.
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)

    print("Documentation generation for builder completed.")
|
||||
|
||||
def remove_builder_lines(text):
    """Drop every line whose stripped form starts with "builder."."""
    kept = [ln for ln in text.splitlines() if not ln.strip().startswith("builder.")]
    return "\n".join(kept)
|
||||
|
||||
def remove_js_comments(text):
    """Strip JS comment markers while keeping the comment text itself.

    '// foo' becomes 'foo'; '/* foo */' becomes 'foo'.
    """
    # Drop the leading '//' marker (plus one optional space) on each line.
    without_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Drop the '/*' and '*/' markers but keep the enclosed text.
    without_block_markers = re.sub(r'/\*\s*|\s*\*/', '', without_line_markers, flags=re.DOTALL)
    return without_block_markers.strip()
|
||||
|
||||
if __name__ == "__main__":
    # Command-line entry point: a single optional positional argument naming
    # the output directory, defaulting to the office-js-api declarations
    # folder that lives next to the repository root.
    arg_parser = argparse.ArgumentParser(description="Generate documentation")
    arg_parser.add_argument(
        "destination",
        type=str,
        help="Destination directory for the generated documentation",
        nargs='?',  # the argument is optional
        default=f"{root}/office-js-api-declarations/office-js-api"
    )
    cli_args = arg_parser.parse_args()
    generate(cli_args.destination)
|
||||
669
scripts/sdkjs_common/jsdoc/office-api/generate_docs_md.py
Normal file
669
scripts/sdkjs_common/jsdoc/office-api/generate_docs_md.py
Normal file
@ -0,0 +1,669 @@
|
||||
import os
|
||||
import json
|
||||
import re
|
||||
import shutil
|
||||
import argparse
|
||||
import generate_docs_json
|
||||
import json
|
||||
from pathlib import PurePosixPath
|
||||
|
||||
# Configuration files
|
||||
editors = {
|
||||
"word": "text-document-api",
|
||||
"cell": "spreadsheet-api",
|
||||
"slide": "presentation-api",
|
||||
"forms": "form-api",
|
||||
"pdf": "pdf-api",
|
||||
}
|
||||
|
||||
|
||||
script_path = os.path.abspath(__file__)
|
||||
root = os.path.abspath(os.path.join(os.path.dirname(script_path), '../../../../..'))
|
||||
|
||||
missing_examples = []
|
||||
used_enumerations = set()
|
||||
translations = {}
|
||||
translations_lang = None
|
||||
missed_translations = {}
|
||||
used_translations_keys = {}
|
||||
global_output_dir = ""
|
||||
cur_editor_name = None
|
||||
|
||||
def find_common_path_part(path_full: str, path_suffix: str, anchor: str) -> str:
    """Return the longest shared run of path segments (joined with '/')
    starting at *anchor* in both paths.

    Comparison is case-insensitive; the returned segments keep the casing of
    *path_full*.  Returns '' when *anchor* is missing from either path.
    """
    normalized_full = path_full.replace('\\', '/')
    normalized_suffix = path_suffix.replace('\\', '/')

    full_parts = PurePosixPath(normalized_full).parts
    suffix_parts = PurePosixPath(normalized_suffix).parts

    anchor_lower = anchor.lower()
    try:
        start_full = [seg.lower() for seg in full_parts].index(anchor_lower)
        start_suffix = [seg.lower() for seg in suffix_parts].index(anchor_lower)
    except ValueError:
        return ""

    shared = []
    for seg_full, seg_suffix in zip(full_parts[start_full:], suffix_parts[start_suffix:]):
        if seg_full.lower() != seg_suffix.lower():
            break
        shared.append(seg_full)

    return "/".join(shared)
|
||||
|
||||
def load_json(file_path):
    """Parse and return the JSON document stored at *file_path* (UTF-8)."""
    with open(file_path, 'r', encoding='utf-8') as handle:
        return json.load(handle)
|
||||
|
||||
def write_markdown_file(file_path, content):
    """Write *content* to *file_path* as UTF-8, replacing any existing file."""
    with open(file_path, 'w', encoding='utf-8') as out:
        out.write(content)
|
||||
|
||||
def remove_js_comments(text):
    """Delete JavaScript comments entirely (both '//' line comments and
    '/* ... */' block comments), then strip surrounding whitespace.

    Unlike the sibling in generate_docs_json.py, this removes the comment
    TEXT too, not just the markers.
    """
    no_line_comments = re.sub(r'^\s*//.*$', '', text, flags=re.MULTILINE)
    no_block_comments = re.sub(r'/\*.*?\*/', '', no_line_comments, flags=re.DOTALL)
    return no_block_comments.strip()
|
||||
|
||||
def get_translation(key):
    """Translate *key* via the global ``translations`` table, tracking usage.

    Keys containing the literal separator '\\' + newline are translated part
    by part and re-joined.  Untranslated fragments are returned unchanged and
    recorded in ``missed_translations``; hits are recorded in
    ``used_translations_keys``.
    """
    def translate_one(fragment):
        if fragment in translations:
            used_translations_keys[fragment] = True
        else:
            missed_translations[fragment] = fragment
        return translations.get(fragment, fragment)

    separator = '\\\n'
    if separator in key:
        return separator.join(translate_one(piece) for piece in key.split(separator))

    return translate_one(key)
|
||||
|
||||
def process_link_tags(text, root=''):
    """
    Replace every {@link ...} occurrence in *text* with a Markdown link.

    Three reference shapes are recognised:
      * '/docs/...'    -> site-relative page, resolved against *root*;
      * 'global#Name'  -> typedef page under Enumeration/ (the typedef is
                          also recorded in used_enumerations);
      * 'Class#Method' -> the method page under Class/Methods/.

    An optional second word inside the braces becomes the link label;
    otherwise the reference itself is shown.  References that do not parse
    as 'Class#Method' are left untouched.
    """

    def render_link(match):
        body = match.group(1).strip()  # e.g. "global#ShapeType shape type"
        tokens = body.split()
        ref = tokens[0]
        label = tokens[1] if len(tokens) > 1 else None

        if ref.startswith('/docs/'):
            url = root + '../../../..' + ref
            text_shown = label if label else ref
            if url.endswith('/'):
                # A trailing slash means "directory index": point at the
                # file named after the last directory segment.
                last_dir = url.rstrip('/').split('/')[-1]
                url = f"{url}{last_dir}"
            return f"[{text_shown}]({url}.md)"

        if ref.startswith("global#"):
            # Typedef reference.
            typedef_name = ref.split("#")[1]
            used_enumerations.add(typedef_name)
            text_shown = label if label else typedef_name
            return f"[{text_shown}]({root}Enumeration/{typedef_name}.md)"

        # Class method reference, e.g. "Api#CreateSlide".
        try:
            class_name, method_name = ref.split("#")
        except ValueError:
            return match.group(0)
        text_shown = label if label else ref  # keep the full "Class#Method" notation
        return f"[{text_shown}]({root}{class_name}/Methods/{method_name}.md)"

    return re.sub(r'{@link\s+([^}]+)}', render_link, text)
|
||||
|
||||
def correct_description(string, root='', isInTable=False):
    """
    Normalize a doclet description for Markdown output.

    - ``None``             -> translated default text.
    - ``<b>...</b>``       -> Markdown bold.  Outside tables the opening tag
      becomes '-**' (NOTE(review): the '-' prefix looks deliberate for
      list-style rendering, but confirm it is not a typo).
    - ``<note>...</note>`` -> a 💡-prefixed remark.
    - ``{@link ...}``      -> Markdown links (see process_link_tags).
    - Outside tables '\\r' becomes a Markdown hard line break; inside tables
      all line breaks are collapsed so the table row stays on one line.

    The result is passed through the translation table before returning.
    """
    if string is None:
        return get_translation('No description provided.')

    if not isInTable:
        # Markdown hard line breaks.
        string = string.replace('\r', '\\\n')
        # Replace <b> tags with Markdown bold formatting.
        string = re.sub(r'<b>', '-**', string)
    else:
        string = re.sub(r'<b>', '**', string)
        # A table cell must not contain raw line breaks.
        string = remove_line_breaks(string)

    string = re.sub(r'</b>', '**', string)

    # Replace <note> tags with an icon and the note text.
    string = re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL)

    # Expand {@link ...} constructions into Markdown links.
    string = process_link_tags(string, root)

    return get_translation(string)
|
||||
|
||||
def correct_default_value(value, enumerations, classes):
    """Render a parameter's default value as Markdown.

    Returns '' when there is no default; booleans are printed in JS style
    ('true'/'false'); everything else is stringified and passed through the
    type-linking renderer.
    """
    if value is None or value == '':
        return ''

    if isinstance(value, bool):
        literal = "true" if value else "false"
    else:
        literal = str(value)

    return generate_data_types_markdown([literal], enumerations, classes)
|
||||
|
||||
def remove_line_breaks(string):
    """Replace every run of CR/LF characters in *string* with one space."""
    line_break_run = r'[\r\n]+'
    return re.sub(line_break_run, ' ', string)
|
||||
|
||||
# Convert Array.<T> => T[] (including nested arrays).
|
||||
def convert_jsdoc_array_to_ts(type_str: str) -> str:
    """
    Rewrite JSDoc 'Array.<T>' notation as TypeScript 'T[]'.

    Repeats until no 'Array.<...>' wrapper remains, so nested forms collapse:
    'Array.<Array.<string>>' -> 'string[][]'.
    """
    array_pattern = re.compile(r'Array\.<([^>]+)>')

    match = array_pattern.search(type_str)
    while match:
        # Recursively convert whatever sits inside the wrapper first.
        element = convert_jsdoc_array_to_ts(match.group(1).strip())
        # Splice 'element[]' in place of the matched 'Array.<...>' span.
        type_str = f"{type_str[:match.start()]}{element}[]{type_str[match.end():]}"
        match = array_pattern.search(type_str)

    return type_str
|
||||
|
||||
def escape_text_outside_code_blocks(markdown: str) -> str:
    """
    Splits content by fenced code blocks, escapes MDX-unsafe characters
    (<, >, {, }) only in the text outside those code blocks.
    """
    # A regex to capture fenced code blocks with ```
    parts = re.split(r'(```.*?```)', markdown, flags=re.DOTALL)

    # Even indices (0, 2, 4, ...) are outside code blocks,
    # odd indices (1, 3, 5, ...) are actual code blocks.
    for i in range(0, len(parts), 2):
        # NOTE(review): each replace() below maps a character to itself and is
        # therefore a no-op as written.  The intent (per the docstring) is MDX
        # escaping, so the replacement strings were most likely HTML entities
        # (e.g. '&lt;') that got decoded somewhere in transit — confirm
        # against the repository before relying on this function.
        text = (parts[i]
                .replace('<', '<')
                .replace('>', '>')
                .replace('{', '{')
                .replace('}', '}'))
        # Brackets inside quoted runs would otherwise read as Markdown links.
        parts[i] = escape_brackets_in_quotes(text)

    return "".join(parts)
|
||||
|
||||
def escape_brackets_in_quotes(text: str) -> str:
    """Backslash-escape '[' and ']' inside single- or double-quoted runs so
    they are not parsed as Markdown link syntax."""
    quoted_span = r"(['\"])(.*?)(?<!\\)\1"

    def escape_span(m):
        quote = m.group(1)
        body = m.group(2).replace('[', r'\[').replace(']', r'\]')
        return quote + body + quote

    return re.sub(quoted_span, escape_span, text)
|
||||
|
||||
def get_base_type(ts_type: str) -> str:
    """
    Strip every trailing '[]' pair from a TypeScript-like type and return
    the element type: "Drawing[][]" -> "Drawing".
    """
    base = ts_type
    while base.endswith('[]'):
        base = base[:-2]
    return base
|
||||
|
||||
def generate_data_types_markdown(types, enumerations, classes, root='../../'):
    """
    1) Converts each type from JSDoc (e.g., Array.<T>) to T[].
    2) Processes union types by splitting them using '|'.
    3) Supports multidimensional arrays, e.g., (string|ApiRange|number)[].
    4) If the base type matches the name of an enumeration or class, generates a link.
    5) The final types are joined using " | ".
    """
    # Convert each type from JSDoc format to TypeScript format (e.g., T[])
    converted = [convert_jsdoc_array_to_ts(t) for t in types]

    # Set of primitive types (lower-cased) that are emitted as plain text.
    primitive_types = {"string", "number", "boolean", "null", "undefined", "any", "object", "false", "true", "json", "function", "date", "{}"}

    # NOTE: the parameter shadows the builtin 'type'; kept as-is.
    # Literals (quoted strings, integer/float constants) also count as primitive.
    def is_primitive(type):
        if (type.lower() in primitive_types or
                (type.startswith('"') and type.endswith('"')) or
                (type.startswith("'") and type.endswith("'")) or
                type.replace('.', '', 1).isdigit() or
                (type.startswith('-') and type[1:].replace('.', '', 1).isdigit())):
            return True
        return False

    # Recursively turn one TypeScript-style type into Markdown, linking
    # enumerations and classes.  Mutates the module-level used_enumerations.
    def link_if_known(ts_type):
        ts_type = ts_type.strip()
        # Count the number of array dimensions, e.g., "[][]" has 2 dimensions
        array_dims = 0
        while ts_type.endswith("[]"):
            array_dims += 1
            ts_type = ts_type[:-2].strip()

        # Process generic types, e.g., Object.<string, editorType>
        if ".<" in ts_type and ts_type.endswith(">"):
            import re  # redundant (re is imported at module level); kept as-is
            m = re.match(r'^(.*?)\.<(.*)>$', ts_type)
            if m:
                base_part = m.group(1).strip()
                generic_args_str = m.group(2).strip()
                # Process the base part of the type: enumeration first,
                # then class, then primitive; otherwise report it.
                found = False
                for enum in enumerations:
                    if enum['name'] == base_part:
                        used_enumerations.add(base_part)
                        base_result = f"[{base_part}]({root}Enumeration/{base_part}.md)"
                        found = True
                        break
                if not found:
                    if base_part in classes:
                        base_result = f"[{base_part}]({root}{base_part}/{base_part}.md)"
                    elif is_primitive(base_part):
                        base_result = base_part
                    elif cur_editor_name == "forms":
                        # Forms docs borrow unknown types from the Word tree.
                        base_result = f"[{base_part}]({root}../text-document-api/{base_part}/{base_part}.md)"
                    else:
                        print(f"Unknown type encountered: {base_part}")
                        base_result = base_part
                # Split the generic parameters by commas and process each recursively
                generic_args = [link_if_known(x) for x in generic_args_str.split(",")]
                result = base_result + ".<" + ", ".join(generic_args) + ">"
                result += "[]" * array_dims
                return result

        # Process union types: if the type is enclosed in parentheses
        if ts_type.startswith("(") and ts_type.endswith(")"):
            inner = ts_type[1:-1].strip()
            subtypes = [sub.strip() for sub in inner.split("|")]
            if len(subtypes) == 1:
                result = link_if_known(subtypes[0])
            else:
                processed = [link_if_known(subtype) for subtype in subtypes]
                result = "(" + " | ".join(processed) + ")"
            result += "[]" * array_dims
            return result

        # If not a generic or union type – process the base type
        else:
            base = ts_type
            found = False
            for enum in enumerations:
                if enum['name'] == base:
                    used_enumerations.add(base)
                    result = f"[{base}]({root}Enumeration/{base}.md)"
                    found = True
                    break
            if not found:
                if base in classes:
                    result = f"[{base}]({root}{base}/{base}.md)"
                elif is_primitive(base):
                    result = base
                elif cur_editor_name == "forms":
                    # Forms docs borrow unknown types from the Word tree.
                    result = f"[{base}]({root}../text-document-api/{base}/{base}.md)"
                else:
                    print(f"Unknown type encountered: {base}")
                    result = base
            result += "[]" * array_dims
            return result

    # Apply link_if_known to each converted type
    linked = [link_if_known(ts_t) for ts_t in converted]

    # Join results using " | ", then escape pipes for Markdown tables
    # (this intentionally escapes the separators just inserted as well).
    param_types_md = r' | '.join(linked)
    param_types_md = param_types_md.replace("|", r"\|")

    # Escape remaining angle brackets for generics.
    # NOTE(review): as written this rewrites '<x>' to itself — the
    # replacement was most likely HTML entities (e.g. '&lt;') decoded
    # somewhere in transit; confirm against the repository.
    def replace_leftover_generics(match):
        element = match.group(1).strip()
        return f"<{element}>"

    param_types_md = re.sub(r'<([^<>]+)>', replace_leftover_generics, param_types_md)

    return param_types_md
|
||||
|
||||
|
||||
def generate_class_markdown(class_name, methods, properties, enumerations, classes):
    """Build the Markdown overview page for a class: intro line, properties
    table, and a table of its methods sorted by name.

    Output is escaped for MDX outside fenced code blocks.
    """
    # Unnested from a 3.12-only same-quote nested f-string for compatibility.
    intro = get_translation(f"Represents the {class_name} class.")
    content = f"# {class_name}\n\n{intro}\n\n"

    content += generate_properties_markdown(properties, enumerations, classes)

    # Methods table header.  Placeholder-free f-strings replaced with plain
    # literals; the generated text is unchanged.
    content += "\n## " + get_translation("Methods") + "\n\n"
    content += ("| " + get_translation("Method")
                + " | " + get_translation("Returns")
                + " | " + get_translation("Description") + " |\n")
    content += "| ------ | ------- | ----------- |\n"

    for method in sorted(methods, key=lambda m: m['name']):
        method_name = method['name']

        # Return-type cell: first declared return, or a translated "None".
        returns = method.get('returns', [])
        if returns:
            return_type_list = returns[0].get('type', {}).get('names', [])
            returns_markdown = generate_data_types_markdown(return_type_list, enumerations, classes, '../')
        else:
            returns_markdown = get_translation("None")

        # isInTable=True keeps the description on a single table row.
        description = correct_description(method.get('description', 'No description provided.'), '../', True)

        # Link to the method's own document.
        method_link = f"[{method_name}](./Methods/{method_name}.md)"

        content += f"| {method_link} | {returns_markdown} | {description} |\n"

    return escape_text_outside_code_blocks(content)
|
||||
|
||||
def generate_method_markdown(method, enumerations, classes, example_editor_name):
    """Build the Markdown page for a single method doclet: heading,
    description, syntax, parameters table, return type, and (when present)
    an example fenced for the given editor.

    Placeholder-free f-strings were replaced with plain literals and the
    3.12-only nested same-quote f-strings unnested; output is unchanged.
    """
    method_name = method['name']
    description = correct_description(method.get('description', 'No description provided.'), '../../')
    params = method.get('params', [])
    returns = method.get('returns', [])
    example = method.get('example', '')
    memberof = method.get('memberof', '')

    content = f"# {method_name}\n\n{description}\n\n"

    # Syntax: only top-level parameters (names containing '.' are nested options).
    param_list = ', '.join([param['name'] for param in params if '.' not in param['name']]) if params else ''
    content += "## " + get_translation("Syntax") + f"\n\n```javascript\nexpression.{method_name}({param_list});\n```\n\n"
    if memberof:
        expression_note = get_translation(f"A variable that represents a [{memberof}](../{memberof}.md) class.")
        content += f"`expression` - {expression_note}\n\n"

    # Parameters table.
    content += "## " + get_translation("Parameters") + "\n\n"
    if params:
        content += ("| **" + get_translation("Name")
                    + "** | **" + get_translation("Required/Optional")
                    + "** | **" + get_translation("Data type")
                    + "** | **" + get_translation("Default")
                    + "** | **" + get_translation("Description") + "** |\n")
        content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n"
        for param in params:
            param_name = param.get('name', 'Unnamed')
            param_types = param.get('type', {}).get('names', []) if param.get('type') else []
            param_types_md = generate_data_types_markdown(param_types, enumerations, classes)
            # isInTable=True keeps the description on a single table row.
            param_desc = correct_description(param.get('description', 'No description provided.'), '../../', True)
            param_required = get_translation("Required") if not param.get('optional') else get_translation("Optional")
            param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes)

            content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n"
    else:
        content += get_translation("This method doesn't have any parameters.") + "\n"

    # Returns section.
    content += "\n## " + get_translation("Returns") + "\n\n"
    if returns:
        return_type_list = returns[0].get('type', {}).get('names', [])
        content += generate_data_types_markdown(return_type_list, enumerations, classes)
    else:
        content += get_translation("This method doesn't return any data.")

    # Example section: either "comment ```js code" or bare code.
    if example:
        if '```js' in example:
            comment, code = example.split('```js', 1)
            comment = get_translation(comment.strip())
            content += ("\n\n## " + get_translation("Example") + f"\n\n{comment}\n\n"
                        + f"```javascript {example_editor_name}\n{code.strip()}\n")
        else:
            # No triple-backtick structure: show the whole example as code.
            cleaned_example = remove_js_comments(example)
            content += ("\n\n## " + get_translation("Example")
                        + f"\n\n```javascript {example_editor_name}\n{cleaned_example}\n```\n")

    return escape_text_outside_code_blocks(content)
|
||||
|
||||
def generate_properties_markdown(properties, enumerations, classes, root='../'):
    """Build the "Properties" Markdown table (name | type | description) for
    a class or typedef; returns '' when the doclet declares no properties.

    Placeholder-free f-strings (f"Properties", ...) were replaced with plain
    literals; the generated text is unchanged.
    """
    if properties is None:
        return ''

    content = "## " + get_translation("Properties") + "\n\n"
    content += ("| " + get_translation("Name")
                + " | " + get_translation("Type")
                + " | " + get_translation("Description") + " |\n")
    content += "| ---- | ---- | ----------- |\n"

    for prop in sorted(properties, key=lambda p: p['name']):
        prop_name = prop['name']
        # isInTable=True keeps the description on a single table row.
        prop_description = correct_description(prop.get('description', 'No description provided.'), root, True)
        prop_types = prop['type']['names'] if prop.get('type') else []
        types_md = generate_data_types_markdown(prop_types, enumerations, classes, root)
        content += f"| {prop_name} | {types_md} | {prop_description} |\n"

    # Escape MDX-unsafe characters outside fenced code blocks.
    return escape_text_outside_code_blocks(content)
|
||||
|
||||
def generate_enumeration_markdown(enumeration, enumerations, classes, example_editor_name):
    """Build the Markdown page for a typedef doclet.

    Returns None (page suppressed) when the typedef was never referenced
    (not in used_enumerations) or when a union renders no visible values.
    Rendering the values of a union may add further names to
    used_enumerations, which is why process_doclets iterates to a fixed
    point before writing files.
    """
    enum_name = enumeration['name']

    # Only emit pages for typedefs actually referenced somewhere.
    if enum_name not in used_enumerations:
        return None

    description = enumeration.get('description', 'No description provided.')
    description = correct_description(description, '../')
    example = enumeration.get('example', '')

    content = f"# {enum_name}\n\n{description}\n\n"

    ptype = enumeration['type']['parsedType']
    if ptype['type'] == 'TypeUnion':
        enum_empty = True  # stays True while no union member has been rendered

        content += f"## {get_translation(f"Type")}\n\n{get_translation(f"Enumeration")}\n\n"
        content += f"## {get_translation(f"Values")}\n\n"
        # Each top-level name in the union
        for raw_t in enumeration['type']['names']:
            ts_t = convert_jsdoc_array_to_ts(raw_t)

            # Attempt linking: we compare the raw type to enumerations/classes
            if any(enum['name'] == raw_t for enum in enumerations):
                used_enumerations.add(raw_t)
                content += f"- [{ts_t}](../Enumeration/{raw_t}.md)\n"
                enum_empty = False
            elif raw_t in classes:
                content += f"- [{ts_t}](../{raw_t}/{raw_t}.md)\n"
                enum_empty = False
            elif ts_t.find('Api') == -1:
                # Unlinked Api* members are skipped; anything else is plain text.
                content += f"- {ts_t}\n"
                enum_empty = False

        # A union whose members all got filtered out produces no page.
        if enum_empty == True:
            return None
    elif enumeration['properties'] is not None:
        # Object-shaped typedef: render its properties table.
        content += f"## {get_translation(f"Type")}\n\n{get_translation(f"Object")}\n\n"
        content += generate_properties_markdown(enumeration['properties'], enumerations, classes)
    else:
        content += f"## {get_translation(f"Type")}\n\n"
        # If it's not a union and has no properties, simply print the type(s).
        types = enumeration['type']['names']
        t_md = generate_data_types_markdown(types, enumerations, classes, '../')
        content += t_md + "\n\n"

    # Example: either "comment ```js code" or bare code.
    if example:
        if '```js' in example:
            comment, code = example.split('```js', 1)
            comment = get_translation(comment.strip())
            content += f"\n\n## {get_translation(f"Example")}\n\n{comment}\n\n```javascript {example_editor_name}\n{code.strip()}\n"
        else:
            # If there's no triple-backtick structure
            cleaned_example = remove_js_comments(example)
            content += f"\n\n## {get_translation(f"Example")}\n\n```javascript {example_editor_name}\n{cleaned_example}\n```\n"

    return escape_text_outside_code_blocks(content)
|
||||
|
||||
def process_doclets(data, output_dir, editor_name):
    """Turn one editor's doclet dump into a Markdown tree under
    output_dir/<editor folder>/: one folder per class (class page plus a
    Methods/ page per method) and an Enumeration/ folder for typedefs.

    Side effects: sets the module-global cur_editor_name, appends to
    missing_examples, and grows used_enumerations while rendering.
    """
    global cur_editor_name
    cur_editor_name = editor_name

    classes = {}         # class name -> list of its method doclets
    classes_props = {}   # class name -> the class doclet's properties (or None)
    enumerations = []    # typedef doclets
    editor_dir = os.path.join(output_dir, editors[editor_name])
    # Fence label for example code blocks, e.g. "editor-docx".
    example_editor_name = 'editor-'

    if editor_name == 'word':
        example_editor_name += 'docx'
    elif editor_name == 'forms':
        example_editor_name += 'forms'
    elif editor_name == 'slide':
        example_editor_name += 'pptx'
    elif editor_name == 'cell':
        example_editor_name += 'xlsx'
    elif editor_name == 'pdf':
        example_editor_name += 'pdf'

    # Bucket the doclets: classes, their member functions, and typedefs.
    for doclet in data:
        if doclet['kind'] == 'class':
            class_name = doclet['name']
            if class_name:
                if class_name not in classes:
                    classes[class_name] = []
                classes_props[class_name] = doclet.get('properties', None)
        elif doclet['kind'] == 'function':
            class_name = doclet.get('memberof')
            if class_name:
                if class_name not in classes:
                    classes[class_name] = []
                classes[class_name].append(doclet)
        elif doclet['kind'] == 'typedef':
            enumerations.append(doclet)

    # Process classes
    for class_name, methods in classes.items():
        # Skip classes that declare no methods at all.
        if (len(methods) == 0):
            continue

        class_dir = os.path.join(editor_dir, class_name)
        methods_dir = os.path.join(class_dir, 'Methods')
        os.makedirs(methods_dir, exist_ok=True)

        # Write class file
        class_content = generate_class_markdown(
            class_name,
            methods,
            classes_props[class_name],
            enumerations,
            classes
        )
        write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content)

        # Write method files
        for method in methods:
            method_file_path = os.path.join(methods_dir, f"{method['name']}.md")
            method_content = generate_method_markdown(method, enumerations, classes, example_editor_name)
            write_markdown_file(method_file_path, method_content)

            if not method.get('example', ''):
                missing_examples.append(os.path.relpath(method_file_path, output_dir))

    # Process enumerations
    enum_dir = os.path.join(editor_dir, 'Enumeration')
    os.makedirs(enum_dir, exist_ok=True)

    # idle run: rendering a union enum can mark further enums as used, so
    # iterate (discarding the output) until used_enumerations stops growing;
    # only then does the write loop below see the complete set.
    prev_used_count = -1
    while len(used_enumerations) != prev_used_count:
        prev_used_count = len(used_enumerations)
        for enum in [e for e in enumerations if e['name'] in used_enumerations]:
            enum_content = generate_enumeration_markdown(enum, enumerations, classes, example_editor_name)

    for enum in enumerations:
        enum_file_path = os.path.join(enum_dir, f"{enum['name']}.md")
        enum_content = generate_enumeration_markdown(enum, enumerations, classes, example_editor_name)
        # None means the enum is unused or rendered empty — no page.
        if enum_content is None:
            continue

        write_markdown_file(enum_file_path, enum_content)
        if not enum.get('example', ''):
            missing_examples.append(os.path.relpath(enum_file_path, output_dir))
|
||||
|
||||
def generate(output_dir, translations_file):
    """Generate the Markdown documentation tree under *output_dir*.

    Runs the JSON doclet generator into a temporary directory, then converts
    each editor's JSON into Markdown. When *translations_file* is given, its
    contents drive localisation and two report files (missed / unused
    translation keys) are written next to it.

    Args:
        output_dir: Destination root for the generated docs. NOTE(review):
            historically callers pass this with a trailing slash; the temp
            directory name is built by plain concatenation to preserve that
            convention.
        translations_file: Path to a JSON translations file, or None for an
            untranslated build.
    """
    global translations
    global translations_lang
    global global_output_dir
    global_output_dir = output_dir

    if translations_file is not None and os.path.exists(translations_file):
        translations = load_json(translations_file)
        # Language code is taken from the file name, e.g. "de.json" -> "de".
        translations_lang = os.path.splitext(os.path.basename(translations_file))[0]
    else:
        # No (valid) translations supplied: build untranslated docs.
        # NOTE(review): translations_lang is intentionally left as-is here —
        # confirm a module-level default exists before changing this.
        translations = {}

    # Generators resolve resources relative to the script location.
    os.chdir(os.path.dirname(script_path))

    print('Generating Markdown documentation...')

    # Single source of truth for the temp dir. The original code built this
    # path two different ways ("...tmp_json" vs ".../tmp_json"), which broke
    # whenever output_dir lacked a trailing slash.
    tmp_json_dir = output_dir + 'tmp_json'
    generate_docs_json.generate(tmp_json_dir, md=True)

    for editor_name, folder_name in editors.items():
        input_file = os.path.join(tmp_json_dir, editor_name + ".json")

        # Wipe previously generated per-class folders for this editor.
        # (Renamed loop variable: it used to shadow folder_name above.)
        editor_folder_path = os.path.join(output_dir, folder_name)
        for entry_name in os.listdir(editor_folder_path):
            folder_path_to_del = os.path.join(editor_folder_path, entry_name)
            if os.path.isdir(folder_path_to_del):
                shutil.rmtree(folder_path_to_del, ignore_errors=True)

        data = load_json(input_file)
        # Enumeration usage is tracked per editor; reset between editors.
        used_enumerations.clear()
        process_doclets(data, output_dir, editor_name)

    if translations_file is not None:
        target_dir = os.path.dirname(translations_file)

        # Report keys that were looked up but absent from the translations.
        missed_file_path = os.path.join(target_dir, "missed_translations.json")
        print(f'Saving missed translations to: {missed_file_path}')
        with open(missed_file_path, 'w', encoding='utf-8') as f:
            json.dump(missed_translations, f, ensure_ascii=False, indent=4)

        # Report keys present in the translations file but never used.
        unused_keys = set(translations.keys()) - set(used_translations_keys.keys())
        unused_data = {k: translations[k] for k in unused_keys}
        unused_file_path = os.path.join(target_dir, "unused_translations.json")
        print(f'Saving unused translations to: {unused_file_path}')
        with open(unused_file_path, 'w', encoding='utf-8') as f:
            json.dump(unused_data, f, ensure_ascii=False, indent=4)

    # Temp JSON is an intermediate artifact only.
    shutil.rmtree(tmp_json_dir)
    print('Done')
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: optional destination directory plus an optional
    # translations file, then run the generator and report methods/enums
    # that still lack examples (markers are parsed by calling tooling).
    cli = argparse.ArgumentParser(description="Generate documentation")
    cli.add_argument(
        "destination",
        nargs='?',  # optional positional argument
        type=str,
        default=f"{root}/api.onlyoffice.com/site/docs/office-api/usage-api/",
        help="Destination directory for the generated documentation",
    )
    cli.add_argument(
        "--translations",
        nargs='?',
        type=str,
        default=None,
        help="Path to the JSON file with translations",
    )

    parsed = cli.parse_args()

    generate(parsed.destination, parsed.translations)

    # Machine-readable block listing files whose items have no example.
    print("START_MISSING_EXAMPLES")
    print(",".join(missing_examples))
    print("END_MISSING_EXAMPLES")
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user