mirror of
https://github.com/ONLYOFFICE/build_tools.git
synced 2026-04-07 14:06:31 +08:00
Compare commits
763 Commits
v6.0.2.2
...
feature/do
| Author | SHA1 | Date | |
|---|---|---|---|
| 82b9c58ac6 | |||
| 069829dcce | |||
| 7b0d4d3b7e | |||
| 72740c6d09 | |||
| 804aa2d36e | |||
| 8f02dff4d0 | |||
| 43f07c191c | |||
| bbb9e52376 | |||
| 6dd9204dce | |||
| b91b6d5a23 | |||
| 6e836ed988 | |||
| 2d0148df5e | |||
| aee3f4879e | |||
| be11cbabeb | |||
| 41343b310b | |||
| 06ce6912a2 | |||
| 22eca507a4 | |||
| 9195f92b13 | |||
| 200b4625d8 | |||
| dc5b10f5ce | |||
| c50ac3620d | |||
| 60147414b4 | |||
| 60110fa69a | |||
| 1521310097 | |||
| a2dc8256cb | |||
| 9ba2c2a792 | |||
| 0b55af5455 | |||
| e1fcf60f29 | |||
| 151c691af2 | |||
| 9f00f08c30 | |||
| 3e2c03d3a3 | |||
| cd1c420fae | |||
| c4d592be20 | |||
| 808e470b27 | |||
| 597b8a67e2 | |||
| f21689f8dd | |||
| 9bd3f170e5 | |||
| 34e9c614b8 | |||
| 960db59935 | |||
| d57efcf0fe | |||
| d8ac434e7e | |||
| 6907fadce3 | |||
| fc05ba6f4d | |||
| 3c6d7edea0 | |||
| 908f2efd43 | |||
| 329ba4a62d | |||
| feac842b8a | |||
| 2916e4e625 | |||
| b8bee2a9fe | |||
| 25b6af331e | |||
| 65e9994963 | |||
| cd8ced38f2 | |||
| f6e35f7250 | |||
| 29299704aa | |||
| ba5a532da0 | |||
| ab838ae3ba | |||
| 4dedb18137 | |||
| 0c18cbc758 | |||
| c012a8045f | |||
| 536b64a63d | |||
| 6b6b91c083 | |||
| d4cd2d83d4 | |||
| 606b73d92f | |||
| 75543fe126 | |||
| 41e5f53c45 | |||
| 626efceaee | |||
| 9d0596089d | |||
| 9d17f14fbb | |||
| 2c407117dd | |||
| 6667c03ff6 | |||
| 91b75fcae5 | |||
| 048a54716f | |||
| 694d562a80 | |||
| a12f5dba9f | |||
| 7841606a41 | |||
| cf67d1cb77 | |||
| 255ecd64b2 | |||
| 96913b568f | |||
| 9c046cf10f | |||
| 68367474d0 | |||
| ab77f6d936 | |||
| 8dadf0dada | |||
| f074914f1b | |||
| 110981066e | |||
| 6195485cc7 | |||
| f13471428c | |||
| 55f1a05d17 | |||
| 173b81c288 | |||
| 1269d0234d | |||
| 62a8e2f72a | |||
| 0307890bf3 | |||
| 998daaa8d0 | |||
| 0b4faf9c80 | |||
| bafeadd809 | |||
| ecab59b715 | |||
| 0edb21a44b | |||
| 652fa57245 | |||
| 108f7bd8f7 | |||
| fce06d28a2 | |||
| 62169f91db | |||
| 2d2f1ec7d1 | |||
| 3a60d08eb3 | |||
| 04f8f175b9 | |||
| c687a4ae5b | |||
| c19c692ace | |||
| 8e71fa736b | |||
| e76fc53e85 | |||
| dc548da9eb | |||
| c618c0a6c3 | |||
| 6e4c75144a | |||
| c1f7e8f471 | |||
| fe098a7ee7 | |||
| aced6c5119 | |||
| abe9b200c9 | |||
| 8b542376c5 | |||
| b59df7faec | |||
| c9c516daf2 | |||
| 94cd21189e | |||
| 4e07941e7a | |||
| a2fcf85e3b | |||
| e830cb9141 | |||
| 9bf3985fb2 | |||
| 59ad11b0f4 | |||
| ca7d92703e | |||
| 75109ea476 | |||
| cd040fc148 | |||
| 6b62d86151 | |||
| 56c6ff289e | |||
| 30d331b16e | |||
| 7f41b96e07 | |||
| f8216e4f6a | |||
| 2e9a66c70c | |||
| 41d2dfce6a | |||
| 0a712e3a68 | |||
| 7455472856 | |||
| 0eca5a6fba | |||
| 8ab3f20eb8 | |||
| 634119f66a | |||
| 28718191d6 | |||
| f5f40c4746 | |||
| f6f832a7dd | |||
| e50882881d | |||
| 2937163371 | |||
| 0e6af8fc1b | |||
| 4da15cc3bd | |||
| 96312f29e3 | |||
| 32e2956346 | |||
| be820dc843 | |||
| 1a31d76034 | |||
| 73f0fb77bf | |||
| 263c857ca2 | |||
| 21e2a8f72e | |||
| 762eec7bf2 | |||
| ad388af712 | |||
| 6b46c5d2b2 | |||
| 370fa31c11 | |||
| 29f5c6e111 | |||
| 0e4134b5f8 | |||
| 9e6cd77650 | |||
| 101bbebbe6 | |||
| 2c27efe936 | |||
| 6404e71e22 | |||
| 575f835475 | |||
| ad2cdeebd8 | |||
| 66591ea617 | |||
| e5f4ee1555 | |||
| b1576abb74 | |||
| 35aa3e8ee2 | |||
| 9b0c7a1008 | |||
| ebccbfbbe2 | |||
| 48cc6e7f5a | |||
| e921585baa | |||
| 9662f10652 | |||
| 9b374f2683 | |||
| a5eceac4c5 | |||
| 1d26beacfb | |||
| e0352bdc4a | |||
| 1d50755d9b | |||
| 200a3c698a | |||
| 3b3fa59307 | |||
| 7530a20cd8 | |||
| 9ac28dfb65 | |||
| 5b265245a4 | |||
| 9e974d30db | |||
| f3145e0d06 | |||
| 72a9c18b94 | |||
| 7b8f8184b0 | |||
| 254b413617 | |||
| efdd2ce743 | |||
| 14522ee010 | |||
| b917e0b8d5 | |||
| d3d53b983a | |||
| 2a3b6d0ebb | |||
| 042bbb364f | |||
| 2bc9e29e4b | |||
| d7b3d00e82 | |||
| 7116d8b916 | |||
| cc84676070 | |||
| 206c1da862 | |||
| e575effb05 | |||
| f01c20dee1 | |||
| 0f2a340c78 | |||
| be58f39505 | |||
| 9976df1185 | |||
| 6140cec905 | |||
| 53f5910f9b | |||
| 7db4a48565 | |||
| afbe93c5cb | |||
| 6c7b08606d | |||
| 75886ff835 | |||
| dfd74fbd72 | |||
| e0a597fc4c | |||
| ae28bd21d6 | |||
| e1ef29d9a6 | |||
| 554e6489e5 | |||
| 25030391ed | |||
| be06b3c2c8 | |||
| 5ab1995a23 | |||
| 287d0e3612 | |||
| 421ba72564 | |||
| bb6ad49a1c | |||
| aef52e14b5 | |||
| 582d8f54f4 | |||
| 9543f3fced | |||
| 6207b46e4f | |||
| c7ddae7a62 | |||
| 7e9823614f | |||
| e93ad56f5b | |||
| 2dc02a5bbf | |||
| bfcc577df1 | |||
| ae65dff284 | |||
| 773fca881b | |||
| a8a3487749 | |||
| 1505f17d6e | |||
| 874d749901 | |||
| b1a433f267 | |||
| e31c77d42c | |||
| c6e7679c63 | |||
| 4cb42515c7 | |||
| 43a5557200 | |||
| 1dec5f5b9a | |||
| 140b418b1e | |||
| e06a076e30 | |||
| e421a16469 | |||
| c01e200a89 | |||
| b339a50057 | |||
| 64425cc1e1 | |||
| 82187cf17b | |||
| bc3a6cd4e9 | |||
| 5b54f203b6 | |||
| 60a01bd455 | |||
| 623d1a7223 | |||
| d6facc48bf | |||
| 902505bf64 | |||
| d2a12e0445 | |||
| ca2c87f366 | |||
| fc4ffca529 | |||
| 9b8d7f94d5 | |||
| ad1c0d559f | |||
| 3d793be2a2 | |||
| 41122103ee | |||
| 6d1d674d5a | |||
| 89a5ad9498 | |||
| fb4f80589e | |||
| f3688dd303 | |||
| 0ba0f82141 | |||
| 0e876386e0 | |||
| 7a115ed3b4 | |||
| a497049352 | |||
| 43e0545287 | |||
| 4f7b992e64 | |||
| 83a842037b | |||
| c836ff9bd5 | |||
| 91e0b2089b | |||
| 11c80866c3 | |||
| 203fa4a554 | |||
| c8351fdb89 | |||
| 27ac97d2e0 | |||
| 0afe45b296 | |||
| 18f705ba7b | |||
| 77c25b06dc | |||
| fdcfc9f872 | |||
| bc005c4e5f | |||
| 1ae3df953c | |||
| 26f17b832a | |||
| 56cc1e505e | |||
| a06b1a3eae | |||
| 1008de5dcf | |||
| 7e5a509c32 | |||
| 927ed626a2 | |||
| 2dd8142b9d | |||
| 74c15af762 | |||
| 72ae1f18fd | |||
| f5dfc7f03b | |||
| 50b3fac2d2 | |||
| b5ad4ae8a7 | |||
| e1132269e5 | |||
| 2c43b6e49a | |||
| 2599806cb1 | |||
| 00d31b2354 | |||
| 9d1d24811b | |||
| cf4cac9482 | |||
| 3537d7c05d | |||
| f2022f2607 | |||
| e58fe4dfa1 | |||
| 3abd32c3db | |||
| 2a48cd4f0b | |||
| a77f59cb91 | |||
| fc4b093d9a | |||
| 4808e770d0 | |||
| 2a34edb488 | |||
| 67f356c388 | |||
| e57e8414ac | |||
| 104c02c61a | |||
| 747ed92e7f | |||
| 23d96dbd4d | |||
| a651981b61 | |||
| d162d24acd | |||
| fa0f24be09 | |||
| 6376218441 | |||
| cff2f7256a | |||
| a7c6f1f81a | |||
| fa23cb1efe | |||
| 74ecbb2bf0 | |||
| 5a32e54b4d | |||
| c090bb7c27 | |||
| 2c6a2e81ec | |||
| 084ca7db1e | |||
| a3da3c0e3b | |||
| 4ec18f6707 | |||
| cf0f2feb29 | |||
| 331291010a | |||
| b35ba7f6d5 | |||
| fefbca490f | |||
| 65eef0eda8 | |||
| 659ef4b804 | |||
| 1e922f0a57 | |||
| 610fb0e711 | |||
| 10b1e1e917 | |||
| d641c85aea | |||
| 0a4aaa0583 | |||
| 5b4ba91288 | |||
| d6e85aa352 | |||
| 156ede73c7 | |||
| 6e0908eb52 | |||
| 1ee486e7e6 | |||
| 191c895d6b | |||
| 1acd5bf8bb | |||
| 86a3dcedd9 | |||
| 1a4e6aa5ab | |||
| bdd4b7ee45 | |||
| 9924fa55dc | |||
| fd24c14da4 | |||
| e15a138ff7 | |||
| 4389f35509 | |||
| 8b2c7cac42 | |||
| b2f22ff710 | |||
| 2edf26d70a | |||
| e0d2229b16 | |||
| cec0423b14 | |||
| c2646afd40 | |||
| 03cf7e26b7 | |||
| 4649f8b854 | |||
| 0d81353be1 | |||
| 0fb05dd5e8 | |||
| a38d97c302 | |||
| c0ead7a4c7 | |||
| 7a142c714f | |||
| e1f0b3bdcc | |||
| c4cf3710a7 | |||
| 22666b8bba | |||
| 811a82a211 | |||
| 84b81d2df0 | |||
| 05fa273558 | |||
| 8b23a074a3 | |||
| c6cc482b92 | |||
| 9ccad7a417 | |||
| 71e4b7b2ef | |||
| 39dd4e3aa4 | |||
| 1a35214b2b | |||
| 0d1f975687 | |||
| 418d8f74cf | |||
| 92f7db0a50 | |||
| e93317ebb2 | |||
| d922e03c95 | |||
| c218aebea6 | |||
| cedb7af539 | |||
| 0b0277923f | |||
| 6e69de0acc | |||
| 54838d542b | |||
| a3a7289d42 | |||
| 130933db36 | |||
| 8030ac6beb | |||
| f8091afb69 | |||
| 0a41abdff7 | |||
| 8381ef6c24 | |||
| 880dc8450b | |||
| 8de2dec4e1 | |||
| fc3013a066 | |||
| dfcde3fef9 | |||
| 278bcb6136 | |||
| 105d69337c | |||
| fc2e7b5382 | |||
| 9a69116d8f | |||
| d5bf7cff40 | |||
| a00f198a33 | |||
| 3d9870e8a3 | |||
| 070c77f11a | |||
| b34e11cfdf | |||
| 25a453fa1e | |||
| 830b01114a | |||
| 48f2d54676 | |||
| c3ab2959bd | |||
| 4e5fe85e6d | |||
| 730b2e5adb | |||
| c28c329085 | |||
| 1e933cefcb | |||
| 5217b7c342 | |||
| ee22cbca1b | |||
| c1e21f9884 | |||
| 74a6da2f57 | |||
| c25c28333a | |||
| fc80745b8d | |||
| b361128ec4 | |||
| fa7dfa2e96 | |||
| f4d6c67431 | |||
| 8fe758e14a | |||
| bb58688b49 | |||
| 676b519f46 | |||
| 626a121eb0 | |||
| c89a54ccf4 | |||
| 23a7b9cc87 | |||
| 351192378d | |||
| aa978f56d1 | |||
| a8b9d2ce3e | |||
| 6e1db8eb00 | |||
| df98984a40 | |||
| 661041b1df | |||
| ff1c0c9b41 | |||
| 0c111433c8 | |||
| e0754402d8 | |||
| 1af7d31285 | |||
| 1fd9566702 | |||
| dd3f9410cb | |||
| 3942c2f69a | |||
| 8c93a0ebae | |||
| e9359c86d2 | |||
| 0519981d24 | |||
| e86f2b3e9f | |||
| d9df09ceec | |||
| afde46bbae | |||
| ee9fed8fe8 | |||
| d26eee685a | |||
| 68a2eb8742 | |||
| d582343874 | |||
| a6c4dcaad3 | |||
| ac071bd62b | |||
| b6260f100a | |||
| 61ac320826 | |||
| 64dce5a060 | |||
| 71692df973 | |||
| e1b94bf226 | |||
| c895e4da83 | |||
| 98f211bd4b | |||
| b41a1b8a67 | |||
| 83cc90fbbd | |||
| 5dac0c57cb | |||
| 4aef611665 | |||
| da28605e63 | |||
| 2a6c42eb1f | |||
| 46fc1d8142 | |||
| aa2df4f6df | |||
| 9ca7fea042 | |||
| 9661121b38 | |||
| e6b8396d2c | |||
| 0df4b93182 | |||
| c20656897d | |||
| 1540675db7 | |||
| 0fb4a31c3c | |||
| f4a3c8dc0f | |||
| c61ba0cb77 | |||
| 407db1a1ae | |||
| 345af89c97 | |||
| bd3568eba0 | |||
| 9486e8ef1c | |||
| 9cb14b295b | |||
| 686b576588 | |||
| 00cad780a9 | |||
| 1f4748afc4 | |||
| 2abdf201df | |||
| 8a61529d41 | |||
| 46202ce1c7 | |||
| 50d0e4dc61 | |||
| 1cf2095e98 | |||
| faf5d1904f | |||
| 807f5a6bab | |||
| cf5b322852 | |||
| d52d724e66 | |||
| 42ac2b9907 | |||
| 95c41f4a6a | |||
| 0a832f3e5f | |||
| c2550f7f91 | |||
| 917165cc86 | |||
| 12c18895e9 | |||
| 832147f4d2 | |||
| a9ad740efe | |||
| 0b33ff68bf | |||
| d685720dd6 | |||
| c305cf4684 | |||
| 66aa92eab9 | |||
| f254b46efc | |||
| 83bdcb6ada | |||
| b627457c98 | |||
| a739db1854 | |||
| 7e20b9eaa2 | |||
| 8e1ccbf685 | |||
| 5389dffc35 | |||
| 5631c9825e | |||
| 9ff45e3e3c | |||
| 11487b4fd2 | |||
| a8e7b74393 | |||
| 7bb5eba0ea | |||
| 284b7e2f90 | |||
| 4f61bed415 | |||
| c3622cf784 | |||
| 5639a0d5dd | |||
| c07fad0be6 | |||
| efdd77bdf5 | |||
| e16341be8b | |||
| 89061f939e | |||
| 9c956d4703 | |||
| 6c05fd2010 | |||
| aca55603db | |||
| 6e68833d10 | |||
| b3b23e4d47 | |||
| 68030465f7 | |||
| 4c3d162440 | |||
| f09cd28ba3 | |||
| 7cd507d023 | |||
| 68cfbf067d | |||
| 9538741b11 | |||
| 7219c65dcb | |||
| 938019d26f | |||
| c4b81dc438 | |||
| 46bbbd1bc0 | |||
| bb690328a8 | |||
| 043b77e06a | |||
| 9da9dde15b | |||
| 5e7b22b6d7 | |||
| ba83513fa3 | |||
| b20c55aab5 | |||
| c2de074c55 | |||
| 8b8380f4dd | |||
| fe7e3e5af7 | |||
| d75a63d007 | |||
| dcef9dfe25 | |||
| d524938599 | |||
| 4a4e8b7568 | |||
| e37c9bce92 | |||
| 073e60d040 | |||
| 66badba0a2 | |||
| 9e09b2737a | |||
| 6dc374c6bf | |||
| 26ce05f76c | |||
| 1f3f801124 | |||
| 63a6ecd17f | |||
| 55448fcb73 | |||
| 6364de594f | |||
| 119dc6af77 | |||
| 251a5923f2 | |||
| 602f78ab2a | |||
| 4ee720333d | |||
| b403f04bb6 | |||
| dd8145083e | |||
| d17eec062c | |||
| 7e59c568d5 | |||
| e7d609f85c | |||
| 344bf5152a | |||
| e116b9d6f4 | |||
| fc2db8dd7e | |||
| ce748fa881 | |||
| 27e7cc2253 | |||
| 22d8c377df | |||
| ad12cc9a54 | |||
| a9aead5547 | |||
| 1a2d610039 | |||
| 9ec7f2e29a | |||
| baee5f58b8 | |||
| f5d4abd5ac | |||
| 3f72bebfb1 | |||
| 14f857d3a8 | |||
| d95ca411ae | |||
| ebb29f3dd0 | |||
| 6988327dbc | |||
| 4f3694ddb9 | |||
| ff32728095 | |||
| 55a580eb00 | |||
| 7e02786cd3 | |||
| 2a7f301f45 | |||
| 8cba53e8b5 | |||
| e69b69a616 | |||
| 20907e7cab | |||
| b77f139fd2 | |||
| 78ad21d89d | |||
| 88e63978e9 | |||
| e9954f3a47 | |||
| 0861578c14 | |||
| d0738a3585 | |||
| 72b3baf36d | |||
| a8eacdad02 | |||
| 276b2f9c6c | |||
| 8e2b57ea21 | |||
| e49e5a8cc2 | |||
| a18547c771 | |||
| c3eb2e6c9d | |||
| fb6e5efa8b | |||
| 149e928a8c | |||
| fd746856cd | |||
| 410516bf8e | |||
| f8ce636d1a | |||
| 09806db27f | |||
| 4cc0bd8696 | |||
| 25dc0d8937 | |||
| f6105aabd9 | |||
| 827b2d384a | |||
| 62dc58387a | |||
| d78d457064 | |||
| be1368d0df | |||
| 8cccfb95d3 | |||
| fa46b0a8d2 | |||
| 275c1cf63b | |||
| d8522fba35 | |||
| 4ccf7afdac | |||
| 6eab11335c | |||
| c4c2bf341d | |||
| 9085b1965b | |||
| 43142b56b5 | |||
| 3686e77360 | |||
| 15c8159b8d | |||
| b918b0e179 | |||
| 670d255de1 | |||
| af2af9dfc8 | |||
| 8719ac760b | |||
| 82e3a81d72 | |||
| 97d9bbbd1d | |||
| 88ade05b03 | |||
| c1a54ffa7a | |||
| d5813b4506 | |||
| 479db29d51 | |||
| 3461f8c86e | |||
| 87f67c2ed1 | |||
| 0f065be011 | |||
| bc56a130ff | |||
| 2a4bdd43bc | |||
| 08cb8dbe59 | |||
| fe0f63a085 | |||
| f284c9951a | |||
| 8f3fcdfc04 | |||
| de24cdf87c | |||
| 798c1d4c8a | |||
| c0ed15331e | |||
| 02b032e8b8 | |||
| 685a9378f4 | |||
| bb5e60c285 | |||
| 13e9224483 | |||
| 0600e328b2 | |||
| 41a03cedc4 | |||
| d25257abfd | |||
| 23b8c73979 | |||
| cd02fe3ce5 | |||
| 04fb01ac86 | |||
| 0af070e895 | |||
| d62cc2aa74 | |||
| 1ce5f56063 | |||
| 8dcc1e16b4 | |||
| f22853785a | |||
| 0786d40aa3 | |||
| b5deccb8c3 | |||
| e2b83924f2 | |||
| a57f3e7a6e | |||
| 2ceca89488 | |||
| 7ed37328ea | |||
| 4d34ef329e | |||
| 88a9f44409 | |||
| bcb0261944 | |||
| 76ea37dcef | |||
| 632d572dcc | |||
| 0a3415ab9b | |||
| 2213252489 | |||
| 9ab087bb80 | |||
| aee17a98ee | |||
| dd0dec49f8 | |||
| c5fa42edb9 | |||
| 041782b2d9 | |||
| e3f5ee7ed1 | |||
| 001f921199 | |||
| fdb35262d5 | |||
| d3f2a22225 | |||
| 5fed163fad | |||
| 53fff2bbba | |||
| 1c6c44644b | |||
| 619e8004af | |||
| 53a039fcc2 | |||
| f35172aced | |||
| e620f2fd0e | |||
| 3db780f641 | |||
| 42e5be64b0 | |||
| 8b21c54892 | |||
| c6e9dded78 | |||
| f9d7ad4a14 | |||
| 205f1cac11 | |||
| d2ef744fe0 | |||
| 84a55eee4e | |||
| 50e8402dd2 | |||
| f961e91196 | |||
| bf3c8be37d | |||
| 10983f5ba5 | |||
| 5788b5e0e9 | |||
| 7b925419e6 | |||
| 787aae5038 | |||
| b37a57d9c1 | |||
| 0225c5030e | |||
| 60b1a52394 | |||
| d8e1fa0bc4 | |||
| 2d3208ee1f | |||
| 70d30f4db3 | |||
| 1aba51fcc1 | |||
| 90da8d975c | |||
| cf4a1b9545 | |||
| f144f286ca | |||
| 47fb1a4a74 | |||
| 82a728f183 | |||
| b3b2def10c | |||
| f5117abc3f | |||
| be0b236b1d | |||
| 64fb225c90 | |||
| 2f08f7e413 | |||
| c3461c229e | |||
| 9c3a764001 | |||
| 8b60ddfdda | |||
| ffddb2015f | |||
| b562870211 | |||
| e8d77c24f9 | |||
| 68545075fb | |||
| 786b62081b | |||
| 3dece370dd | |||
| 38479b6329 | |||
| 2342a0e57f | |||
| 7be19f6e21 | |||
| 7887d09fbb | |||
| b8475b7560 | |||
| a8b072c830 | |||
| c60a92759a | |||
| a3c0f75db5 | |||
| 4e64d49c88 | |||
| e72c742dcd | |||
| b2657d70fd | |||
| 6bb915f62e | |||
| f545523496 | |||
| a34d1cd413 | |||
| fb09f6799d | |||
| 8e95232846 | |||
| 90e03e5acb |
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
---
|
||||
name: Bug Report
|
||||
about: Report an issue with build_tools you've discovered.
|
||||
---
|
||||
|
||||
**Describe your problem**:
|
||||
|
||||
*Be clear in your description of the problem.
|
||||
Open an issue with a descriptive title and a summary in complete sentences.*
|
||||
|
||||
*Use the template below when reporting bugs. Please, make sure that
|
||||
you're fetched latest master of build_tools and that the problem you're reporting
|
||||
hasn't been reported (and potentially fixed) already.*
|
||||
|
||||
*Before filing the ticket you should replace all cursive text with your own words.*
|
||||
|
||||
**Steps to reproduce the problem**:
|
||||
|
||||
*Please make sure your error can be reproduced, Docker can be a great help*
|
||||
|
||||
**build_tools base repo version**:
|
||||
|
||||
**Operating system**:
|
||||
|
||||
**Compiler version**:
|
||||
21
.github/workflows/check.yml
vendored
Normal file
21
.github/workflows/check.yml
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
name: Markdown check
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths:
|
||||
- '*.md'
|
||||
- 'develop/*.md'
|
||||
|
||||
jobs:
|
||||
markdownlint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: DavidAnson/markdownlint-cli2-action@v9
|
||||
with:
|
||||
command: config
|
||||
globs: |
|
||||
.markdownlint.jsonc
|
||||
*.md
|
||||
develop/*.md
|
||||
33
.github/workflows/update-version.yml
vendored
Normal file
33
.github/workflows/update-version.yml
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
name: Update hard-coded version
|
||||
|
||||
on: workflow_dispatch
|
||||
|
||||
jobs:
|
||||
|
||||
update-version:
|
||||
if: >-
|
||||
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
|
||||
contains(github.ref, 'refs/heads/release/v') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
token: ${{ secrets.PUSH_TOKEN }}
|
||||
|
||||
- name: Set version
|
||||
run: >-
|
||||
echo "${{ github.ref_name }}" |
|
||||
awk '{gsub(/.+\/v/,"version=");print;}' >> $GITHUB_ENV
|
||||
|
||||
- name: Save version
|
||||
run: echo "${{ env.version }}" > version
|
||||
|
||||
- name: Commit & push changes
|
||||
uses: EndBug/add-and-commit@v8
|
||||
with:
|
||||
author_name: github-actions[bot]
|
||||
author_email: github-actions[bot]@users.noreply.github.com
|
||||
message: Update hard-coded version to v${{ env.version }}
|
||||
add: version
|
||||
5
.markdownlint.jsonc
Normal file
5
.markdownlint.jsonc
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"line-length": {
|
||||
"code_block_line_length": 300
|
||||
}
|
||||
}
|
||||
@ -1,8 +0,0 @@
|
||||
language: node_js
|
||||
node_js: 12
|
||||
jobs:
|
||||
include:
|
||||
- stage: markdownlint
|
||||
script:
|
||||
- npm install -g markdownlint-cli
|
||||
- markdownlint *.md
|
||||
@ -1,12 +1,13 @@
|
||||
FROM ubuntu:14.04
|
||||
FROM ubuntu:16.04
|
||||
|
||||
ENV TZ=Etc/UTC
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||
|
||||
RUN apt-get -y update && \
|
||||
apt-get -y install python3 \
|
||||
apt-get -y install python \
|
||||
python3 \
|
||||
sudo
|
||||
|
||||
RUN rm /usr/bin/python && ln -s /usr/bin/python2 /usr/bin/python
|
||||
ADD . /build_tools
|
||||
WORKDIR /build_tools
|
||||
|
||||
|
||||
661
LICENSE.txt
Normal file
661
LICENSE.txt
Normal file
@ -0,0 +1,661 @@
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
17
README.md
17
README.md
@ -13,7 +13,7 @@ the `master` branch.
|
||||
|
||||
## How to use - Linux
|
||||
|
||||
**Note**: The solution has been tested on **Ubuntu 14.04**.
|
||||
**Note**: The solution has been tested on **Ubuntu 16.04**.
|
||||
|
||||
### Installing dependencies
|
||||
|
||||
@ -167,10 +167,6 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
|
||||
proxy_pass http://localhost:8000;
|
||||
proxy_http_version 1.1;
|
||||
}
|
||||
location /spellchecker/ {
|
||||
proxy_pass http://localhost:8080/;
|
||||
proxy_http_version 1.1;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@ -261,16 +257,7 @@ allow to run foreground processes in background mode.
|
||||
./converter
|
||||
```
|
||||
|
||||
2. Start the **SpellChecker** service:
|
||||
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/server/SpellChecker
|
||||
NODE_ENV=development-linux \
|
||||
NODE_CONFIG_DIR=$PWD/../Common/config \
|
||||
./spellchecker
|
||||
```
|
||||
|
||||
3. Start the **DocService** service:
|
||||
2. Start the **DocService** service:
|
||||
|
||||
```bash
|
||||
cd out/linux_64/onlyoffice/documentserver/server/DocService
|
||||
|
||||
3
Rakefile
3
Rakefile
@ -2,8 +2,9 @@
|
||||
|
||||
desc 'Cleanup old build files'
|
||||
task :clean do
|
||||
archive_name_pattern = 'build_tools*.tar.gz'
|
||||
sh('sudo rm -rf out')
|
||||
sh('rm build_tools*.tar.gz')
|
||||
sh("rm #{archive_name_pattern}") unless Dir.glob(archive_name_pattern).empty?
|
||||
end
|
||||
|
||||
desc 'Build version anew'
|
||||
|
||||
383
build.pro
383
build.pro
@ -4,349 +4,124 @@ ROOT_DIR=$$PWD/..
|
||||
DEPLOY_DIR=$$PWD/deploy
|
||||
CORE_ROOT_DIR=$$ROOT_DIR/core
|
||||
|
||||
include($$CORE_ROOT_DIR/Common/base.pri)
|
||||
|
||||
MAKEFILE=makefiles/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX
|
||||
PRO_SUFFIX=$$CORE_BUILDS_PLATFORM_PREFIX
|
||||
|
||||
core_debug {
|
||||
MAKEFILE=$$join(MAKEFILE, MAKEFILE, "", "_debug_")
|
||||
PRO_SUFFIX=$$join(PRO_SUFFIX, PRO_SUFFIX, "", "_debug_")
|
||||
}
|
||||
build_xp {
|
||||
MAKEFILE=$$join(MAKEFILE, MAKEFILE, "", "_xp")
|
||||
PRO_SUFFIX=$$join(PRO_SUFFIX, PRO_SUFFIX, "", "_xp")
|
||||
}
|
||||
OO_BRANDING_SUFFIX = $$(OO_BRANDING)
|
||||
!isEmpty(OO_BRANDING_SUFFIX) {
|
||||
PRO_SUFFIX=$$join(PRO_SUFFIX, PRO_SUFFIX, "", "$$OO_BRANDING_SUFFIX")
|
||||
MAKEFILE=$$join(MAKEFILE, MAKEFILE, "", "$$OO_BRANDING_SUFFIX")
|
||||
}
|
||||
include($$PWD/common.pri)
|
||||
|
||||
CONFIG += ordered
|
||||
|
||||
core_windows {
|
||||
CONFIG += core_and_multimedia
|
||||
desktop:CONFIG += core_and_multimedia
|
||||
}
|
||||
core_linux {
|
||||
CONFIG += core_and_multimedia
|
||||
desktop:CONFIG += core_and_multimedia
|
||||
}
|
||||
core_mac {
|
||||
CONFIG += no_use_htmlfileinternal
|
||||
CONFIG += no_desktop_apps
|
||||
}
|
||||
build_xp {
|
||||
CONFIG += no_use_htmlfileinternal
|
||||
}
|
||||
core_ios {
|
||||
CONFIG += no_use_htmlfileinternal
|
||||
CONFIG += no_use_common_binary
|
||||
CONFIG += no_desktop_apps
|
||||
CONFIG += no_tests
|
||||
}
|
||||
core_android {
|
||||
CONFIG += no_use_htmlfileinternal
|
||||
CONFIG += no_use_common_binary
|
||||
CONFIG += no_desktop_apps
|
||||
CONFIG += no_tests
|
||||
}
|
||||
|
||||
SUBDIRS = \
|
||||
cryptopp \
|
||||
\
|
||||
kernel \
|
||||
unicodeconverter \
|
||||
graphics \
|
||||
pdfwriter \
|
||||
djvufile \
|
||||
xpsfile \
|
||||
htmlrenderer \
|
||||
pdfreader \
|
||||
htmlfile \
|
||||
doctrenderer
|
||||
|
||||
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
|
||||
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
|
||||
cryptopp)
|
||||
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
|
||||
unicodeconverter)
|
||||
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
|
||||
kernel unicodeconverter)
|
||||
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
|
||||
kernel unicodeconverter)
|
||||
addSubProject(pdffile, $$CORE_ROOT_DIR/PdfFile/PdfFile.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
|
||||
kernel unicodeconverter graphics pdffile)
|
||||
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
|
||||
kernel unicodeconverter graphics pdffile)
|
||||
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
|
||||
kernel unicodeconverter graphics network)
|
||||
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
|
||||
kernel unicodeconverter graphics htmlfile2)
|
||||
!no_x2t {
|
||||
SUBDIRS += \
|
||||
docxformat \
|
||||
pptxformat \
|
||||
docxfile \
|
||||
txtxmlformat \
|
||||
rtfformat \
|
||||
pptformat \
|
||||
docformat \
|
||||
odffilereader \
|
||||
odffilewriter \
|
||||
xlsformat \
|
||||
x2t
|
||||
}
|
||||
addSubProject(docxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/DocxFormatLib/DocxFormatLib.pro)
|
||||
addSubProject(pptxformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/PPTXFormatLib/PPTXFormatLib.pro)
|
||||
addSubProject(xlsbformat, $$CORE_ROOT_DIR/OOXML/Projects/Linux/XlsbFormatLib/XlsbFormatLib.pro)
|
||||
|
||||
!no_use_htmlfileinternal {
|
||||
SUBDIRS += htmlfileinternal
|
||||
}
|
||||
addSubProject(docformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/DocFormatLib/Linux/DocFormatLib.pro)
|
||||
addSubProject(pptformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/PPTFormatLib/Linux/PPTFormatLib.pro)
|
||||
addSubProject(xlsformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/XlsFormatLib/Linux/XlsFormatLib.pro)
|
||||
addSubProject(vbaformat, $$CORE_ROOT_DIR/MsBinaryFile/Projects/VbaFormatLib/Linux/VbaFormatLib.pro)
|
||||
|
||||
!no_use_common_binary {
|
||||
SUBDIRS += \
|
||||
allfontsgen \
|
||||
allthemesgen \
|
||||
docbuilder
|
||||
}
|
||||
|
||||
core_ios:CONFIG += no_tests
|
||||
core_android:CONFIG += no_tests
|
||||
!no_tests {
|
||||
SUBDIRS += standardtester
|
||||
}
|
||||
|
||||
core_and_multimedia {
|
||||
SUBDIRS += \
|
||||
videoplayer
|
||||
}
|
||||
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/TxtFile/Projects/Linux/TxtXmlFormatLib.pro)
|
||||
addSubProject(rtfformat, $$CORE_ROOT_DIR/RtfFile/Projects/Linux/RtfFormatLib.pro)
|
||||
addSubProject(odffile, $$CORE_ROOT_DIR/OdfFile/Projects/Linux/OdfFormatLib.pro)
|
||||
|
||||
desktop {
|
||||
message(desktop)
|
||||
addSubProject(cfcpp, $$CORE_ROOT_DIR/Common/cfcpp/cfcpp.pro)
|
||||
addSubProject(bindocument, $$CORE_ROOT_DIR/OOXML/Projects/Linux/BinDocument/BinDocument.pro)
|
||||
|
||||
SUBDIRS += \
|
||||
hunspell \
|
||||
ooxmlsignature \
|
||||
documentscore \
|
||||
documentscore_helper
|
||||
|
||||
!core_mac {
|
||||
SUBDIRS += qtdocumentscore
|
||||
}
|
||||
|
||||
!no_desktop_apps {
|
||||
core_windows:SUBDIRS += projicons
|
||||
SUBDIRS += desktopapp
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ordered {
|
||||
# remove all makefiles
|
||||
|
||||
defineTest(removeFile) {
|
||||
file = $$1
|
||||
win32:file ~= s,/,\\,g
|
||||
core_windows {
|
||||
system(if exist $$shell_quote($$file) $$QMAKE_DEL_FILE $$shell_quote($$file) $$escape_expand(\\n\\t))
|
||||
} else {
|
||||
system($$QMAKE_DEL_FILE $$shell_quote($$file) $$escape_expand(\\n\\t))
|
||||
}
|
||||
}
|
||||
|
||||
removeFile($$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/Makefile.cryptopp$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/Common/Makefile.kernel$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/UnicodeConverter/Makefile.UnicodeConverter$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/graphics/pro/Makefile.graphics$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/PdfWriter/Makefile.PdfWriter$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/DjVuFile/Makefile.DjVuFile$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/XpsFile/Makefile.XpsFile$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/HtmlRenderer/Makefile.htmlrenderer$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/PdfReader/Makefile.PdfReader$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/HtmlFile/Makefile.HtmlFile$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/doctrenderer/Makefile.doctrenderer$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-sdk/HtmlFile/Internal/Makefile.Internal$$PRO_SUFFIX)
|
||||
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/Makefile.AllFontsGen$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/allthemesgen/Makefile.allthemesgen$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/Makefile.docbuilder$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/Test/Applications/StandardTester/Makefile.standardtester$$PRO_SUFFIX)
|
||||
|
||||
removeFile($$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/Makefile.DocxFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficePPTXFile/PPTXLib/Linux/PPTXFormatLib/Makefile.PPTXFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeDocxFile2/Linux/Makefile.ASCOfficeDocxFile2Lib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeTxtFile/TxtXmlFormatLib/Linux/Makefile.TxtXmlFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeRtfFile/RtfFormatLib/Linux/Makefile.RtfFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficePPTFile/PPTFormatLib/Linux/Makefile.PPTFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeDocFile/DocFormatLib/Linux/Makefile.DocFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeOdfFile/linux/Makefile.OdfFileReaderLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeOdfFileW/linux/Makefile.OdfFileWriterLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/Makefile.XlsFormatLib$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/X2tConverter/build/Qt/Makefile.X2tConverter$$PRO_SUFFIX)
|
||||
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/hunspell-1.3.3/src/qt/Makefile.hunspell$$PRO_SUFFIX)
|
||||
removeFile($$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/Makefile.ooxmlsignature$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/Makefile.ascdocumentscore$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/Makefile.ascdocumentscore_helper$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/Makefile.qtascdocumentscore$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/Makefile.videoplayerlib$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-apps/win-linux/extras/projicons/Makefile.ProjIcons$$PRO_SUFFIX)
|
||||
removeFile($$ROOT_DIR/desktop-apps/win-linux/Makefile.ASCDocumentEditor$$PRO_SUFFIX)
|
||||
}
|
||||
|
||||
# PROJECTS
|
||||
cryptopp.file = $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro
|
||||
cryptopp.makefile = $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/Makefile.cryptopp$$PRO_SUFFIX
|
||||
|
||||
kernel.file = $$CORE_ROOT_DIR/Common/kernel.pro
|
||||
kernel.makefile = $$CORE_ROOT_DIR/Common/Makefile.kernel$$PRO_SUFFIX
|
||||
|
||||
unicodeconverter.file = $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro
|
||||
unicodeconverter.makefile = $$CORE_ROOT_DIR/UnicodeConverter/Makefile.UnicodeConverter$$PRO_SUFFIX
|
||||
|
||||
graphics.file = $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro
|
||||
graphics.makefile = $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/Makefile.graphics$$PRO_SUFFIX
|
||||
|
||||
pdfwriter.file = $$CORE_ROOT_DIR/PdfWriter/PdfWriter.pro
|
||||
pdfwriter.makefile = $$CORE_ROOT_DIR/PdfWriter/Makefile.PdfWriter$$PRO_SUFFIX
|
||||
|
||||
djvufile.file = $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro
|
||||
djvufile.makefile = $$CORE_ROOT_DIR/DjVuFile/Makefile.DjVuFile$$PRO_SUFFIX
|
||||
|
||||
xpsfile.file = $$CORE_ROOT_DIR/XpsFile/XpsFile.pro
|
||||
xpsfile.makefile = $$CORE_ROOT_DIR/XpsFile/Makefile.XpsFile$$PRO_SUFFIX
|
||||
|
||||
htmlrenderer.file = $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro
|
||||
htmlrenderer.makefile = $$CORE_ROOT_DIR/HtmlRenderer/Makefile.htmlrenderer$$PRO_SUFFIX
|
||||
|
||||
pdfreader.file = $$CORE_ROOT_DIR/PdfReader/PdfReader.pro
|
||||
pdfreader.makefile = $$CORE_ROOT_DIR/PdfReader/Makefile.PdfReader$$PRO_SUFFIX
|
||||
|
||||
htmlfile.file = $$CORE_ROOT_DIR/HtmlFile/HtmlFile.pro
|
||||
htmlfile.makefile = $$CORE_ROOT_DIR/HtmlFile/Makefile.HtmlFile$$PRO_SUFFIX
|
||||
|
||||
doctrenderer.file = $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro
|
||||
doctrenderer.makefile = $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/Makefile.doctrenderer$$PRO_SUFFIX
|
||||
|
||||
!no_use_htmlfileinternal {
|
||||
htmlfileinternal.file = $$ROOT_DIR/desktop-sdk/HtmlFile/Internal/Internal.pro
|
||||
htmlfileinternal.makefile = $$ROOT_DIR/desktop-sdk/HtmlFile/Internal/Makefile.Internal$$PRO_SUFFIX
|
||||
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
|
||||
docxformat pptxformat xlsbformat docformat pptformat xlsformat vbaformat txtxmlformat rtfformat odffile cfcpp bindocument fb2file epubfile docxrenderer)
|
||||
}
|
||||
|
||||
!no_use_common_binary {
|
||||
allfontsgen.file = $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro
|
||||
allfontsgen.makefile = $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/Makefile.AllFontsGen$$PRO_SUFFIX
|
||||
|
||||
allthemesgen.file = $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro
|
||||
allthemesgen.makefile = $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/Makefile.allthemesgen$$PRO_SUFFIX
|
||||
|
||||
docbuilder.file = $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro
|
||||
docbuilder.makefile = $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/Makefile.docbuilder$$PRO_SUFFIX
|
||||
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
|
||||
kernel unicodeconverter graphics doctrenderer)
|
||||
}
|
||||
|
||||
!no_tests {
|
||||
standardtester.file = $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro
|
||||
standardtester.makefile = $$CORE_ROOT_DIR/Test/Applications/StandardTester/Makefile.standardtester$$PRO_SUFFIX
|
||||
}
|
||||
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
|
||||
addSubProject(x2ttester, $$CORE_ROOT_DIR/Test/Applications/x2tTester/x2ttester.pro)
|
||||
|
||||
!no_x2t {
|
||||
docxformat.file = $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/DocxFormatLib.pro
|
||||
docxformat.makefile = $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/Makefile.DocxFormatLib$$PRO_SUFFIX
|
||||
|
||||
pptxformat.file = $$CORE_ROOT_DIR/ASCOfficePPTXFile/PPTXLib/Linux/PPTXFormatLib/PPTXFormatLib.pro
|
||||
pptxformat.makefile = $$CORE_ROOT_DIR/ASCOfficePPTXFile/PPTXLib/Linux/PPTXFormatLib/Makefile.PPTXFormatLib$$PRO_SUFFIX
|
||||
|
||||
docxfile.file = $$CORE_ROOT_DIR/ASCOfficeDocxFile2/Linux/ASCOfficeDocxFile2Lib.pro
|
||||
docxfile.makefile = $$CORE_ROOT_DIR/ASCOfficeDocxFile2/Linux/Makefile.ASCOfficeDocxFile2Lib$$PRO_SUFFIX
|
||||
|
||||
txtxmlformat.file = $$CORE_ROOT_DIR/ASCOfficeTxtFile/TxtXmlFormatLib/Linux/TxtXmlFormatLib.pro
|
||||
txtxmlformat.makefile = $$CORE_ROOT_DIR/ASCOfficeTxtFile/TxtXmlFormatLib/Linux/Makefile.TxtXmlFormatLib$$PRO_SUFFIX
|
||||
|
||||
rtfformat.file = $$CORE_ROOT_DIR/ASCOfficeRtfFile/RtfFormatLib/Linux/RtfFormatLib.pro
|
||||
rtfformat.makefile = $$CORE_ROOT_DIR/ASCOfficeRtfFile/RtfFormatLib/Linux/Makefile.RtfFormatLib$$PRO_SUFFIX
|
||||
|
||||
pptformat.file = $$CORE_ROOT_DIR/ASCOfficePPTFile/PPTFormatLib/Linux/PPTFormatLib.pro
|
||||
pptformat.makefile = $$CORE_ROOT_DIR/ASCOfficePPTFile/PPTFormatLib/Linux/Makefile.PPTFormatLib$$PRO_SUFFIX
|
||||
|
||||
docformat.file = $$CORE_ROOT_DIR/ASCOfficeDocFile/DocFormatLib/Linux/DocFormatLib.pro
|
||||
docformat.makefile = $$CORE_ROOT_DIR/ASCOfficeDocFile/DocFormatLib/Linux/Makefile.DocFormatLib$$PRO_SUFFIX
|
||||
|
||||
odffilereader.file = $$CORE_ROOT_DIR/ASCOfficeOdfFile/linux/OdfFileReaderLib.pro
|
||||
odffilereader.makefile = $$CORE_ROOT_DIR/ASCOfficeOdfFile/linux/Makefile.OdfFileReaderLib$$PRO_SUFFIX
|
||||
|
||||
odffilewriter.file = $$CORE_ROOT_DIR/ASCOfficeOdfFileW/linux/OdfFileWriterLib.pro
|
||||
odffilewriter.makefile = $$CORE_ROOT_DIR/ASCOfficeOdfFileW/linux/Makefile.OdfFileWriterLib$$PRO_SUFFIX
|
||||
|
||||
xlsformat.file = $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/XlsFormatLib.pro
|
||||
xlsformat.makefile = $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/Makefile.XlsFormatLib$$PRO_SUFFIX
|
||||
|
||||
x2t.file = $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro
|
||||
x2t.makefile = $$CORE_ROOT_DIR/X2tConverter/build/Qt/Makefile.X2tConverter$$PRO_SUFFIX
|
||||
}
|
||||
|
||||
desktop {
|
||||
hunspell.file = $$CORE_ROOT_DIR/DesktopEditor/hunspell-1.3.3/src/qt/hunspell.pro
|
||||
hunspell.makefile = $$CORE_ROOT_DIR/DesktopEditor/hunspell-1.3.3/src/qt/Makefile.hunspell$$PRO_SUFFIX
|
||||
|
||||
ooxmlsignature.file = $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro
|
||||
ooxmlsignature.makefile = $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/Makefile.ooxmlsignature$$PRO_SUFFIX
|
||||
|
||||
documentscore.file = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro
|
||||
documentscore.makefile = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/Makefile.ascdocumentscore$$PRO_SUFFIX
|
||||
|
||||
documentscore_helper.file = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro
|
||||
documentscore_helper.makefile = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/Makefile.ascdocumentscore_helper$$PRO_SUFFIX
|
||||
|
||||
!core_mac {
|
||||
qtdocumentscore.file = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro
|
||||
qtdocumentscore.makefile = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/Makefile.qtascdocumentscore$$PRO_SUFFIX
|
||||
}
|
||||
|
||||
!no_desktop_apps {
|
||||
core_windows {
|
||||
projicons.file = $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro
|
||||
projicons.makefile = $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/Makefile.ProjIcons$$PRO_SUFFIX
|
||||
}
|
||||
desktopapp.file = $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro
|
||||
desktopapp.makefile = $$ROOT_DIR/desktop-apps/win-linux/Makefile.ASCDocumentEditor$$PRO_SUFFIX
|
||||
}
|
||||
#TODO:
|
||||
!linux_arm64:addSubProject(ooxml_crypt, $$CORE_ROOT_DIR/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro)
|
||||
}
|
||||
|
||||
core_and_multimedia {
|
||||
videoplayer.file = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro
|
||||
videoplayer.makefile = $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/Makefile.videoplayerlib$$PRO_SUFFIX
|
||||
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
}
|
||||
|
||||
# DEPENDS
|
||||
kernel.depends = cryptopp
|
||||
graphics.depends = kernel unicodeconverter
|
||||
pdfwriter.depends = kernel unicodeconverter graphics
|
||||
djvufile.depends = kernel unicodeconverter graphics pdfwriter
|
||||
xpsfile.depends = kernel unicodeconverter graphics pdfwriter
|
||||
htmlrenderer.depends = kernel unicodeconverter graphics pdfwriter
|
||||
pdfreader.depends = kernel unicodeconverter graphics pdfwriter htmlrenderer
|
||||
htmlfile.depends = kernel unicodeconverter graphics
|
||||
doctrenderer.depends = kernel unicodeconverter graphics
|
||||
|
||||
!no_use_htmlfileinternal {
|
||||
htmlfileinternal.depends = kernel unicodeconverter graphics
|
||||
}
|
||||
|
||||
!no_use_common_binary {
|
||||
allfontsgen.depends = kernel unicodeconverter graphics
|
||||
allthemesgen.depends = kernel unicodeconverter graphics
|
||||
|
||||
docbuilder.depends = kernel unicodeconverter graphics doctrenderer
|
||||
}
|
||||
|
||||
core_and_multimedia {
|
||||
videoplayer.depends = kernel unicodeconverter graphics
|
||||
}
|
||||
|
||||
desktop {
|
||||
ooxmlsignature.depends = kernel unicodeconverter graphics
|
||||
documentscore.depends = kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdfwriter pdfreader djvufile xpsfile
|
||||
documentscore_helper.depends = documentscore
|
||||
|
||||
message(desktop)
|
||||
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
|
||||
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
|
||||
kernel unicodeconverter graphics)
|
||||
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
|
||||
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdffile djvufile xpsfile)
|
||||
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
|
||||
documentscore)
|
||||
!core_mac {
|
||||
qtdocumentscore.depends = documentscore
|
||||
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
|
||||
documentscore)
|
||||
}
|
||||
|
||||
|
||||
!no_desktop_apps {
|
||||
core_windows:projicons.depends = documentscore videoplayer
|
||||
desktopapp.depends = documentscore videoplayer
|
||||
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
|
||||
documentscore videoplayer)
|
||||
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
|
||||
documentscore videoplayer)
|
||||
}
|
||||
}
|
||||
|
||||
!no_x2t {
|
||||
x2t.depends = \
|
||||
docxformat \
|
||||
pptxformat \
|
||||
docxfile \
|
||||
txtxmlformat \
|
||||
rtfformat \
|
||||
pptformat \
|
||||
docformat \
|
||||
odffilereader \
|
||||
odffilewriter \
|
||||
xlsformat
|
||||
}
|
||||
mobile {
|
||||
message(mobile)
|
||||
!desktop {
|
||||
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
|
||||
}
|
||||
}
|
||||
|
||||
69
common.pri
Normal file
69
common.pri
Normal file
@ -0,0 +1,69 @@
|
||||
# must setup CORE_ROOT_DIR before including
|
||||
|
||||
include($$CORE_ROOT_DIR/Common/base.pri)
|
||||
|
||||
MAKEFILE=makefiles/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX
|
||||
PRO_SUFFIX=$$CORE_BUILDS_PLATFORM_PREFIX
|
||||
|
||||
core_debug {
|
||||
MAKEFILE=$$join(MAKEFILE, , , "_debug_")
|
||||
PRO_SUFFIX=$$join(PRO_SUFFIX, , , "_debug_")
|
||||
}
|
||||
build_xp {
|
||||
MAKEFILE=$$join(MAKEFILE, , , "_xp")
|
||||
PRO_SUFFIX=$$join(PRO_SUFFIX, , , "_xp")
|
||||
}
|
||||
OO_BRANDING_SUFFIX = $$(OO_BRANDING)
|
||||
!isEmpty(OO_BRANDING_SUFFIX) {
|
||||
PRO_SUFFIX=$$join(PRO_SUFFIX, , , "$$OO_BRANDING_SUFFIX")
|
||||
MAKEFILE=$$join(MAKEFILE, , , "$$OO_BRANDING_SUFFIX")
|
||||
}
|
||||
|
||||
message(current_makefile)
|
||||
message($$MAKEFILE)
|
||||
|
||||
CONFIG += ordered
|
||||
|
||||
defineTest(removeFile) {
|
||||
file = $$1
|
||||
win32:file ~= s,/,\\,g
|
||||
core_windows {
|
||||
system(if exist $$shell_quote($$file) $$QMAKE_DEL_FILE $$shell_quote($$file) $$escape_expand(\\n\\t))
|
||||
} else {
|
||||
system($$QMAKE_DEL_FILE $$shell_quote($$file) $$escape_expand(\\n\\t))
|
||||
}
|
||||
}
|
||||
defineTest(qmakeClear) {
|
||||
dir = $$1
|
||||
name = $$2
|
||||
removeFile($$1/Makefile.$$2$$PRO_SUFFIX)
|
||||
removeFile($$1/.qmake.stash)
|
||||
}
|
||||
|
||||
# addSubProject() - adds project to SUBDIRS, creates variables associated with the project(file, makefile, depends)
|
||||
# Arg1 - Project name
|
||||
# Arg2 - Qmake file of project
|
||||
# Arg3(optional) - Project dependencies
|
||||
defineTest(addSubProject) {
|
||||
pro_name = $$1
|
||||
pro_file = $$2
|
||||
pro_depends = $$3
|
||||
isEmpty(pro_name):error(Sub-project name is not defined.)
|
||||
isEmpty(pro_file):error(Qmake file of sub-project \'$$pro_name\' is not defined.)
|
||||
!exists($$pro_file):error(Sub-project qmake file \'$$pro_file\' is not exists.)
|
||||
path = $$section(pro_file, /, 0, -2)
|
||||
ext_name = $$section(pro_file, /, -1, -1)
|
||||
name = $$section(ext_name, ., 0, 0)
|
||||
SUBDIRS += $$pro_name
|
||||
export(SUBDIRS)
|
||||
$${pro_name}.file = $$pro_file
|
||||
export($${pro_name}.file)
|
||||
$${pro_name}.makefile = $$path/Makefile.$$name$$PRO_SUFFIX
|
||||
export($${pro_name}.makefile)
|
||||
!isEmpty(pro_depends) {
|
||||
$${pro_name}.depends = $$pro_depends
|
||||
export($${pro_name}.depends)
|
||||
}
|
||||
# remove makefile
|
||||
qmakeClear($$path, $$name)
|
||||
}
|
||||
14
configure.py
14
configure.py
@ -11,11 +11,18 @@ parser.add_option("--update", action="store", type="string", dest="update", defa
|
||||
parser.add_option("--update-light", action="store", type="string", dest="update-light", default="", help="performs pull/clone without switching branches, can be used only if update is true.")
|
||||
parser.add_option("--branch", action="store", type="string", dest="branch", default="master", help="branch/tag name, used only if update is true and update_light is not used. Updates/clones all the repos and switches the branch to the proper one deleting all the local changes")
|
||||
parser.add_option("--clean", action="store", type="string", dest="clean", default="1", help="defines whether to build everything anew")
|
||||
parser.add_option("--module", action="store", type="string", dest="module", default="builder", help="defines what modules to build. You can specify several of them, e.g. --module 'core desktop builder server develop mobile'")
|
||||
parser.add_option("--module", action="store", type="string", dest="module", default="builder", help="defines what modules to build. You can specify several of them, e.g. --module 'core desktop builder server mobile'")
|
||||
parser.add_option("--develop", action="store", type="string", dest="develop", default="0", help="defines develop mode")
|
||||
parser.add_option("--beta", action="store", type="string", dest="beta", default="0", help="defines beta mode")
|
||||
parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]")
|
||||
parser.add_option("--config", action="store", type="string", dest="config", default="", help="provides ability to specify additional parameters for qmake")
|
||||
parser.add_option("--qt-dir", action="store", type="string", dest="qt-dir", default="", help="defines qmake directory path. qmake can be found in qt-dir/compiler/bin directory")
|
||||
parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp", default="", help="defines qmake directory path for Windows XP. qmake can be found in 'qt-dir/compiler/bin directory")
|
||||
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
|
||||
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
|
||||
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
|
||||
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
|
||||
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
|
||||
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
|
||||
parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
|
||||
parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
|
||||
@ -29,6 +36,11 @@ parser.add_option("--server-addon", action="append", type="string", dest="server
|
||||
parser.add_option("--web-apps-addon", action="append", type="string", dest="web-apps-addons", default=[], help="provides web-apps addons")
|
||||
parser.add_option("--sdkjs-plugin", action="append", type="string", dest="sdkjs-plugin", default=["default"], help="provides plugins for server-based and desktop versions of the editors")
|
||||
parser.add_option("--sdkjs-plugin-server", action="append", type="string", dest="sdkjs-plugin-server", default=["default"], help="provides plugins for server-based version of the editors")
|
||||
parser.add_option("--features", action="store", type="string", dest="features", default="", help="native features (config addon)")
|
||||
parser.add_option("--vs-version", action="store", type="string", dest="vs-version", default="2015", help="version of visual studio")
|
||||
parser.add_option("--vs-path", action="store", type="string", dest="vs-path", default="", help="path to vcvarsall")
|
||||
parser.add_option("--siteUrl", action="store", type="string", dest="siteUrl", default="127.0.0.1", help="site url")
|
||||
parser.add_option("--multiprocess", action="store", type="string", dest="multiprocess", default="1", help="provides ability to specify single process for make")
|
||||
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
configOptions = vars(options)
|
||||
|
||||
1
defaults
1
defaults
@ -1,2 +1,3 @@
|
||||
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode"
|
||||
sdkjs-plugin-server="speech, zotero, mendeley"
|
||||
sdkjs-addons="sdkjs-forms"
|
||||
|
||||
19
develop/Dockerfile
Normal file
19
develop/Dockerfile
Normal file
@ -0,0 +1,19 @@
|
||||
FROM onlyoffice/documentserver:latest
|
||||
RUN apt-get update -y && \
|
||||
apt-get install git -y \
|
||||
python3 -y \
|
||||
openjdk-11-jdk -y \
|
||||
bzip2 -y \
|
||||
npm -y && \
|
||||
npm install -g grunt grunt-cli -y && \
|
||||
ln -s /usr/bin/python3 /usr/bin/python && \
|
||||
ln -s /usr/bin/pip3 /usr/bin/pip && \
|
||||
git clone --depth 1 -b feature/docker-instruction https://github.com/ONLYOFFICE/build_tools.git var/www/onlyoffice/documentserver/build_tools && \
|
||||
sed -i '/documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}/d' /app/ds/run-document-server.sh && \
|
||||
#Set Up Debug Logging
|
||||
sed -i 's/WARN/ALL/g' /etc/onlyoffice/documentserver/log4js/production.json && \
|
||||
#Start test example
|
||||
if [ -s /etc/supervisor/conf.d/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /etc/supervisor/conf.d/ds-example.conf; fi && \
|
||||
if [ -s /app/ds/setup/config/supervisor/ds/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /app/ds/setup/config/supervisor/ds/ds-example.conf; fi && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
ENTRYPOINT python3 /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver $@ && /bin/sh -c /app/ds/run-document-server.sh
|
||||
219
develop/README.md
Normal file
219
develop/README.md
Normal file
@ -0,0 +1,219 @@
|
||||
# Docker
|
||||
|
||||
This directory containing instruction for developers,
|
||||
who want to change something in sdkjs or web-apps or server module,
|
||||
but don't want to compile pretty compilcated core product to make those changes.
|
||||
|
||||
## System requirements
|
||||
|
||||
### Windows
|
||||
|
||||
You need the latest
|
||||
[Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/)
|
||||
installed.
|
||||
|
||||
**Note**: Docker Desktop does not start automatically after installation.
|
||||
You should manually start the **Docker Desktop** application.
|
||||
|
||||
**Note**: If you have problems running Docker Desktop with the
|
||||
"Use WSL 2 instead of Hyper-V" installation option,
|
||||
try reinstalling it without this option.
|
||||
|
||||
### Linux or macOS
|
||||
|
||||
You need the latest
|
||||
[Docker](https://docs.docker.com/engine/install/)
|
||||
version installed.
|
||||
|
||||
## Create develop Docker Images
|
||||
|
||||
To create a image with the ability to include external non-minified sdkjs code,
|
||||
use the following commands:
|
||||
|
||||
### Clone development environment to work dir
|
||||
|
||||
```bash
|
||||
git clone -b feature/docker-instruction https://github.com/ONLYOFFICE/build_tools.git
|
||||
```
|
||||
|
||||
### Modify Docker Images
|
||||
|
||||
**Note**: Do not prefix docker command with sudo.
|
||||
[This](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user)
|
||||
instruction show how to use docker without sudo.
|
||||
|
||||
```bash
|
||||
cd build_tools/develop
|
||||
docker pull onlyoffice/documentserver
|
||||
docker build -t documentserver-develop .
|
||||
```
|
||||
|
||||
**Note**: The dot at the end is required.
|
||||
|
||||
**Note**: Sometimes script may fail due to network errors. Just restart it.
|
||||
|
||||
## Clone development modules
|
||||
|
||||
Clone development modules to the work dir
|
||||
|
||||
* `sdkjs` repo is located [here](https://github.com/ONLYOFFICE/sdkjs/)
|
||||
* `web-apps` repo is located [here](https://github.com/ONLYOFFICE/web-apps/)
|
||||
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
|
||||
|
||||
```bash
|
||||
cd ../..
|
||||
git clone https://github.com/ONLYOFFICE/sdkjs.git
|
||||
git clone https://github.com/ONLYOFFICE/web-apps.git
|
||||
git clone https://github.com/ONLYOFFICE/server.git
|
||||
```
|
||||
|
||||
## Start server with external folders
|
||||
|
||||
To mount external folders to the container,
|
||||
you need to pass the "-v" parameter
|
||||
along with the relative paths to the required folders.
|
||||
The folders `sdkjs` and `web-apps` are required for proper development workflow.
|
||||
The folders `server` is optional
|
||||
|
||||
**Note**: ONLYOFFICE server uses port 80.
|
||||
Look for another application using port 80 and stop it
|
||||
|
||||
**Note**: Server start with `sdkjs` and `web-apps` takes 15 minutes
|
||||
and takes 20 minutes with `server`
|
||||
|
||||
**Note**: Run command from work dir with development modules
|
||||
|
||||
### docker run on Windows (PowerShell)
|
||||
|
||||
**Note**: Run PowerShell as administrator to fix EACCES error when installing
|
||||
node_modules
|
||||
|
||||
run with `sdkjs` and `web-apps`
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
|
||||
```
|
||||
|
||||
or run with `sdkjs`, `web-apps` and `server`
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $pwd/server:/var/www/onlyoffice/documentserver/server documentserver-develop
|
||||
```
|
||||
|
||||
### docker run on Linux or macOS
|
||||
|
||||
run with `sdkjs` and `web-apps`
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
|
||||
```
|
||||
|
||||
or run with `sdkjs`, `web-apps` and `server`
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $(pwd)/server:/var/www/onlyoffice/documentserver/server documentserver-develop
|
||||
```
|
||||
|
||||
## Open editor
|
||||
|
||||
After the server starts successfully, you will see Docker log messages like this
|
||||
|
||||
```bash
|
||||
[Date] [WARN] [localhost] [docId] [userId] nodeJS
|
||||
```
|
||||
|
||||
To try the document editor, open a browser tab and type
|
||||
[http://localhost/example](http://localhost/example) into the URL bar.
|
||||
|
||||
**Note**: Disable **ad blockers** for localhost page.
|
||||
It may block some scripts (like Analytics.js)
|
||||
|
||||
## Modify sources
|
||||
|
||||
### To change something in `sdkjs` do the following steps
|
||||
|
||||
1)Edit source file. Let's insert an image url into each open document.
|
||||
Following command inserts (in case of problems, you can replace URL)
|
||||
`this.AddImageUrl(['http://localhost/example/images/logo.png']);`
|
||||
after event
|
||||
`this.sendEvent('asc_onDocumentContentReady');`
|
||||
in file
|
||||
`sdkjs/common/apiBase.js`
|
||||
|
||||
### change sdkjs on Windows (PowerShell)
|
||||
|
||||
```bash
|
||||
(Get-Content sdkjs/common/apiBase.js) -replace "this\.sendEvent\('asc_onDocumentContentReady'\);", "this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);" | Set-Content sdkjs/common/apiBase.js
|
||||
```
|
||||
|
||||
### change sdkjs on Linux or macOS
|
||||
|
||||
```bash
|
||||
sed -i "s,this.sendEvent('asc_onDocumentContentReady');,this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);," sdkjs/common/apiBase.js
|
||||
```
|
||||
|
||||
2)Delete browser cache or hard reload the page `Ctrl + Shift + R`
|
||||
|
||||
3)Open new file in browser
|
||||
|
||||
### To change something in `server` do the following steps
|
||||
|
||||
1)Edit source file. Let's send `"Hello World!"`
|
||||
chart message every time a document is opened.
|
||||
Following command inserts
|
||||
`yield* onMessage(ctx, conn, {"message": "Hello World!"});`
|
||||
in function
|
||||
`sendAuthInfo`
|
||||
in file
|
||||
`server/DocService/sources/DocsCoServer.js`
|
||||
|
||||
### change server on Windows (PowerShell)
|
||||
|
||||
```bash
|
||||
(Get-Content server/DocService/sources/DocsCoServer.js) -replace 'opt_hasForgotten, opt_openedAt\) \{', 'opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});' | Set-Content server/DocService/sources/DocsCoServer.js
|
||||
```
|
||||
|
||||
### change server on Linux or macOS
|
||||
|
||||
```bash
|
||||
sed -i 's#opt_hasForgotten, opt_openedAt) {#opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});#' server/DocService/sources/DocsCoServer.js
|
||||
```
|
||||
|
||||
2)Restart document server process
|
||||
|
||||
**Note**: Look for ``CONTAINER_ID`` in the result of ``docker ps``.
|
||||
|
||||
```bash
|
||||
docker exec -it CONTAINER_ID supervisorctl restart all
|
||||
```
|
||||
|
||||
3)Open new file in browser
|
||||
|
||||
## Start server with additional functionality(addons)
|
||||
|
||||
To get additional functionality and branding you need to connect a branding folder,
|
||||
additional addon folders and pass command line arguments
|
||||
|
||||
For example run with `onlyoffice` branding and
|
||||
addons:`sdkjs-forms`, `sdkjs-ooxml`, `web-apps-mobile`
|
||||
|
||||
### docker run on Windows (PowerShell) with branding
|
||||
|
||||
**Note**: Run PowerShell as administrator to fix EACCES error when installing
|
||||
node_modules
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true `
|
||||
-v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps `
|
||||
-v $pwd/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $pwd/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $pwd/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $pwd/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile `
|
||||
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
|
||||
```
|
||||
|
||||
### docker run on Linux or macOS with branding
|
||||
|
||||
```bash
|
||||
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true \
|
||||
-v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps \
|
||||
-v $(pwd)/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $(pwd)/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $(pwd)/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $(pwd)/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile \
|
||||
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
|
||||
```
|
||||
57
develop/run_build_js.py
Normal file
57
develop/run_build_js.py
Normal file
@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
sys.path.append(sys.argv[1] + '/build_tools/scripts')
|
||||
sys.path.append(sys.argv[1] + '/build_tools/scripts/develop')
|
||||
import build_js
|
||||
import run_server
|
||||
import config
|
||||
import base
|
||||
|
||||
git_dir = sys.argv[1];
|
||||
|
||||
base.print_info('argv :'+' '.join(sys.argv))
|
||||
base.cmd_in_dir(git_dir + '/build_tools/', 'python3', ['configure.py', '--develop', '1'] + sys.argv[2:])
|
||||
|
||||
config.parse()
|
||||
config.parse_defaults()
|
||||
|
||||
if base.is_exist(git_dir + "/server/FileConverter/bin/fonts.log"):
|
||||
base.print_info('remove font cache to regenerate fonts in external sdkjs volume')
|
||||
base.delete_file(git_dir + "/server/FileConverter/bin/fonts.log");
|
||||
|
||||
# external server volume
|
||||
if base.is_exist(sys.argv[1] + '/server/DocService/package.json'):
|
||||
base.print_info('replace supervisor cfg to run docservice and converter from source')
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
|
||||
base.print_info('run_server.run_docker_server')
|
||||
run_server.run_docker_server();
|
||||
else:
|
||||
#Fix theme generation for external sdkjs volume
|
||||
if base.is_exist(git_dir + "/server/FileConverter/bin/DoctRenderer.config"):
|
||||
base.print_info('replace DoctRenderer.config for external sdkjs volume')
|
||||
base.generate_doctrenderer_config(git_dir + "/server/FileConverter/bin/DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/")
|
||||
|
||||
addons = {}
|
||||
addons.update(base.get_sdkjs_addons())
|
||||
addons.update(base.get_web_apps_addons())
|
||||
staticContent = ""
|
||||
for addon in addons:
|
||||
if (addon):
|
||||
staticContent += '"/' + addon + '": {"path": "/var/www/onlyoffice/documentserver/' + addon + '","options": {"maxAge": "7d"}},'
|
||||
|
||||
if staticContent:
|
||||
base.print_info('replace production-linux.json for addons'+staticContent)
|
||||
base.replaceInFileRE("/etc/onlyoffice/documentserver/production-linux.json", '"static_content": {.*', '"static_content": {' + staticContent)
|
||||
|
||||
base.print_info('replace supervisor cfg to run docservice and converter from pkg')
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
|
||||
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
|
||||
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
|
||||
base.print_info('run_server.run_docker_sdk_web_apps: ' + git_dir)
|
||||
run_server.run_docker_sdk_web_apps(git_dir)
|
||||
|
||||
57
make.py
57
make.py
@ -2,6 +2,8 @@
|
||||
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
sys.path.append('scripts/develop')
|
||||
sys.path.append('scripts/develop/vendor')
|
||||
sys.path.append('scripts/core_common')
|
||||
sys.path.append('scripts/core_common/modules')
|
||||
import config
|
||||
@ -11,11 +13,13 @@ import build_js
|
||||
import build_server
|
||||
import deploy
|
||||
import make_common
|
||||
import develop
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
base.set_env("BUILD_PLATFORM", config.option("platform"))
|
||||
|
||||
# branding
|
||||
@ -31,7 +35,7 @@ if ("1" != base.get_env("OO_RUNNING_BRANDING")) and ("" != config.option("brandi
|
||||
base.cmd_in_dir(branding_dir, "git", ["fetch"], True)
|
||||
|
||||
if not is_exist or ("1" != config.option("update-light")):
|
||||
base.cmd_in_dir(branding_dir, "git", ["checkout", "-f", config.option("branch")])
|
||||
base.cmd_in_dir(branding_dir, "git", ["checkout", "-f", config.option("branch")], True)
|
||||
|
||||
base.cmd_in_dir(branding_dir, "git", ["pull"], True)
|
||||
|
||||
@ -49,37 +53,13 @@ base.check_build_version(base_dir)
|
||||
|
||||
# update
|
||||
if ("1" == config.option("update")):
|
||||
base.git_update("core")
|
||||
base.git_update("sdkjs")
|
||||
base.sdkjs_addons_checkout()
|
||||
base.sdkjs_plugins_checkout()
|
||||
base.sdkjs_plugins_server_checkout()
|
||||
base.git_update("web-apps")
|
||||
base.web_apps_addons_checkout()
|
||||
base.git_update("desktop-sdk")
|
||||
base.git_update("dictionaries")
|
||||
|
||||
if config.check_option("module", "builder"):
|
||||
base.git_update("DocumentBuilder")
|
||||
|
||||
if config.check_option("module", "desktop"):
|
||||
base.git_update("desktop-apps")
|
||||
|
||||
if (config.check_option("module", "develop") or config.check_option("module", "server")):
|
||||
base.git_update("server")
|
||||
base.server_addons_checkout()
|
||||
base.git_update("document-server-integration")
|
||||
|
||||
if (config.check_option("module", "develop") or config.check_option("module", "server") or config.check_option("platform", "ios")):
|
||||
base.git_update("core-fonts")
|
||||
repositories = base.get_repositories()
|
||||
base.update_repositories(repositories)
|
||||
|
||||
base.configure_common_apps()
|
||||
|
||||
# developing...
|
||||
if ("develop" == config.option("module")):
|
||||
build_js.build_js_develop(base_dir + "/..")
|
||||
deploy.make()
|
||||
exit(0)
|
||||
develop.make();
|
||||
|
||||
# check only js builds
|
||||
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
|
||||
@ -90,20 +70,17 @@ if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
|
||||
make_common.make()
|
||||
|
||||
# build updmodule for desktop (only for windows version)
|
||||
if ("windows" == base.host_platform()) and (config.check_option("module", "desktop")):
|
||||
config.extend_option("config", "updmodule")
|
||||
config.extend_option("qmake_addon", "LINK=https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.xml")
|
||||
if config.check_option("module", "desktop"):
|
||||
config.extend_option("qmake_addon", "URL_WEBAPPS_HELP=https://download.onlyoffice.com/install/desktop/editors/help/v" + base.get_env('PRODUCT_VERSION') + "-1/apps")
|
||||
|
||||
if not base.is_file(base_dir + "/tools/WinSparkle-0.7.0.zip"):
|
||||
base.cmd("curl.exe", ["https://d2ettrnqo7v976.cloudfront.net/winsparkle/WinSparkle-0.7.0.zip", "--output", base_dir + "/tools/WinSparkle-0.7.0.zip"])
|
||||
|
||||
if not base.is_dir(base_dir + "/tools/WinSparkle-0.7.0"):
|
||||
base.cmd("7z.exe", ["x", base_dir + "/tools/WinSparkle-0.7.0.zip", "-otools"])
|
||||
if "windows" == base.host_platform():
|
||||
config.extend_option("config", "updmodule")
|
||||
config.extend_option("qmake_addon", "LINK=https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.json")
|
||||
|
||||
base.create_dir(base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle")
|
||||
#base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/include", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/include")
|
||||
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_32")
|
||||
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/x64/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_64")
|
||||
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
base.set_env("VIDEO_PLAYER_VLC_DIR", base_dir + "/../desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc")
|
||||
|
||||
# build
|
||||
build.make()
|
||||
|
||||
90
make_package.py
Executable file
90
make_package.py
Executable file
@ -0,0 +1,90 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
sys.path.append("scripts")
|
||||
import argparse
|
||||
import package_common as common
|
||||
import package_utils as utils
|
||||
|
||||
# parse
|
||||
parser = argparse.ArgumentParser(description="Build packages.")
|
||||
parser.add_argument("-P", "--platform", dest="platform", type=str,
|
||||
action="store", help="Defines platform", required=True)
|
||||
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
|
||||
action="store", help="Defines targets", required=True)
|
||||
parser.add_argument("-R", "--branding", dest="branding", type=str,
|
||||
action="store", help="Provides branding path")
|
||||
parser.add_argument("-V", "--version", dest="version", type=str,
|
||||
action="store", help="Defines version")
|
||||
parser.add_argument("-B", "--build", dest="build", type=str,
|
||||
action="store", help="Defines build")
|
||||
args = parser.parse_args()
|
||||
|
||||
# vars
|
||||
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
|
||||
common.os_family = utils.host_platform()
|
||||
common.platform = args.platform
|
||||
common.targets = args.targets
|
||||
common.clean = "clean" in args.targets
|
||||
common.sign = "sign" in args.targets
|
||||
common.deploy = "deploy" in args.targets
|
||||
common.version = args.version if (args.version is not None) else utils.get_env("PRODUCT_VERSION", "1.0.0")
|
||||
common.build = args.build if (args.build is not None) else utils.get_env("BUILD_NUMBER", "1")
|
||||
common.channel = utils.get_env("BUILD_CHANNEL", "other")
|
||||
common.branding = args.branding
|
||||
common.timestamp = utils.get_timestamp()
|
||||
common.summary = []
|
||||
common.deploy_data = []
|
||||
utils.log("workspace_dir: " + common.workspace_dir)
|
||||
utils.log("os_family: " + common.os_family)
|
||||
utils.log("platform: " + str(common.platform))
|
||||
utils.log("targets: " + str(common.targets))
|
||||
utils.log("clean: " + str(common.clean))
|
||||
utils.log("sign: " + str(common.sign))
|
||||
utils.log("deploy: " + str(common.deploy))
|
||||
utils.log("version: " + common.version)
|
||||
utils.log("build: " + common.build)
|
||||
utils.log("branding: " + str(common.branding))
|
||||
utils.log("timestamp: " + common.timestamp)
|
||||
|
||||
# branding
|
||||
if common.branding is not None:
|
||||
sys.path.insert(-1, \
|
||||
utils.get_path("../" + common.branding + "/build_tools/scripts"))
|
||||
|
||||
import package_core
|
||||
import package_desktop
|
||||
import package_server
|
||||
import package_builder
|
||||
import package_mobile
|
||||
|
||||
# build
|
||||
utils.set_cwd(common.workspace_dir, verbose=True)
|
||||
utils.delete_file("deploy.json")
|
||||
if "core" in common.targets:
|
||||
package_core.make()
|
||||
if "desktop" in common.targets:
|
||||
package_desktop.make()
|
||||
if "builder" in common.targets:
|
||||
package_builder.make()
|
||||
if "server-community" in common.targets:
|
||||
package_server.make("community")
|
||||
if "server-enterprise" in common.targets:
|
||||
package_server.make("enterprise")
|
||||
if "server-developer" in common.targets:
|
||||
package_server.make("developer")
|
||||
if "mobile" in common.targets:
|
||||
package_mobile.make()
|
||||
|
||||
# summary
|
||||
utils.log_h1("Build summary")
|
||||
exitcode = 0
|
||||
for i in common.summary:
|
||||
if list(i.values())[0]:
|
||||
utils.log("[ OK ] " + list(i.keys())[0])
|
||||
else:
|
||||
utils.log("[FAILED] " + list(i.keys())[0])
|
||||
exitcode = 1
|
||||
|
||||
exit(exitcode)
|
||||
708
scripts/base.py
708
scripts/base.py
@ -1,6 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import platform
|
||||
import struct
|
||||
import glob
|
||||
import shutil
|
||||
import os
|
||||
@ -27,6 +28,17 @@ def host_platform():
|
||||
return "mac"
|
||||
return ret
|
||||
|
||||
def is_os_64bit():
|
||||
return platform.machine().endswith('64')
|
||||
|
||||
def is_os_arm():
|
||||
if -1 == platform.machine().find('arm'):
|
||||
return False
|
||||
return True
|
||||
|
||||
def is_python_64bit():
|
||||
return (struct.calcsize("P") == 8)
|
||||
|
||||
def get_path(path):
|
||||
if "windows" == host_platform():
|
||||
return path.replace("/", "\\")
|
||||
@ -41,7 +53,7 @@ def set_env(name, value):
|
||||
|
||||
def configure_common_apps(file=""):
|
||||
if ("windows" == host_platform()):
|
||||
os.environ["PATH"] = get_script_dir(file) + "/../tools/win/7z" + os.pathsep + get_script_dir() + "/../tools/win/curl" + os.pathsep + os.environ["PATH"]
|
||||
os.environ["PATH"] = get_script_dir(file) + "/../tools/win/7z" + os.pathsep + get_script_dir(file) + "/../tools/win/curl" + os.pathsep + get_script_dir(file) + "/../tools/win/vswhere" + os.pathsep + os.environ["PATH"]
|
||||
elif ("mac" == host_platform()):
|
||||
os.environ["PATH"] = get_script_dir(file) + "/../tools/mac" + os.pathsep + os.environ["PATH"]
|
||||
return
|
||||
@ -63,6 +75,13 @@ def print_info(info=""):
|
||||
print("------------------------------------------")
|
||||
return
|
||||
|
||||
def print_error(error=""):
|
||||
print("\033[91m" + error + "\033[0m")
|
||||
|
||||
def print_list(list):
|
||||
print('[%s]' % ', '.join(map(str, list)))
|
||||
return
|
||||
|
||||
# file system -------------------------------------------
|
||||
def is_file(path):
|
||||
return os.path.isfile(get_path(path))
|
||||
@ -83,6 +102,14 @@ def copy_file(src, dst):
|
||||
return
|
||||
return shutil.copy2(get_path(src), get_path(dst))
|
||||
|
||||
def move_file(src, dst):
|
||||
if is_file(dst):
|
||||
delete_file(dst)
|
||||
if not is_file(src):
|
||||
print("move warning [file not exist]: " + src)
|
||||
return
|
||||
return shutil.move(get_path(src), get_path(dst))
|
||||
|
||||
def copy_files(src, dst, override=True):
|
||||
for file in glob.glob(src):
|
||||
file_name = os.path.basename(file)
|
||||
@ -97,6 +124,20 @@ def copy_files(src, dst, override=True):
|
||||
copy_files(file + "/*", dst + "/" + file_name, override)
|
||||
return
|
||||
|
||||
def move_files(src, dst, override=True):
|
||||
for file in glob.glob(src):
|
||||
file_name = os.path.basename(file)
|
||||
if is_file(file):
|
||||
if override and is_file(dst + "/" + file_name):
|
||||
delete_file(dst + "/" + file_name)
|
||||
if not is_file(dst + "/" + file_name):
|
||||
move_file(file, dst)
|
||||
elif is_dir(file):
|
||||
if not is_dir(dst + "/" + file_name):
|
||||
create_dir(dst + "/" + file_name)
|
||||
move_files(file + "/*", dst + "/" + file_name, override)
|
||||
return
|
||||
|
||||
def copy_dir_content(src, dst, filterInclude = "", filterExclude = ""):
|
||||
src_folder = src
|
||||
if ("/" != src[-1:]):
|
||||
@ -134,6 +175,14 @@ def create_dir(path):
|
||||
os.makedirs(path2)
|
||||
return
|
||||
|
||||
def move_dir(src, dst):
|
||||
if is_dir(dst):
|
||||
delete_dir(dst)
|
||||
if is_dir(src):
|
||||
copy_dir(src, dst)
|
||||
delete_dir(src)
|
||||
return
|
||||
|
||||
def copy_dir(src, dst):
|
||||
if is_dir(dst):
|
||||
delete_dir(dst)
|
||||
@ -165,6 +214,20 @@ def delete_dir(path):
|
||||
def copy_lib(src, dst, name):
|
||||
if (config.check_option("config", "bundle_dylibs")) and is_dir(src + "/" + name + ".framework"):
|
||||
copy_dir(src + "/" + name + ".framework", dst + "/" + name + ".framework")
|
||||
|
||||
if (config.check_option("config", "bundle_xcframeworks")) and is_dir(src + "/simulator/" + name + ".framework"):
|
||||
create_dir(dst + "/simulator")
|
||||
copy_dir(src + "/simulator/" + name + ".framework", dst + "/simulator/" + name + ".framework")
|
||||
|
||||
cmd("xcodebuild", ["-create-xcframework",
|
||||
"-framework", dst + "/" + name + ".framework",
|
||||
"-framework", dst + "/simulator/" + name + ".framework",
|
||||
"-output", dst + "/" + name + ".xcframework"])
|
||||
|
||||
delete_dir(dst + "/" + name + ".framework")
|
||||
delete_dir(dst + "/simulator/" + name + ".framework")
|
||||
delete_dir(dst + "/simulator")
|
||||
|
||||
return
|
||||
|
||||
lib_ext = ".so"
|
||||
@ -199,6 +262,9 @@ def copy_exe(src, dst, name):
|
||||
return
|
||||
|
||||
def replaceInFile(path, text, textReplace):
|
||||
if not is_file(path):
|
||||
print("[replaceInFile] file not exist: " + path)
|
||||
return
|
||||
filedata = ""
|
||||
with open(get_path(path), "r") as file:
|
||||
filedata = file.read()
|
||||
@ -208,6 +274,9 @@ def replaceInFile(path, text, textReplace):
|
||||
file.write(filedata)
|
||||
return
|
||||
def replaceInFileRE(path, pattern, textReplace):
|
||||
if not is_file(path):
|
||||
print("[replaceInFile] file not exist: " + path)
|
||||
return
|
||||
filedata = ""
|
||||
with open(get_path(path), "r") as file:
|
||||
filedata = file.read()
|
||||
@ -293,6 +362,52 @@ def cmd_in_dir(directory, prog, args=[], is_no_errors=False):
|
||||
os.chdir(cur_dir)
|
||||
return ret
|
||||
|
||||
def cmd_and_return_cwd(prog, args=[], is_no_errors=False):
|
||||
cur_dir = os.getcwd()
|
||||
ret = cmd(prog, args, is_no_errors)
|
||||
os.chdir(cur_dir)
|
||||
return ret
|
||||
|
||||
def run_command(sCommand):
|
||||
popen = subprocess.Popen(sCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
result = {'stdout' : '', 'stderr' : ''}
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
result['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
|
||||
result['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
|
||||
return result
|
||||
|
||||
def run_command_in_dir(directory, sCommand):
|
||||
host = host_platform()
|
||||
if (host == 'windows'):
|
||||
dir = get_path(directory)
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(dir)
|
||||
|
||||
ret = run_command(sCommand)
|
||||
|
||||
if (host == 'windows'):
|
||||
os.chdir(cur_dir)
|
||||
return ret
|
||||
|
||||
def exec_command_in_dir(directory, sCommand):
|
||||
host = host_platform()
|
||||
if (host == 'windows'):
|
||||
dir = get_path(directory)
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(dir)
|
||||
|
||||
ret = os.system(sCommand)
|
||||
|
||||
if (host == 'windows'):
|
||||
os.chdir(cur_dir)
|
||||
return ret
|
||||
|
||||
def run_process(args=[]):
|
||||
subprocess.Popen(args)
|
||||
|
||||
@ -348,17 +463,152 @@ def git_update(repo, is_no_errors=False, is_current_dir=False):
|
||||
print("branch does not exist...")
|
||||
print("switching to master...")
|
||||
cmd("git", ["checkout", "-f", "master"])
|
||||
cmd("git", ["submodule", "update", "--init", "--recursive"], True)
|
||||
if (0 != config.option("branch").find("tags/")):
|
||||
cmd("git", ["pull"], False if ("1" != config.option("update-light")) else True)
|
||||
cmd("git", ["submodule", "update", "--recursive", "--remote"], True)
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
def get_repositories():
|
||||
result = {}
|
||||
result["core"] = [False, False]
|
||||
result["sdkjs"] = [False, False]
|
||||
result.update(get_sdkjs_addons())
|
||||
result["onlyoffice.github.io"] = [False, False]
|
||||
result["web-apps"] = [False, False]
|
||||
result.update(get_web_apps_addons())
|
||||
result["dictionaries"] = [False, False]
|
||||
|
||||
if config.check_option("module", "builder"):
|
||||
result["document-templates"] = [False, False]
|
||||
|
||||
if config.check_option("module", "desktop"):
|
||||
result["desktop-sdk"] = [False, False]
|
||||
result["desktop-apps"] = [False, False]
|
||||
result["document-templates"] = [False, False]
|
||||
|
||||
if (config.check_option("module", "server")):
|
||||
result["server"] = [False, False]
|
||||
result.update(get_server_addons())
|
||||
result["document-server-integration"] = [False, False]
|
||||
result["document-templates"] = [False, False]
|
||||
|
||||
if (config.check_option("module", "server") or config.check_option("platform", "ios")):
|
||||
result["core-fonts"] = [False, False]
|
||||
|
||||
get_branding_repositories(result)
|
||||
return result
|
||||
|
||||
def get_branding_repositories(checker):
|
||||
modules = ["core", "server", "mobile", "desktop", "builder"]
|
||||
for mod in modules:
|
||||
if not config.check_option("module", mod):
|
||||
continue
|
||||
name = "repositories_" + mod
|
||||
repos = config.option(name).rsplit(", ")
|
||||
for repo in repos:
|
||||
if (repo != ""):
|
||||
checker[repo] = [False, False]
|
||||
return
|
||||
|
||||
def create_pull_request(branches_to, repo, is_no_errors=False, is_current_dir=False):
|
||||
print("[git] create pull request: " + repo)
|
||||
url = "https://github.com/ONLYOFFICE/" + repo + ".git"
|
||||
if config.option("git-protocol") == "ssh":
|
||||
url = "git@github.com:ONLYOFFICE/" + repo + ".git"
|
||||
folder = get_script_dir() + "/../../" + repo
|
||||
if is_current_dir:
|
||||
folder = repo
|
||||
is_not_exit = False
|
||||
if not is_dir(folder):
|
||||
retClone = cmd("git", ["clone", url, folder], is_no_errors)
|
||||
if retClone != 0:
|
||||
return
|
||||
is_not_exit = True
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(folder)
|
||||
branch_from = config.option("branch")
|
||||
cmd("git", ["checkout", "-f", branch_from], is_no_errors)
|
||||
cmd("git", ["pull"], is_no_errors)
|
||||
for branch_to in branches_to:
|
||||
if "" != run_command("git log origin/" + branch_to + "..origin/" + branch_from)["stdout"]:
|
||||
cmd("git", ["checkout", "-f", branch_to], is_no_errors)
|
||||
cmd("git", ["pull"], is_no_errors)
|
||||
cmd("gh", ["pr", "create", "--base", branch_to, "--head", branch_from, "--title", "Merge branch " + branch_from + " to " + branch_to, "--body", ""], is_no_errors)
|
||||
if 0 != cmd("git", ["merge", "origin/" + branch_from, "--no-ff", "--no-edit"], is_no_errors):
|
||||
print_error("[git] Conflicts merge " + "origin/" + branch_from + " to " + branch_to + " in repo " + url)
|
||||
cmd("git", ["merge", "--abort"], is_no_errors)
|
||||
else:
|
||||
cmd("git", ["push"], is_no_errors)
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
def update_repositories(repositories):
|
||||
for repo in repositories:
|
||||
value = repositories[repo]
|
||||
current_dir = value[1]
|
||||
if current_dir == False:
|
||||
git_update(repo, value[0], False)
|
||||
else:
|
||||
if is_dir(current_dir + "/.git"):
|
||||
delete_dir_with_access_error(current_dir);
|
||||
delete_dir(current_dir)
|
||||
if not is_dir(current_dir):
|
||||
create_dir(current_dir)
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(current_dir)
|
||||
git_update(repo, value[0], True)
|
||||
os.chdir(cur_dir)
|
||||
|
||||
def git_dir():
|
||||
if ("windows" == host_platform()):
|
||||
return run_command("git --info-path")['stdout'] + "/../../.."
|
||||
|
||||
def get_prefix_cross_compiler_arm64():
|
||||
cross_compiler_arm64 = config.option("arm64-toolchain-bin")
|
||||
if is_file(cross_compiler_arm64 + "/aarch64-linux-gnu-g++") and is_file(cross_compiler_arm64 + "/aarch64-linux-gnu-gcc"):
|
||||
return "aarch64-linux-gnu-"
|
||||
if is_file(cross_compiler_arm64 + "/aarch64-unknown-linux-gnu-g++") and is_file(cross_compiler_arm64 + "/aarch64-unknown-linux-gnu-gcc"):
|
||||
return "aarch64-unknown-linux-gnu-"
|
||||
return ""
|
||||
|
||||
def get_gcc_version():
|
||||
gcc_version_major = 4
|
||||
gcc_version_minor = 0
|
||||
gcc_version_str = run_command("gcc -dumpfullversion -dumpversion")['stdout']
|
||||
if (gcc_version_str != ""):
|
||||
try:
|
||||
gcc_ver = gcc_version_str.split(".")
|
||||
gcc_version_major = int(gcc_ver[0])
|
||||
gcc_version_minor = int(gcc_ver[1])
|
||||
except Exception as e:
|
||||
gcc_version_major = 4
|
||||
gcc_version_minor = 0
|
||||
return gcc_version_major * 1000 + gcc_version_minor
|
||||
|
||||
# qmake -------------------------------------------------
|
||||
def qt_setup(platform):
|
||||
compiler = config.check_compiler(platform)
|
||||
qt_dir = config.option("qt-dir") if (-1 == platform.find("_xp")) else config.option("qt-dir-xp")
|
||||
qt_dir = (qt_dir + "/" + compiler["compiler"]) if platform_is_32(platform) else (qt_dir + "/" + compiler["compiler_64"])
|
||||
compiler_platform = compiler["compiler"] if platform_is_32(platform) else compiler["compiler_64"]
|
||||
qt_dir = qt_dir + "/" + compiler_platform
|
||||
|
||||
if (0 == platform.find("linux_arm")) and not is_dir(qt_dir):
|
||||
if ("gcc_arm64" == compiler_platform):
|
||||
qt_dir = config.option("qt-dir") + "/gcc_64"
|
||||
if ("gcc_arm" == compiler_platform):
|
||||
qt_dir = config.option("qt-dir") + "/gcc"
|
||||
|
||||
set_env("QT_DEPLOY", qt_dir + "/bin")
|
||||
|
||||
if ("linux_arm64" == platform):
|
||||
cross_compiler_arm64 = config.option("arm64-toolchain-bin")
|
||||
if ("" != cross_compiler_arm64):
|
||||
set_env("ARM64_TOOLCHAIN_BIN", cross_compiler_arm64)
|
||||
set_env("ARM64_TOOLCHAIN_BIN_PREFIX", get_prefix_cross_compiler_arm64())
|
||||
|
||||
return qt_dir
|
||||
|
||||
def qt_version():
|
||||
@ -367,7 +617,7 @@ def qt_version():
|
||||
return "".join(i for i in qt_dir if (i.isdigit() or i == "."))
|
||||
|
||||
def qt_config(platform):
|
||||
config_param = config.option("module") + " " + config.option("config")
|
||||
config_param = config.option("module") + " " + config.option("config") + " " + config.option("features")
|
||||
config_param_lower = config_param.lower()
|
||||
if (-1 != platform.find("xp")):
|
||||
config_param += " build_xp"
|
||||
@ -378,6 +628,19 @@ def qt_config(platform):
|
||||
config_param += " iphoneos device"
|
||||
if (-1 == config_param_lower.find("debug")):
|
||||
config_param += " release"
|
||||
if ("mac_arm64" == platform):
|
||||
config_param += " apple_silicon use_javascript_core"
|
||||
if config.check_option("module", "mobile"):
|
||||
config_param += " support_web_socket"
|
||||
|
||||
if ("ios" == platform):
|
||||
config_param += " disable_precompiled_header"
|
||||
if (0 == platform.find("android")):
|
||||
config_param += " disable_precompiled_header"
|
||||
|
||||
if ("linux_arm64" == platform):
|
||||
config_param += " linux_arm64"
|
||||
|
||||
return config_param
|
||||
|
||||
def qt_major_version():
|
||||
@ -387,9 +650,21 @@ def qt_major_version():
|
||||
def qt_copy_lib(lib, dir):
|
||||
qt_dir = get_env("QT_DEPLOY")
|
||||
if ("windows" == host_platform()):
|
||||
copy_lib(qt_dir, dir, lib)
|
||||
if ("" == qt_dst_postfix()):
|
||||
copy_lib(qt_dir, dir, lib)
|
||||
else:
|
||||
copy_lib(qt_dir, dir, lib + "d")
|
||||
else:
|
||||
copy_file(qt_dir + "/../lib/lib" + lib + ".so." + qt_version(), dir + "/lib" + lib + ".so." + qt_major_version())
|
||||
src_file = qt_dir + "/../lib/lib" + lib + ".so." + qt_version()
|
||||
if (is_file(src_file)):
|
||||
copy_file(src_file, dir + "/lib" + lib + ".so." + qt_major_version())
|
||||
else:
|
||||
libFramework = lib
|
||||
libFramework = libFramework.replace("Qt5", "Qt")
|
||||
libFramework = libFramework.replace("Qt6", "Qt")
|
||||
libFramework += ".framework"
|
||||
if (is_dir(qt_dir + "/../lib/" + libFramework)):
|
||||
copy_dir(qt_dir + "/../lib/" + libFramework, dir + "/" + libFramework)
|
||||
return
|
||||
|
||||
def _check_icu_common(dir, out):
|
||||
@ -427,8 +702,12 @@ def qt_copy_plugin(name, out):
|
||||
for file in glob.glob(out + "/" + name + "/*d.dll"):
|
||||
fileCheck = file[0:-5] + ".dll"
|
||||
if is_file(fileCheck):
|
||||
delete_file(file)
|
||||
|
||||
if ("" == qt_dst_postfix()):
|
||||
delete_file(file)
|
||||
else:
|
||||
delete_file(fileCheck)
|
||||
for file in glob.glob(out + "/" + name + "/*.pdb"):
|
||||
delete_file(file)
|
||||
return
|
||||
|
||||
def qt_dst_postfix():
|
||||
@ -465,9 +744,9 @@ def generate_doctrenderer_config(path, root, product, vendor = ""):
|
||||
content += ("<file>" + root + "sdkjs/common/Native/jquery_native.js</file>\n")
|
||||
|
||||
if ("server" != product):
|
||||
content += ("<file>" + root + "sdkjs/common/AllFonts.js</file>\n")
|
||||
content += ("<allfonts>" + root + "sdkjs/common/AllFonts.js</allfonts>\n")
|
||||
else:
|
||||
content += ("<file>./AllFonts.js</file>\n")
|
||||
content += ("<allfonts>./AllFonts.js</allfonts>\n")
|
||||
|
||||
vendor_dir = vendor
|
||||
if ("" == vendor_dir):
|
||||
@ -475,27 +754,13 @@ def generate_doctrenderer_config(path, root, product, vendor = ""):
|
||||
vendor_dir = root + vendor_dir + "/vendor/"
|
||||
|
||||
content += ("<file>" + vendor_dir + "xregexp/xregexp-all-min.js</file>\n")
|
||||
content += ("<htmlfile>" + vendor_dir + "jquery/jquery.min.js</htmlfile>\n")
|
||||
content += ("<sdkjs>" + root + "sdkjs</sdkjs>\n")
|
||||
|
||||
content += "<DoctSdk>\n"
|
||||
content += ("<file>" + root + "sdkjs/word/sdk-all-min.js</file>\n")
|
||||
content += ("<file>" + root + "sdkjs/common/libfont/js/fonts.js</file>\n")
|
||||
content += ("<file>" + root + "sdkjs/word/sdk-all.js</file>\n")
|
||||
content += "</DoctSdk>\n"
|
||||
content += "<PpttSdk>\n"
|
||||
content += ("<file>" + root + "sdkjs/slide/sdk-all-min.js</file>\n")
|
||||
content += ("<file>" + root + "sdkjs/common/libfont/js/fonts.js</file>\n")
|
||||
content += ("<file>" + root + "sdkjs/slide/sdk-all.js</file>\n")
|
||||
content += "</PpttSdk>\n"
|
||||
content += "<XlstSdk>\n"
|
||||
content += ("<file>" + root + "sdkjs/cell/sdk-all-min.js</file>\n")
|
||||
content += ("<file>" + root + "sdkjs/common/libfont/js/fonts.js</file>\n")
|
||||
content += ("<file>" + root + "sdkjs/cell/sdk-all.js</file>\n")
|
||||
content += "</XlstSdk>\n"
|
||||
|
||||
if ("desktop" == product):
|
||||
content += "<htmlnoxvfb/>\n"
|
||||
content += "<htmlfileinternal>./../</htmlfileinternal>\n"
|
||||
if (False): # old html file
|
||||
content += ("<htmlfile>" + vendor_dir + "jquery/jquery.min.js</htmlfile>\n")
|
||||
if ("desktop" == product):
|
||||
content += "<htmlnoxvfb/>\n"
|
||||
content += "<htmlfileinternal>./../</htmlfileinternal>\n"
|
||||
|
||||
content += "</Settings>"
|
||||
|
||||
@ -515,9 +780,9 @@ def generate_plist(path):
|
||||
bundle_version_natural = readFile(get_script_dir() + "/../../core/Common/version.txt").split(".")
|
||||
bundle_version = []
|
||||
for n in bundle_version_natural:
|
||||
bundle_version.append("255" if int(n) > 255 else n)
|
||||
bundle_version.append(n)
|
||||
|
||||
for file in glob.glob(path + "/*.framework"):
|
||||
for file in glob.glob(path + '/**/*.framework', recursive=True):
|
||||
if not is_dir(file):
|
||||
continue
|
||||
name = os.path.basename(file)
|
||||
@ -532,7 +797,7 @@ def generate_plist(path):
|
||||
content += "\t<key>CFBundleGetInfoString</key>\n"
|
||||
content += "\t<string>Created by " + bundle_creator + "</string>\n"
|
||||
content += "\t<key>CFBundleIdentifier</key>\n"
|
||||
content += "\t<string>" + bundle_id_url + name + "</string>\n"
|
||||
content += "\t<string>" + bundle_id_url + correct_bundle_identifier(name) + "</string>\n"
|
||||
content += "\t<key>CFBundlePackageType</key>\n"
|
||||
content += "\t<string>FMWK</string>\n"
|
||||
content += "\t<key>CFBundleShortVersionString</key>\n"
|
||||
@ -542,7 +807,7 @@ def generate_plist(path):
|
||||
content += "\t<key>CFBundleVersion</key>\n"
|
||||
content += "\t<string>" + bundle_version[0] + "." + bundle_version[1] + "." + bundle_version[2] + "</string>\n"
|
||||
content += "\t<key>MinimumOSVersion</key>\n"
|
||||
content += "\t<string>10.0</string>\n"
|
||||
content += "\t<string>13.0</string>\n"
|
||||
content += "</dict>\n"
|
||||
content += "</plist>"
|
||||
|
||||
@ -556,76 +821,40 @@ def generate_plist(path):
|
||||
|
||||
return
|
||||
|
||||
def sdkjs_addons_checkout():
|
||||
def correct_bundle_identifier(bundle_identifier):
|
||||
return re.sub("[^a-zA-Z0-9\.\-]", "-", bundle_identifier)
|
||||
|
||||
def get_sdkjs_addons():
|
||||
result = {}
|
||||
if ("" == config.option("sdkjs-addons")):
|
||||
return
|
||||
return result
|
||||
addons_list = config.option("sdkjs-addons").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.sdkjs_addons:
|
||||
git_update(config.sdkjs_addons[name], True)
|
||||
result[name] = [True, False]
|
||||
|
||||
if ("" != config.option("sdkjs-addons-desktop")):
|
||||
addons_list = config.option("sdkjs-addons-desktop").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.sdkjs_addons_desktop:
|
||||
git_update(config.sdkjs_addons_desktop[name], True)
|
||||
return
|
||||
result[name] = [True, False]
|
||||
return result
|
||||
|
||||
def server_addons_checkout():
|
||||
def get_server_addons():
|
||||
result = {}
|
||||
if ("" == config.option("server-addons")):
|
||||
return
|
||||
return result
|
||||
addons_list = config.option("server-addons").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.server_addons:
|
||||
git_update(config.server_addons[name], True)
|
||||
return
|
||||
result[name] = [True, False]
|
||||
return result
|
||||
|
||||
def web_apps_addons_checkout():
|
||||
def get_web_apps_addons():
|
||||
result = {}
|
||||
if ("" == config.option("web-apps-addons")):
|
||||
return
|
||||
return result
|
||||
addons_list = config.option("web-apps-addons").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.web_apps_addons:
|
||||
git_update(config.web_apps_addons[name], True)
|
||||
return
|
||||
|
||||
def sdkjs_plugins_checkout():
|
||||
plugins_list_config = config.option("sdkjs-plugin")
|
||||
if ("" == plugins_list_config):
|
||||
return
|
||||
plugins_list = plugins_list_config.rsplit(", ")
|
||||
plugins_dir = get_script_dir() + "/../../sdkjs-plugins"
|
||||
if is_dir(plugins_dir + "/.git"):
|
||||
delete_dir_with_access_error(plugins_dir);
|
||||
delete_dir(plugins_dir)
|
||||
if not is_dir(plugins_dir):
|
||||
create_dir(plugins_dir)
|
||||
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(plugins_dir)
|
||||
for name in plugins_list:
|
||||
git_update("plugin-" + name, True, True)
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
def sdkjs_plugins_server_checkout():
|
||||
plugins_list_config = config.option("sdkjs-plugin-server")
|
||||
if ("" == plugins_list_config):
|
||||
return
|
||||
plugins_list = plugins_list_config.rsplit(", ")
|
||||
plugins_dir = get_script_dir() + "/../../sdkjs-plugins"
|
||||
if is_dir(plugins_dir + "/.git"):
|
||||
delete_dir_with_access_error(plugins_dir);
|
||||
delete_dir(plugins_dir)
|
||||
if not is_dir(plugins_dir):
|
||||
create_dir(plugins_dir)
|
||||
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(plugins_dir)
|
||||
for name in plugins_list:
|
||||
git_update("plugin-" + name, True, True)
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
result[name] = [True, False]
|
||||
return result
|
||||
|
||||
def sdkjs_addons_param():
|
||||
if ("" == config.option("sdkjs-addons")):
|
||||
@ -633,8 +862,7 @@ def sdkjs_addons_param():
|
||||
params = []
|
||||
addons_list = config.option("sdkjs-addons").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.sdkjs_addons:
|
||||
params.append("--addon=" + config.sdkjs_addons[name])
|
||||
params.append("--addon=" + name)
|
||||
return params
|
||||
|
||||
def sdkjs_addons_desktop_param():
|
||||
@ -643,8 +871,7 @@ def sdkjs_addons_desktop_param():
|
||||
params = []
|
||||
addons_list = config.option("sdkjs-addons-desktop").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.sdkjs_addons_desktop:
|
||||
params.append("--addon=" + config.sdkjs_addons_desktop[name])
|
||||
params.append("--addon=" + name)
|
||||
return params
|
||||
|
||||
def server_addons_param():
|
||||
@ -653,8 +880,7 @@ def server_addons_param():
|
||||
params = []
|
||||
addons_list = config.option("server-addons").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.server_addons:
|
||||
params.append("--addon=" + config.server_addons[name])
|
||||
params.append("--addon=" + name)
|
||||
return params
|
||||
|
||||
def web_apps_addons_param():
|
||||
@ -663,8 +889,7 @@ def web_apps_addons_param():
|
||||
params = []
|
||||
addons_list = config.option("web-apps-addons").rsplit(", ")
|
||||
for name in addons_list:
|
||||
if name in config.web_apps_addons:
|
||||
params.append("--addon=" + config.web_apps_addons[name])
|
||||
params.append("--addon=" + name)
|
||||
return params
|
||||
|
||||
# common apps
|
||||
@ -675,9 +900,15 @@ def extract(src, dst):
|
||||
app = "7za" if ("mac" == host_platform()) else "7z"
|
||||
return cmd_exe(app, ["x", "-y", src, "-o" + dst])
|
||||
|
||||
def extract_unicode(src, dst):
|
||||
if "windows" == host_platform():
|
||||
run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"])
|
||||
return
|
||||
return extract(src, dst)
|
||||
|
||||
def archive_folder(src, dst):
|
||||
app = "7za" if ("mac" == host_platform()) else "7z"
|
||||
return cmd_exe(app, ["a", "-r", dst, src])
|
||||
return cmd_exe(app, ["a", dst, src])
|
||||
|
||||
# windows vcvarsall
|
||||
def _call_vcvarsall_and_return_env(arch):
|
||||
@ -726,17 +957,34 @@ def vcvarsall_end():
|
||||
return
|
||||
|
||||
def run_as_bat(lines, is_no_errors=False):
|
||||
name = "tmp.bat"
|
||||
name = "tmp.bat" if ("windows" == host_platform()) else "./tmp.sh"
|
||||
content = "\n".join(lines)
|
||||
|
||||
file = codecs.open(name, "w", "utf-8")
|
||||
file.write(content)
|
||||
file.close()
|
||||
|
||||
if ("windows" != host_platform()):
|
||||
os.system("chmod +x " + name)
|
||||
|
||||
cmd(name, [], is_no_errors)
|
||||
delete_file(name)
|
||||
return
|
||||
|
||||
def run_as_bat_win_isolate(lines, is_no_errors=False):
|
||||
file = codecs.open("tmp.bat", "w", "utf-8")
|
||||
file.write("\n".join(lines))
|
||||
file.close()
|
||||
|
||||
file2 = codecs.open("tmp2.bat", "w", "utf-8")
|
||||
file2.write("start /wait /min tmp.bat")
|
||||
file2.close()
|
||||
|
||||
cmd("tmp2.bat", [], is_no_errors)
|
||||
delete_file("tmp.bat")
|
||||
delete_file("tmp2.bat")
|
||||
return
|
||||
|
||||
def save_as_script(path, lines):
|
||||
content = "\n".join(lines)
|
||||
|
||||
@ -758,32 +1006,23 @@ def join_scripts(files, path):
|
||||
return
|
||||
|
||||
def get_file_last_modified_url(url):
|
||||
curl_command = 'curl --head %s' % (url)
|
||||
popen = subprocess.Popen(curl_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
retvalue = ""
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
|
||||
lines = stdout.strip().decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
if ':' not in line:
|
||||
continue
|
||||
line = line.strip()
|
||||
key, value = line.split(':', 1)
|
||||
key = key.upper()
|
||||
if key == "LAST-MODIFIED":
|
||||
retvalue = value
|
||||
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
|
||||
curl_command = 'curl --head %s' % (url)
|
||||
lines = run_command(curl_command)['stdout'].split("\n")
|
||||
for line in lines:
|
||||
if ':' not in line:
|
||||
continue
|
||||
line = line.strip()
|
||||
key, value = line.split(':', 1)
|
||||
key = key.upper()
|
||||
if key == "LAST-MODIFIED":
|
||||
retvalue = value
|
||||
|
||||
return retvalue
|
||||
|
||||
def mac_correct_rpath_binary(path, libs):
|
||||
for lib in libs:
|
||||
cmd("install_name_tool", ["-change", "lib" + lib + ".dylib", "@rpath/lib" + lib + ".dylib", path])
|
||||
cmd("install_name_tool", ["-change", "lib" + lib + ".dylib", "@rpath/lib" + lib + ".dylib", path], True)
|
||||
return
|
||||
|
||||
def mac_correct_rpath_library(name, libs):
|
||||
@ -795,18 +1034,21 @@ def mac_correct_rpath_x2t(dir):
|
||||
mac_correct_rpath_library("icudata.58", [])
|
||||
mac_correct_rpath_library("icuuc.58", ["icudata.58"])
|
||||
mac_correct_rpath_library("UnicodeConverter", ["icuuc.58", "icudata.58"])
|
||||
mac_correct_rpath_library("kernel", [])
|
||||
mac_correct_rpath_library("kernel", ["UnicodeConverter"])
|
||||
mac_correct_rpath_library("kernel_network", ["UnicodeConverter", "kernel"])
|
||||
mac_correct_rpath_library("graphics", ["UnicodeConverter", "kernel"])
|
||||
mac_correct_rpath_library("doctrenderer", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("HtmlFile", ["UnicodeConverter", "kernel"])
|
||||
mac_correct_rpath_library("doctrenderer", ["UnicodeConverter", "kernel", "kernel_network", "graphics"])
|
||||
mac_correct_rpath_library("HtmlFile2", ["UnicodeConverter", "kernel", "kernel_network", "graphics"])
|
||||
mac_correct_rpath_library("EpubFile", ["UnicodeConverter", "kernel", "HtmlFile2", "graphics"])
|
||||
mac_correct_rpath_library("Fb2File", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("HtmlRenderer", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("PdfWriter", ["UnicodeConverter", "kernel", "graphics"])
|
||||
mac_correct_rpath_library("DjVuFile", ["kernel", "UnicodeConverter", "graphics", "PdfWriter"])
|
||||
mac_correct_rpath_library("PdfReader", ["kernel", "UnicodeConverter", "graphics", "PdfWriter", "HtmlRenderer"])
|
||||
mac_correct_rpath_library("XpsFile", ["kernel", "UnicodeConverter", "graphics", "PdfWriter"])
|
||||
mac_correct_rpath_library("PdfFile", ["UnicodeConverter", "kernel", "graphics", "kernel_network"])
|
||||
mac_correct_rpath_library("DjVuFile", ["UnicodeConverter", "kernel", "graphics", "PdfFile"])
|
||||
mac_correct_rpath_library("XpsFile", ["UnicodeConverter", "kernel", "graphics", "PdfFile"])
|
||||
mac_correct_rpath_library("DocxRenderer", ["UnicodeConverter", "kernel", "graphics"])
|
||||
cmd("chmod", ["-v", "+x", "./x2t"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./x2t"], True)
|
||||
mac_correct_rpath_binary("./x2t", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics", "PdfWriter", "HtmlRenderer", "PdfReader", "XpsFile", "DjVuFile", "HtmlFile", "doctrenderer"])
|
||||
mac_correct_rpath_binary("./x2t", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
|
||||
if is_file("./allfontsgen"):
|
||||
cmd("chmod", ["-v", "+x", "./allfontsgen"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./allfontsgen"], True)
|
||||
@ -814,7 +1056,16 @@ def mac_correct_rpath_x2t(dir):
|
||||
if is_file("./allthemesgen"):
|
||||
cmd("chmod", ["-v", "+x", "./allthemesgen"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./allthemesgen"], True)
|
||||
mac_correct_rpath_binary("./allthemesgen", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics", "doctrenderer"])
|
||||
mac_correct_rpath_binary("./allthemesgen", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "graphics", "kernel_network", "doctrenderer"])
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
def mac_correct_rpath_docbuilder(dir):
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(dir)
|
||||
cmd("chmod", ["-v", "+x", "./docbuilder"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path", "./docbuilder"], True)
|
||||
mac_correct_rpath_binary("./docbuilder", ["icudata.58", "icuuc.58", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "HtmlFile2", "Fb2File", "EpubFile", "doctrenderer", "DocxRenderer"])
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
@ -824,9 +1075,9 @@ def mac_correct_rpath_desktop(dir):
|
||||
os.chdir(dir)
|
||||
mac_correct_rpath_library("hunspell", [])
|
||||
mac_correct_rpath_library("ooxmlsignature", ["kernel"])
|
||||
mac_correct_rpath_library("ascdocumentscore", ["UnicodeConverter", "kernel", "graphics", "PdfWriter", "HtmlRenderer", "PdfReader", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
mac_correct_rpath_library("ascdocumentscore", ["UnicodeConverter", "kernel", "graphics", "kernel_network", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
cmd("install_name_tool", ["-change", "@executable_path/../Frameworks/Chromium Embedded Framework.framework/Chromium Embedded Framework", "@rpath/Chromium Embedded Framework.framework/Chromium Embedded Framework", "libascdocumentscore.dylib"])
|
||||
mac_correct_rpath_binary("./editors_helper.app/Contents/MacOS/editors_helper", ["ascdocumentscore", "UnicodeConverter", "kernel", "graphics", "PdfWriter", "HtmlRenderer", "PdfReader", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
mac_correct_rpath_binary("./editors_helper.app/Contents/MacOS/editors_helper", ["ascdocumentscore", "UnicodeConverter", "kernel", "kernel_network", "graphics", "PdfFile", "HtmlRenderer", "XpsFile", "DjVuFile", "hunspell", "ooxmlsignature"])
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path/../../../../Frameworks", "./editors_helper.app/Contents/MacOS/editors_helper"], True)
|
||||
cmd("install_name_tool", ["-add_rpath", "@executable_path/../../../../Resources/converter", "./editors_helper.app/Contents/MacOS/editors_helper"], True)
|
||||
cmd("chmod", ["-v", "+x", "./editors_helper.app/Contents/MacOS/editors_helper"])
|
||||
@ -861,7 +1112,7 @@ def common_check_version(name, good_version, clean_func):
|
||||
return
|
||||
|
||||
def copy_sdkjs_plugin(src_dir, dst_dir, name, is_name_as_guid=False, is_desktop_local=False):
|
||||
src_dir_path = src_dir + "/plugin-" + name
|
||||
src_dir_path = src_dir + "/" + name
|
||||
if not is_dir(src_dir_path):
|
||||
src_dir_path = src_dir + "/" + name
|
||||
if not is_file(src_dir_path + "/config.json"):
|
||||
@ -874,6 +1125,9 @@ def copy_sdkjs_plugin(src_dir, dst_dir, name, is_name_as_guid=False, is_desktop_
|
||||
delete_dir(dst_dir_path)
|
||||
create_dir(dst_dir_path)
|
||||
copy_dir_content(src_dir_path, dst_dir_path, "", ".git")
|
||||
if is_desktop_local:
|
||||
for file in glob.glob(dst_dir_path + "/*.html"):
|
||||
replaceInFile(file, "https://onlyoffice.github.io/sdkjs-plugins/", "../")
|
||||
return
|
||||
if not is_file(src_dir_path + "/config.json"):
|
||||
return
|
||||
@ -894,7 +1148,7 @@ def copy_sdkjs_plugin(src_dir, dst_dir, name, is_name_as_guid=False, is_desktop_
|
||||
return
|
||||
|
||||
def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False):
|
||||
plugins_dir = get_script_dir() + "/../../sdkjs-plugins"
|
||||
plugins_dir = get_script_dir() + "/../../onlyoffice.github.io/sdkjs-plugins/content"
|
||||
plugins_list_config = config.option("sdkjs-plugin")
|
||||
if ("" == plugins_list_config):
|
||||
return
|
||||
@ -904,7 +1158,7 @@ def copy_sdkjs_plugins(dst_dir, is_name_as_guid=False, is_desktop_local=False):
|
||||
return
|
||||
|
||||
def copy_sdkjs_plugins_server(dst_dir, is_name_as_guid=False, is_desktop_local=False):
|
||||
plugins_dir = get_script_dir() + "/../../sdkjs-plugins"
|
||||
plugins_dir = get_script_dir() + "/../../onlyoffice.github.io/sdkjs-plugins/content"
|
||||
plugins_list_config = config.option("sdkjs-plugin-server")
|
||||
if ("" == plugins_list_config):
|
||||
return
|
||||
@ -932,16 +1186,7 @@ def support_old_versions_plugins(out_dir):
|
||||
return
|
||||
|
||||
def get_xcode_major_version():
|
||||
popen = subprocess.Popen("xcodebuild -version", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
version = ""
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
version = stdout.strip().decode("utf-8")
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
|
||||
version = run_command("xcodebuild -version")['stdout']
|
||||
return int(version.split('.')[0][6:])
|
||||
|
||||
def hack_xcode_ios():
|
||||
@ -966,3 +1211,180 @@ def hack_xcode_ios():
|
||||
with open(get_path(qmake_spec_file), "w") as file:
|
||||
file.write(filedata)
|
||||
return
|
||||
|
||||
def find_mac_sdk_version():
|
||||
sdk_dir = run_command("xcode-select -print-path")['stdout']
|
||||
sdk_dir = os.path.join(sdk_dir, "Platforms/MacOSX.platform/Developer/SDKs")
|
||||
sdks = [re.findall('^MacOSX(1\d\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
|
||||
sdks = [s[0] for s in sdks if s]
|
||||
return sdks[0]
|
||||
|
||||
def find_mac_sdk():
|
||||
return run_command("xcrun --sdk macosx --show-sdk-path")['stdout']
|
||||
|
||||
def get_mac_sdk_version_number():
|
||||
ver = find_mac_sdk_version()
|
||||
ver_arr = ver.split(".")
|
||||
if 0 == len(ver_arr):
|
||||
return 0
|
||||
if 1 == len(ver_arr):
|
||||
return 1000 * int(ver_arr[0])
|
||||
return 1000 * int(ver_arr[0]) + int(ver_arr[1])
|
||||
|
||||
def make_sln(directory, args, is_no_errors):
|
||||
programFilesDir = get_env("ProgramFiles")
|
||||
if ("" != get_env("ProgramFiles(x86)")):
|
||||
programFilesDir = get_env("ProgramFiles(x86)")
|
||||
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
|
||||
if ("2019" == config.option("vs-version")):
|
||||
dev_path = programFilesDir + "\\Microsoft Visual Studio\\2019\\Community\\Common7\\IDE"
|
||||
if not is_dir(dev_path):
|
||||
dev_path = programFilesDir + "\\Microsoft Visual Studio\\2019\\Enterprise\\Common7\\IDE"
|
||||
if not is_dir(dev_path):
|
||||
dev_path = programFilesDir + "\\Microsoft Visual Studio\\2019\\Professional\\Common7\\IDE"
|
||||
|
||||
old_env = dict(os.environ)
|
||||
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
|
||||
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(directory)
|
||||
run_as_bat(["call devenv " + " ".join(args)], is_no_errors)
|
||||
os.chdir(old_cur)
|
||||
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
def make_sln_project(directory, sln_path):
|
||||
args = []
|
||||
args.append(sln_path)
|
||||
args.append("/Rebuild")
|
||||
if (config.check_option("platform", "win_64")):
|
||||
make_sln(directory, args + ["\"Release|x64\""], True)
|
||||
if True:#(config.check_option("platform", "win_32")):
|
||||
make_sln(directory, args + ["\"Release|Win32\""], True)
|
||||
return
|
||||
|
||||
def get_android_sdk_home():
|
||||
ndk_root_path = get_env("ANDROID_NDK_ROOT")
|
||||
if (-1 != ndk_root_path.find("/ndk/")):
|
||||
return ndk_root_path + "/../.."
|
||||
return ndk_root_path + "/.."
|
||||
|
||||
def readFileLicence(path):
|
||||
content = readFile(path)
|
||||
index = content.find("*/")
|
||||
if index >= 0:
|
||||
return content[0:index+2]
|
||||
return ""
|
||||
|
||||
def replaceFileLicence(path, license):
|
||||
old_licence = readFileLicence(path)
|
||||
replaceInFile(path, old_licence, license)
|
||||
return
|
||||
|
||||
def copy_v8_files(core_dir, deploy_dir, platform, is_xp=False):
|
||||
if (-1 != config.option("config").find("use_javascript_core")):
|
||||
return
|
||||
directory_v8 = core_dir + "/Common/3dParty"
|
||||
if is_xp:
|
||||
directory_v8 += "/v8/v8_xp"
|
||||
|
||||
if (-1 != config.option("config").lower().find("v8_version_89")) and not is_xp:
|
||||
directory_v8 += "/v8_89/v8/out.gn/"
|
||||
else:
|
||||
directory_v8 += "/v8/v8/out.gn/"
|
||||
|
||||
if is_xp:
|
||||
copy_files(directory_v8 + platform + "/release/icudt*.dll", deploy_dir + "/")
|
||||
return
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
copy_files(directory_v8 + platform + "/release/icudt*.dat", deploy_dir + "/")
|
||||
else:
|
||||
copy_files(directory_v8 + platform + "/icudt*.dat", deploy_dir + "/")
|
||||
return
|
||||
|
||||
def clone_marketplace_plugin(out_dir, is_name_as_guid=False):
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(out_dir)
|
||||
git_update("onlyoffice.github.io", False, True)
|
||||
os.chdir(old_cur)
|
||||
|
||||
dst_dir_name = "marketplace"
|
||||
if is_name_as_guid:
|
||||
config_content = readFile(out_dir + "/onlyoffice.github.io/store/plugin/config.json")
|
||||
index_start = config_content.find("\"asc.{")
|
||||
index_start += 5
|
||||
index_end = config_content.find("}", index_start)
|
||||
index_end += 1
|
||||
guid = config_content[index_start:index_end]
|
||||
dst_dir_name = guid
|
||||
|
||||
dst_dir_path = out_dir + "/" + dst_dir_name
|
||||
|
||||
if is_dir(dst_dir_path):
|
||||
delete_dir(dst_dir_path)
|
||||
|
||||
copy_dir(out_dir + "/onlyoffice.github.io/store/plugin", dst_dir_path)
|
||||
delete_dir_with_access_error(out_dir + "/onlyoffice.github.io")
|
||||
return
|
||||
|
||||
def correctPathForBuilder(path):
|
||||
replace_value = "../../../build/"
|
||||
if (config.option("branding") != ""):
|
||||
replace_value += (config.option("branding") + "/")
|
||||
replace_value += "lib/"
|
||||
if (config.check_option("config", "debug")):
|
||||
replace_value += ("debug/")
|
||||
if (replace_value == "../../../build/lib/"):
|
||||
return ""
|
||||
new_path = path + ".bak"
|
||||
copy_file(path, new_path)
|
||||
replaceInFile(path, "../../../build/lib/", replace_value)
|
||||
return new_path
|
||||
|
||||
def restorePathForBuilder(new_path):
|
||||
if ("" == new_path):
|
||||
return
|
||||
old_path = new_path[:-4]
|
||||
delete_file(old_path)
|
||||
copy_file(new_path, old_path)
|
||||
delete_file(new_path);
|
||||
return
|
||||
|
||||
def generate_check_linux_system(build_tools_dir, out_dir):
|
||||
create_dir(out_dir + "/.system")
|
||||
copy_file(build_tools_dir + "/tools/linux/check_system/check.sh", out_dir + "/.system/check.sh")
|
||||
copy_file(build_tools_dir + "/tools/linux/check_system/libstdc++.so.6", out_dir + "/.system/libstdc++.so.6")
|
||||
return
|
||||
|
||||
def convert_ios_framework_to_xcframework(folder, lib):
|
||||
cur_dir = os.getcwd()
|
||||
os.chdir(folder)
|
||||
|
||||
create_dir(lib + "_xc_tmp")
|
||||
create_dir(lib + "_xc_tmp/iphoneos")
|
||||
create_dir(lib + "_xc_tmp/iphonesimulator")
|
||||
copy_dir(lib + ".framework", lib + "_xc_tmp/iphoneos/" + lib + ".framework")
|
||||
copy_dir(lib + ".framework", lib + "_xc_tmp/iphonesimulator/" + lib + ".framework")
|
||||
|
||||
cmd("xcrun", ["lipo", "-remove", "x86_64", "./" + lib + "_xc_tmp/iphoneos/" + lib + ".framework/" + lib,
|
||||
"-o", "./" + lib + "_xc_tmp/iphoneos/" + lib + ".framework/" + lib])
|
||||
cmd("xcrun", ["lipo", "-remove", "arm64", "./" + lib + "_xc_tmp/iphonesimulator/" + lib + ".framework/" + lib,
|
||||
"-o", "./" + lib + "_xc_tmp/iphonesimulator/" + lib + ".framework/" + lib])
|
||||
|
||||
cmd("xcodebuild", ["-create-xcframework",
|
||||
"-framework", "./" + lib + "_xc_tmp/iphoneos/" + lib + ".framework/",
|
||||
"-framework", "./" + lib + "_xc_tmp/iphonesimulator/" + lib + ".framework/",
|
||||
"-output", lib + ".xcframework"])
|
||||
|
||||
delete_dir(lib + "_xc_tmp")
|
||||
|
||||
os.chdir(cur_dir)
|
||||
return
|
||||
|
||||
def convert_ios_framework_to_xcframework_folder(folder, libs):
|
||||
for lib in libs:
|
||||
convert_ios_framework_to_xcframework(folder, lib)
|
||||
return
|
||||
|
||||
@ -3,8 +3,9 @@
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import multiprocessing
|
||||
|
||||
def make_pro_file(makefiles_dir, pro_file):
|
||||
def make_pro_file(makefiles_dir, pro_file, qmake_config_addon=""):
|
||||
platforms = config.option("platform").split()
|
||||
for platform in platforms:
|
||||
if not platform in config.platforms:
|
||||
@ -13,7 +14,7 @@ def make_pro_file(makefiles_dir, pro_file):
|
||||
print("------------------------------------------")
|
||||
print("BUILD_PLATFORM: " + platform)
|
||||
print("------------------------------------------")
|
||||
old_env = os.environ.copy()
|
||||
old_env = dict(os.environ)
|
||||
|
||||
# if you need change output libraries path - set the env variable
|
||||
# base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
|
||||
@ -29,10 +30,7 @@ def make_pro_file(makefiles_dir, pro_file):
|
||||
new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
|
||||
new_path += old_path
|
||||
base.set_env("PATH", new_path)
|
||||
if ("android_arm64_v8a" == platform):
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
|
||||
else:
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-16")
|
||||
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
|
||||
|
||||
if (-1 != platform.find("ios")):
|
||||
base.hack_xcode_ios()
|
||||
@ -49,11 +47,13 @@ def make_pro_file(makefiles_dir, pro_file):
|
||||
|
||||
# qmake CONFIG+=...
|
||||
config_param = base.qt_config(platform)
|
||||
if ("" != qmake_config_addon):
|
||||
config_param += (" " + qmake_config_addon)
|
||||
|
||||
# qmake ADDON
|
||||
qmake_addon = []
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon.append(config.option("qmake_addon"))
|
||||
qmake_addon = config.option("qmake_addon").split()
|
||||
|
||||
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
|
||||
print("THIS PLATFORM IS NOT SUPPORTED")
|
||||
@ -61,36 +61,58 @@ def make_pro_file(makefiles_dir, pro_file):
|
||||
|
||||
# non windows platform
|
||||
if not base.is_windows():
|
||||
if ("1" == config.option("clean")):
|
||||
base.cmd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
|
||||
if base.is_file(makefiles_dir + "/build.makefile_" + file_suff):
|
||||
base.delete_file(makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
base.cmd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
|
||||
print("make file: " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if ("1" == config.option("clean")):
|
||||
base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if not base.is_file(pro_file):
|
||||
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff, "-j" + str(multiprocessing.cpu_count())])
|
||||
else:
|
||||
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
|
||||
else:
|
||||
qmake_bat = []
|
||||
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
|
||||
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_addon_string = ""
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
if ("1" == config.option("clean")):
|
||||
qmake_bat.append("call nmake clean -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_bat.append("call nmake distclean -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_addon_string = ""
|
||||
if ("" != config.option("qmake_addon")):
|
||||
qmake_addon_string = " \"" + config.option("qmake_addon") + "\""
|
||||
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
|
||||
if ("0" != config.option("multiprocess")):
|
||||
qmake_bat.append("set CL=/MP")
|
||||
qmake_bat.append("call nmake -f " + makefiles_dir + "/build.makefile_" + file_suff)
|
||||
base.run_as_bat(qmake_bat)
|
||||
|
||||
os.environ = old_env.copy()
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
|
||||
base.delete_file(".qmake.stash")
|
||||
|
||||
# make build.pro
|
||||
def make():
|
||||
is_no_brandind_build = base.is_file("config")
|
||||
make_pro_file("makefiles", "build.pro")
|
||||
if config.check_option("module", "builder") and base.is_windows() and is_no_brandind_build:
|
||||
base.bash("../core/DesktopEditor/doctrenderer/docbuilder.com/build")
|
||||
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
|
||||
make_pro_file("makefiles", "build.pro", "xcframework_platform_ios_simulator")
|
||||
|
||||
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
|
||||
# check replace
|
||||
new_replace_path = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder.h")
|
||||
if ("2019" == config.option("vs-version")):
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
|
||||
if (True):
|
||||
new_path_net = base.correctPathForBuilder(os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.net/src/docbuilder.net.cpp")
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
|
||||
base.restorePathForBuilder(new_path_net)
|
||||
else:
|
||||
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
|
||||
base.restorePathForBuilder(new_replace_path)
|
||||
return
|
||||
|
||||
@ -2,12 +2,34 @@
|
||||
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
def correct_sdkjs_licence(directory):
|
||||
branding = config.option("branding")
|
||||
if "" == branding or "onlyoffice" == branding:
|
||||
return
|
||||
license = base.readFileLicence(directory + "/word/sdk-all-min.js")
|
||||
base.replaceFileLicence(directory + "/common/Charts/ChartStyles.js", license)
|
||||
base.replaceFileLicence(directory + "/common/hash/hash/engine.js", license)
|
||||
base.replaceFileLicence(directory + "/common/hash/hash/engine_ie.js", license)
|
||||
base.replaceFileLicence(directory + "/common/Native/native.js", license)
|
||||
base.replaceFileLicence(directory + "/common/Native/native_graphics.js", license)
|
||||
base.replaceFileLicence(directory + "/common/spell/spell/spell.js", license)
|
||||
base.replaceFileLicence(directory + "/common/spell/spell/spell_ie.js", license)
|
||||
base.replaceFileLicence(directory + "/pdf/src/engine/drawingfile.js", license)
|
||||
base.replaceFileLicence(directory + "/pdf/src/engine/drawingfile_ie.js", license)
|
||||
base.replaceInFile(directory + "/word/sdk-all-min.js", "onlyoffice-spellchecker", "r7-spellchecker")
|
||||
base.replaceInFile(directory + "/slide/sdk-all-min.js", "onlyoffice-spellchecker", "r7-spellchecker")
|
||||
base.replaceInFile(directory + "/cell/sdk-all-min.js", "onlyoffice-spellchecker", "r7-spellchecker")
|
||||
return
|
||||
|
||||
# make build.pro
|
||||
def make():
|
||||
if ("1" == base.get_env("OO_NO_BUILD_JS")):
|
||||
return
|
||||
|
||||
base.set_env('NODE_ENV', 'production')
|
||||
|
||||
base_dir = base.get_script_dir() + "/.."
|
||||
out_dir = base_dir + "/out/js/";
|
||||
branding = config.option("branding-name")
|
||||
@ -22,12 +44,14 @@ def make():
|
||||
base.create_dir(out_dir + "/builder")
|
||||
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/builder/web-apps")
|
||||
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/builder/sdkjs")
|
||||
correct_sdkjs_licence(out_dir + "/builder/sdkjs")
|
||||
|
||||
# desktop
|
||||
if config.check_option("module", "desktop"):
|
||||
build_sdk_desktop(base_dir + "/../sdkjs/build")
|
||||
base.create_dir(out_dir + "/desktop")
|
||||
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/desktop/sdkjs")
|
||||
correct_sdkjs_licence(out_dir + "/desktop/sdkjs")
|
||||
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/desktop/web-apps")
|
||||
if not base.is_file(out_dir + "/desktop/sdkjs/common/AllFonts.js"):
|
||||
base.copy_file(base_dir + "/../sdkjs/common/HtmlFileInternal/AllFonts.js", out_dir + "/desktop/sdkjs/common/AllFonts.js")
|
||||
@ -44,25 +68,43 @@ def make():
|
||||
|
||||
# mobile
|
||||
if config.check_option("module", "mobile"):
|
||||
build_sdk_native(base_dir + "/../sdkjs/build")
|
||||
build_sdk_native(base_dir + "/../sdkjs/build", False)
|
||||
base.create_dir(out_dir + "/mobile")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs")
|
||||
vendor_dir_src = base_dir + "/../web-apps/vendor/"
|
||||
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
|
||||
# banners
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners_word.js")
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
sdk_dir_src + "common/Native/native.js",
|
||||
sdk_dir_src + "../../common/Native/Wrappers/common.js",
|
||||
sdk_dir_src + "common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners.js")
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/cell/native/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners_cell.js")
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/mobile/sdkjs/banners_slide.js")
|
||||
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/word")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/cell")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
|
||||
base.create_dir(out_dir + "/mobile/sdkjs/slide")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
|
||||
base.join_scripts([out_dir + "/mobile/sdkjs/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
|
||||
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners_word.js")
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners_cell.js")
|
||||
base.delete_file(out_dir + "/mobile/sdkjs/banners_slide.js")
|
||||
return
|
||||
|
||||
# JS build
|
||||
@ -80,30 +122,53 @@ def build_interface(directory):
|
||||
_run_grunt(directory, ["--force"] + base.web_apps_addons_param())
|
||||
return
|
||||
|
||||
def get_build_param(minimize=True):
|
||||
minimize_scripts = minimize
|
||||
if config.check_option("jsminimize", "0"):
|
||||
minimize_scripts = False
|
||||
beta = "true" if config.check_option("beta", "1") else "false"
|
||||
params = ["--beta=" + beta]
|
||||
return params + (["--level=ADVANCED"] if minimize_scripts else ["--level=WHITESPACE_ONLY", "--formatting=PRETTY_PRINT"])
|
||||
|
||||
def build_sdk_desktop(directory):
|
||||
#_run_npm_cli(directory)
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, ["--level=ADVANCED", "--desktop=true"] + base.sdkjs_addons_param() + base.sdkjs_addons_desktop_param())
|
||||
_run_grunt(directory, get_build_param() + ["--desktop=true"] + base.sdkjs_addons_param() + base.sdkjs_addons_desktop_param())
|
||||
return
|
||||
|
||||
def build_sdk_builder(directory):
|
||||
#_run_npm_cli(directory)
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, ["--level=ADVANCED"] + base.sdkjs_addons_param())
|
||||
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param())
|
||||
return
|
||||
|
||||
def build_sdk_native(directory):
|
||||
def build_sdk_native(directory, minimize=True):
|
||||
#_run_npm_cli(directory)
|
||||
_run_npm(directory)
|
||||
_run_grunt(directory, ["--level=ADVANCED", "--mobile=true"] + base.sdkjs_addons_param())
|
||||
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param())
|
||||
return
|
||||
|
||||
def build_js_develop(root_dir):
|
||||
#_run_npm_cli(root_dir + "/sdkjs/build")
|
||||
_run_npm(root_dir + "/sdkjs/build")
|
||||
_run_grunt(root_dir + "/sdkjs/build", ["--level=WHITESPACE_ONLY", "--formatting=PRETTY_PRINT"] + base.sdkjs_addons_param())
|
||||
_run_grunt(root_dir + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
|
||||
_run_npm(root_dir + "/web-apps/build")
|
||||
_run_npm(root_dir + "/web-apps/build/sprites")
|
||||
_run_grunt(root_dir + "/web-apps/build/sprites", [])
|
||||
external_folder = config.option("--external-folder")
|
||||
if (external_folder != ""):
|
||||
external_folder = "/" + external_folder
|
||||
|
||||
_run_npm(root_dir + external_folder + "/sdkjs/build")
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
|
||||
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
|
||||
_run_npm(root_dir + external_folder + "/web-apps/build")
|
||||
_run_npm(root_dir + external_folder + "/web-apps/build/sprites")
|
||||
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
|
||||
|
||||
old_cur = os.getcwd()
|
||||
old_product_version = base.get_env("PRODUCT_VERSION")
|
||||
base.set_env("PRODUCT_VERSION", old_product_version + "d")
|
||||
os.chdir(root_dir + external_folder + "/web-apps/vendor/framework7-react")
|
||||
base.cmd("npm", ["install"])
|
||||
base.cmd("npm", ["run", "deploy-word"])
|
||||
base.cmd("npm", ["run", "deploy-cell"])
|
||||
base.cmd("npm", ["run", "deploy-slide"])
|
||||
base.set_env("PRODUCT_VERSION", old_product_version)
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
87
scripts/build_js_native.py
Normal file
87
scripts/build_js_native.py
Normal file
@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import base
|
||||
import build_js
|
||||
import config
|
||||
import optparse
|
||||
import sys
|
||||
|
||||
arguments = sys.argv[1:]
|
||||
parser = optparse.OptionParser()
|
||||
parser.add_option("--output",
|
||||
action="store", type="string", dest="output",
|
||||
help="Directory for output the build result")
|
||||
parser.add_option("--write-version",
|
||||
action="store_true", dest="write_version", default=False,
|
||||
help="Create version file of build")
|
||||
(options, args) = parser.parse_args(arguments)
|
||||
|
||||
def write_version_files(output_dir):
|
||||
if (base.is_dir(output_dir)):
|
||||
last_version_tag = base.run_command('git describe --abbrev=0 --tags')['stdout']
|
||||
version_numbers=last_version_tag.replace('v', '').split('.')
|
||||
major=(version_numbers[0:1] or ('0',))[0]
|
||||
minor=(version_numbers[1:2] or ('0',))[0]
|
||||
maintenance=(version_numbers[2:3] or ('0',))[0]
|
||||
build=(version_numbers[3:4] or ('0',))[0]
|
||||
full_version='%s.%s.%s.%s' % (major, minor, maintenance, build)
|
||||
|
||||
for name in ['word', 'cell', 'slide']:
|
||||
base.writeFile(output_dir + '/%s/sdk.version' % name, full_version)
|
||||
|
||||
# parse configuration
|
||||
config.parse()
|
||||
config.parse_defaults()
|
||||
config.extend_option("jsminimize", "0")
|
||||
|
||||
branding = config.option("branding-name")
|
||||
if ("" == branding):
|
||||
branding = "onlyoffice"
|
||||
|
||||
base_dir = base.get_script_dir() + "/.."
|
||||
out_dir = base_dir + "/../native-sdk/examples/win-linux-mac/build/sdkjs"
|
||||
|
||||
if (options.output):
|
||||
out_dir = options.output
|
||||
|
||||
base.create_dir(out_dir)
|
||||
|
||||
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
|
||||
vendor_dir_src = base_dir + "/../web-apps/vendor/"
|
||||
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners_word.js")
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/cell/native/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners_cell.js")
|
||||
|
||||
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
|
||||
vendor_dir_src + "underscore/underscore-min.js",
|
||||
base_dir + "/../sdkjs/common/Native/native.js",
|
||||
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
|
||||
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
|
||||
out_dir + "/banners_slide.js")
|
||||
|
||||
base.create_dir(out_dir + "/word")
|
||||
base.join_scripts([out_dir + "/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
|
||||
base.create_dir(out_dir + "/cell")
|
||||
base.join_scripts([out_dir + "/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
|
||||
base.create_dir(out_dir + "/slide")
|
||||
base.join_scripts([out_dir + "/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
|
||||
|
||||
base.delete_file(out_dir + "/banners_word.js")
|
||||
base.delete_file(out_dir + "/banners_cell.js")
|
||||
base.delete_file(out_dir + "/banners_slide.js")
|
||||
|
||||
# Write sdk version mark file if needed
|
||||
if (options.write_version):
|
||||
write_version_files(out_dir)
|
||||
@ -41,10 +41,12 @@ def make():
|
||||
if(base.is_exist(custom_public_key)):
|
||||
base.copy_file(custom_public_key, server_build_dir + '/Common/sources')
|
||||
|
||||
pkg_target = "node10"
|
||||
pkg_target = "node14"
|
||||
|
||||
if ("linux" == base.host_platform()):
|
||||
pkg_target += "-linux"
|
||||
if (-1 != config.option("platform").find("linux_arm64")):
|
||||
pkg_target += "-arm64"
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
pkg_target += "-win"
|
||||
@ -52,8 +54,13 @@ def make():
|
||||
base.cmd_in_dir(server_build_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
|
||||
base.cmd_in_dir(server_build_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
|
||||
base.cmd_in_dir(server_build_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
|
||||
base.cmd_in_dir(server_build_dir + "/SpellChecker", "pkg", [".", "-t", pkg_target, "-o", "spellchecker"])
|
||||
|
||||
example_dir = base.get_script_dir() + "/../../document-server-integration/web/documentserver-example/nodejs"
|
||||
base.delete_dir(example_dir + "/node_modules")
|
||||
base.cmd_in_dir(example_dir, "npm", ["install"])
|
||||
base.cmd_in_dir(example_dir, "pkg", [".", "-t", pkg_target, "-o", "example"])
|
||||
|
||||
def build_server_develop():
|
||||
server_dir = base.get_script_dir() + "/../../server"
|
||||
base.cmd_in_dir(server_dir, "npm", ["install"])
|
||||
base.cmd_in_dir(server_dir, "grunt", ["develop", "-v"] + base.server_addons_param())
|
||||
|
||||
@ -25,8 +25,8 @@ def parse():
|
||||
# all platforms
|
||||
global platforms
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp",
|
||||
"linux_64", "linux_32",
|
||||
"mac_64",
|
||||
"linux_64", "linux_32", "linux_arm64",
|
||||
"mac_64", "mac_arm64",
|
||||
"ios",
|
||||
"android_arm64_v8a", "android_armv7", "android_x86", "android_x86_64"]
|
||||
|
||||
@ -53,42 +53,67 @@ def parse():
|
||||
else:
|
||||
options["platform"] += (" mac_" + bits)
|
||||
|
||||
if ("mac" == host_platform) and check_option("platform", "mac_arm64") and not base.is_os_arm():
|
||||
if not check_option("platform", "mac_64"):
|
||||
options["platform"] = "mac_64 " + options["platform"]
|
||||
|
||||
if ("linux" == host_platform) and check_option("platform", "linux_arm64") and not base.is_os_arm():
|
||||
if not check_option("platform", "linux_64"):
|
||||
# linux_64 binaries need only for desktop
|
||||
if check_option("module", "desktop"):
|
||||
options["platform"] = "linux_64 " + options["platform"]
|
||||
|
||||
if check_option("platform", "xp") and ("windows" == host_platform):
|
||||
options["platform"] += " win_64_xp win_32_xp"
|
||||
|
||||
if check_option("platform", "android"):
|
||||
options["platform"] += " android_arm64_v8a android_armv7 android_x86 android_x86_64"
|
||||
|
||||
#if check_option("platform", "ios"):
|
||||
# extend_option("config", "core_ios_32")
|
||||
|
||||
# check vs-version
|
||||
if ("" == option("vs-version")):
|
||||
options["vs-version"] = "2015"
|
||||
|
||||
# enable v8 8.9 version, if compiler support sources
|
||||
if ("linux" == host_platform) and (5004 <= base.get_gcc_version()) and not check_option("platform", "android"):
|
||||
extend_option("config", "v8_version_89")
|
||||
|
||||
if ("windows" == host_platform) and ("2019" == option("vs-version")):
|
||||
extend_option("config", "v8_version_89")
|
||||
extend_option("config", "vs2019")
|
||||
|
||||
if check_option("platform", "linux_arm64"):
|
||||
extend_option("config", "v8_version_89")
|
||||
|
||||
# check vs-path
|
||||
if ("windows" == host_platform):
|
||||
options["vs-path"] = base.get_env("ProgramFiles") + "/Microsoft Visual Studio 14.0/VC"
|
||||
if ("windows" == host_platform) and ("" == option("vs-path")):
|
||||
programFilesDir = base.get_env("ProgramFiles")
|
||||
if ("" != base.get_env("ProgramFiles(x86)")):
|
||||
options["vs-path"] = base.get_env("ProgramFiles(x86)") + "/Microsoft Visual Studio 14.0/VC"
|
||||
programFilesDir = base.get_env("ProgramFiles(x86)")
|
||||
if ("2015" == options["vs-version"]):
|
||||
options["vs-path"] = programFilesDir + "/Microsoft Visual Studio 14.0/VC"
|
||||
elif ("2019" == options["vs-version"]):
|
||||
if base.is_dir(programFilesDir + "/Microsoft Visual Studio/2019/Enterprise/VC/Auxiliary/Build"):
|
||||
options["vs-path"] = programFilesDir + "/Microsoft Visual Studio/2019/Enterprise/VC/Auxiliary/Build"
|
||||
elif base.is_dir(programFilesDir + "/Microsoft Visual Studio/2019/Professional/VC/Auxiliary/Build"):
|
||||
options["vs-path"] = programFilesDir + "/Microsoft Visual Studio/2019/Professional/VC/Auxiliary/Build"
|
||||
else:
|
||||
options["vs-path"] = programFilesDir + "/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build"
|
||||
|
||||
# check sdkjs-plugins
|
||||
if not "sdkjs-plugin" in options:
|
||||
options["sdkjs-plugin"] = "default"
|
||||
if not "sdkjs-plugin-server" in options:
|
||||
options["sdkjs-plugin-server"] = "default"
|
||||
options["sdkjs-plugin-server"] = "default"
|
||||
|
||||
global sdkjs_addons
|
||||
sdkjs_addons = {}
|
||||
sdkjs_addons["comparison"] = "sdkjs-comparison"
|
||||
sdkjs_addons["content-controls"] = "sdkjs-content-controls"
|
||||
sdkjs_addons["sheet-views"] = "sdkjs-sheet-views"
|
||||
if not "arm64-toolchain-bin" in options:
|
||||
options["arm64-toolchain-bin"] = "/usr/bin"
|
||||
|
||||
global sdkjs_addons_desktop
|
||||
sdkjs_addons_desktop = {}
|
||||
sdkjs_addons_desktop["disable-features"] = "sdkjs-disable-features"
|
||||
|
||||
global server_addons
|
||||
server_addons = {}
|
||||
server_addons["license"] = "server-license"
|
||||
server_addons["lockstorage"] = "server-lockstorage"
|
||||
|
||||
global web_apps_addons
|
||||
web_apps_addons = {}
|
||||
web_apps_addons["mobile"] = "web-apps-mobile"
|
||||
if check_option("config", "bundle_xcframeworks"):
|
||||
if not check_option("config", "bundle_dylibs"):
|
||||
extend_option("config", "bundle_dylibs")
|
||||
|
||||
return
|
||||
|
||||
@ -103,11 +128,14 @@ def check_compiler(platform):
|
||||
return compiler
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
compiler["compiler"] = "msvc2015"
|
||||
compiler["compiler_64"] = "msvc2015_64"
|
||||
compiler["compiler"] = "msvc" + options["vs-version"]
|
||||
compiler["compiler_64"] = "msvc" + options["vs-version"] + "_64"
|
||||
elif (0 == platform.find("linux")):
|
||||
compiler["compiler"] = "gcc"
|
||||
compiler["compiler_64"] = "gcc_64"
|
||||
if (0 == platform.find("linux_arm")) and not base.is_os_arm():
|
||||
compiler["compiler"] = "gcc_arm"
|
||||
compiler["compiler_64"] = "gcc_arm64"
|
||||
elif (0 == platform.find("mac")):
|
||||
compiler["compiler"] = "clang"
|
||||
compiler["compiler_64"] = "clang_64"
|
||||
@ -118,6 +146,12 @@ def check_compiler(platform):
|
||||
compiler["compiler"] = platform
|
||||
compiler["compiler_64"] = platform
|
||||
|
||||
if base.host_platform() == "mac":
|
||||
if not base.is_dir(options["qt-dir"] + "/" + compiler["compiler_64"]):
|
||||
if base.is_dir(options["qt-dir"] + "/macos"):
|
||||
compiler["compiler"] = "macos"
|
||||
compiler["compiler_64"] = "macos"
|
||||
|
||||
return compiler
|
||||
|
||||
def check_option(name, value):
|
||||
@ -145,6 +179,14 @@ def branding():
|
||||
branding = "onlyoffice"
|
||||
return branding
|
||||
|
||||
def is_mobile_platform():
|
||||
all_platforms = option("platform")
|
||||
if (-1 != all_platforms.find("android")):
|
||||
return True
|
||||
if (-1 != all_platforms.find("ios")):
|
||||
return True
|
||||
return False
|
||||
|
||||
def parse_defaults():
|
||||
defaults_path = base.get_script_dir() + "/../defaults"
|
||||
if ("" != option("branding")):
|
||||
@ -166,4 +208,6 @@ def parse_defaults():
|
||||
for name in defaults_options:
|
||||
if name in options:
|
||||
options[name] = options[name].replace("default", defaults_options[name])
|
||||
else:
|
||||
options[name] = defaults_options[name]
|
||||
return
|
||||
|
||||
31
scripts/core_common/make_common.py
Normal file → Executable file
31
scripts/core_common/make_common.py
Normal file → Executable file
@ -6,17 +6,48 @@ sys.path.append('..')
|
||||
|
||||
import config
|
||||
import base
|
||||
import glob
|
||||
|
||||
import boost
|
||||
import cef
|
||||
import icu
|
||||
import openssl
|
||||
import curl
|
||||
import websocket
|
||||
import v8
|
||||
import html2
|
||||
import hunspell
|
||||
import glew
|
||||
import harfbuzz
|
||||
import hyphen
|
||||
import socket_io
|
||||
|
||||
def check_android_ndk_macos_arm(dir):
|
||||
if base.is_dir(dir + "/darwin-x86_64") and not base.is_dir(dir + "/darwin-arm64"):
|
||||
print("copy toolchain... [" + dir + "]")
|
||||
base.copy_dir(dir + "/darwin-x86_64", dir + "/darwin-arm64")
|
||||
return
|
||||
|
||||
|
||||
def make():
|
||||
if (config.check_option("platform", "android")) and (base.host_platform() == "mac") and (base.is_os_arm()):
|
||||
for toolchain in glob.glob(base.get_env("ANDROID_NDK_ROOT") + "/toolchains/*"):
|
||||
if base.is_dir(toolchain):
|
||||
check_android_ndk_macos_arm(toolchain + "/prebuilt")
|
||||
|
||||
boost.make()
|
||||
cef.make()
|
||||
icu.make()
|
||||
openssl.make()
|
||||
v8.make()
|
||||
html2.make()
|
||||
hunspell.make(False)
|
||||
harfbuzz.make()
|
||||
glew.make()
|
||||
hyphen.make()
|
||||
socket_io.make()
|
||||
|
||||
if config.check_option("module", "mobile"):
|
||||
curl.make()
|
||||
websocket.make()
|
||||
return
|
||||
|
||||
@ -67,41 +67,67 @@ def make():
|
||||
# build
|
||||
if ("windows" == base.host_platform()):
|
||||
win_toolset = "msvc-14.0"
|
||||
if (-1 != config.option("platform").find("win_64")) and not base.is_dir("../build/win_64"):
|
||||
base.cmd("bootstrap.bat")
|
||||
win_boot_arg = "vc14"
|
||||
win_vs_version = "vc140"
|
||||
if (config.option("vs-version") == "2019"):
|
||||
win_toolset = "msvc-14.2"
|
||||
win_boot_arg = "vc142"
|
||||
win_vs_version = "vc142"
|
||||
if (-1 != config.option("platform").find("win_64")) and not base.is_file("../build/win_64/lib/libboost_system-" + win_vs_version + "-mt-x64-1_72.lib"):
|
||||
base.cmd("bootstrap.bat", [win_boot_arg])
|
||||
base.cmd("b2.exe", ["headers"])
|
||||
base.cmd("b2.exe", ["--clean"])
|
||||
base.cmd("b2.exe", ["--prefix=./../build/win_64", "link=static", "--with-filesystem", "--with-system", "--with-date_time", "--with-regex", "--toolset=" + win_toolset, "address-model=64", "install"])
|
||||
if (-1 != config.option("platform").find("win_32")) and not base.is_dir("../build/win_32"):
|
||||
base.cmd("bootstrap.bat")
|
||||
if (-1 != config.option("platform").find("win_32")) and not base.is_file("../build/win_32/lib/libboost_system-" + win_vs_version + "-mt-x32-1_72.lib"):
|
||||
base.cmd("bootstrap.bat", [win_boot_arg])
|
||||
base.cmd("b2.exe", ["headers"])
|
||||
base.cmd("b2.exe", ["--clean"])
|
||||
base.cmd("b2.exe", ["--prefix=./../build/win_32", "link=static", "--with-filesystem", "--with-system", "--with-date_time", "--with-regex", "--toolset=" + win_toolset, "address-model=32", "install"])
|
||||
correct_install_includes_win(base_dir, "win_64")
|
||||
correct_install_includes_win(base_dir, "win_32")
|
||||
|
||||
if (-1 != config.option("platform").find("linux")) and not base.is_dir("../build/linux_64"):
|
||||
if config.check_option("platform", "linux_64") and not base.is_dir("../build/linux_64"):
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex"])
|
||||
base.cmd("./b2", ["headers"])
|
||||
base.cmd("./b2", ["--clean"])
|
||||
base.cmd("./b2", ["--prefix=./../build/linux_64", "link=static", "cxxflags=-fPIC", "install"])
|
||||
# TODO: support x86
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
|
||||
clang_correct()
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex"])
|
||||
base.cmd("./b2", ["headers"])
|
||||
base.cmd("./b2", ["--clean"])
|
||||
base.cmd("./b2", ["--prefix=./../build/mac_64", "link=static", "install"])
|
||||
if config.check_option("platform", "linux_arm64") and not base.is_dir("../build/linux_arm64"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "linux_arm64")
|
||||
directory_build = base_dir + "/build/linux_arm64/lib"
|
||||
base.delete_file(directory_build + "/libboost_system.a")
|
||||
base.delete_file(directory_build + "/libboost_system.so")
|
||||
base.copy_files(directory_build + "/linux_arm64/*.a", directory_build)
|
||||
|
||||
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios"):
|
||||
old_cur2 = os.getcwd()
|
||||
clang_correct()
|
||||
os.chdir("../")
|
||||
base.bash("./boost_ios")
|
||||
os.chdir(old_cur2)
|
||||
|
||||
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios_xcframework"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios_simulator", "xcframework_platform_ios_simulator")
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios")
|
||||
|
||||
if (-1 != config.option("platform").find("android")) and not base.is_dir("../build/android"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"])
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "mac_64")
|
||||
directory_build = base_dir + "/build/mac_64/lib"
|
||||
base.delete_file(directory_build + "/libboost_system.a")
|
||||
base.delete_file(directory_build + "/libboost_system.dylib")
|
||||
base.copy_files(directory_build + "/mac_64/*.a", directory_build)
|
||||
|
||||
if (-1 != config.option("platform").find("mac_arm64")) and not base.is_dir("../build/mac_arm64"):
|
||||
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "mac_arm64")
|
||||
directory_build = base_dir + "/build/mac_arm64/lib"
|
||||
base.delete_file(directory_build + "/libboost_system.a")
|
||||
base.delete_file(directory_build + "/libboost_system.dylib")
|
||||
base.copy_files(directory_build + "/mac_arm64/*.a", directory_build)
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
|
||||
@ -7,12 +7,12 @@ import base
|
||||
import os
|
||||
import build
|
||||
|
||||
def make(src_dir, modules):
|
||||
def make(src_dir, modules, build_platform="android", qmake_addon=""):
|
||||
old_cur = os.getcwd()
|
||||
|
||||
print("boost-headers...")
|
||||
base.cmd("./bootstrap.sh", ["--with-libraries=system"])
|
||||
base.cmd("./b2", ["--prefix=./../build/android", "headers", "install"])
|
||||
base.cmd("./b2", ["--prefix=./../build/" + build_platform, "headers", "install"])
|
||||
|
||||
for module in modules:
|
||||
print("boost-module: " + module + " ...")
|
||||
@ -28,7 +28,7 @@ def make(src_dir, modules):
|
||||
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
|
||||
pro_file_content.append("include($$PWD/../../../../../base.pri)")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("MAKEFILE=$$PWD/makefiles/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
|
||||
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
|
||||
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
|
||||
@ -40,9 +40,10 @@ def make(src_dir, modules):
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("SOURCES += $$files($$PWD/src/*.cpp, true)")
|
||||
pro_file_content.append("")
|
||||
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/android/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
|
||||
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
|
||||
build.make_pro_file(module_dir + "/makefiles", module_dir + "/" + module + ".pro")
|
||||
os.chdir(module_dir)
|
||||
build.make_pro_file("./", module + ".pro", qmake_addon)
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
@ -13,9 +13,9 @@ def make():
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64"]
|
||||
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64"]
|
||||
|
||||
url = "http://d2ettrnqo7v976.cloudfront.net/cef/3770/"
|
||||
url = "http://d2ettrnqo7v976.cloudfront.net/cef/4280/"
|
||||
|
||||
for platform in platforms:
|
||||
if not config.check_option("platform", platform):
|
||||
@ -54,14 +54,14 @@ def make():
|
||||
base.create_dir("./build")
|
||||
|
||||
# deploy
|
||||
if ("mac_64" != platform):
|
||||
if (0 != platform.find("mac")):
|
||||
base.copy_files("cef_binary/Release/*", "build/")
|
||||
base.copy_files("cef_binary/Resources/*", "build/")
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.cmd("chmod", ["a+xr", "build/locales"])
|
||||
|
||||
if ("mac_64" == platform):
|
||||
if (0 == platform.find("mac")):
|
||||
base.cmd("mv", ["Chromium Embedded Framework.framework", "build/Chromium Embedded Framework.framework"])
|
||||
|
||||
os.chdir(base_dir)
|
||||
|
||||
27
scripts/core_common/modules/curl.py
Normal file
27
scripts/core_common/modules/curl.py
Normal file
@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import subprocess
|
||||
import os
|
||||
import base
|
||||
|
||||
def make():
|
||||
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(path)
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
if base.is_dir(path + "/build/android"):
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
subprocess.call(["./build-android-curl.sh"])
|
||||
|
||||
elif (-1 != config.option("platform").find("ios")):
|
||||
if base.is_dir(path + "/build/ios"):
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
subprocess.call(["./build-ios-curl.sh"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
34
scripts/core_common/modules/glew.py
Normal file
34
scripts/core_common/modules/glew.py
Normal file
@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
def clean():
|
||||
if base.is_dir("glew-2.1.0"):
|
||||
base.delete_dir("glew-2.1.0");
|
||||
return
|
||||
|
||||
def make():
|
||||
if ("windows" != base.host_platform()):
|
||||
return
|
||||
|
||||
if not config.check_option("module", "mobile"):
|
||||
return;
|
||||
|
||||
print("[fetch & build]: glew")
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/glew"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
base.common_check_version("glew", "1", clean)
|
||||
|
||||
if not base.is_dir("glew-2.1.0"):
|
||||
base.download("https://deac-ams.dl.sourceforge.net/project/glew/glew/2.1.0/glew-2.1.0-win32.zip", "./archive.zip")
|
||||
base.extract("./archive.zip", "./")
|
||||
base.delete_file("./archive.zip")
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
15
scripts/core_common/modules/harfbuzz.py
Executable file
15
scripts/core_common/modules/harfbuzz.py
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: harfbuzz")
|
||||
base.cmd_in_dir(base.get_script_dir() + "/../../core/Common/3dParty/harfbuzz", "./make.py")
|
||||
return
|
||||
|
||||
if __name__ == '__main__':
|
||||
# manual compile
|
||||
make()
|
||||
13
scripts/core_common/modules/html2.py
Normal file
13
scripts/core_common/modules/html2.py
Normal file
@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/html"
|
||||
base.cmd_in_dir(base_dir, "python", ["fetch.py"])
|
||||
return
|
||||
24
scripts/core_common/modules/hunspell.py
Normal file
24
scripts/core_common/modules/hunspell.py
Normal file
@ -0,0 +1,24 @@
|
||||
import sys
|
||||
sys.path.append('../../../scripts')
|
||||
import base
|
||||
import os
|
||||
|
||||
def make(build_js = True):
    """Fetch libhunspell and optionally build its JS bindings.

    build_js -- when True, also run the core/Common/js build against
                hunspell.json after fetching the sources.
    """
    saved_cwd = os.getcwd()

    # fetch libhunspell
    print("[fetch & build]: hunspell")
    common_dir = base.get_script_dir() + "/../../core/Common"

    os.chdir(common_dir + "/3dParty/hunspell")
    base.cmd("python", ["./before.py"])

    if build_js:
        os.chdir(common_dir + "/js")
        base.cmd("python", ["./make.py", common_dir + "/3dParty/hunspell/hunspell.json"])

    os.chdir(saved_cwd)
|
||||
|
||||
# Allow running this script standalone: `python hunspell.py` builds with JS output.
if __name__ == '__main__':
    # manual compile
    make(True)
|
||||
21
scripts/core_common/modules/hyphen.py
Normal file
21
scripts/core_common/modules/hyphen.py
Normal file
@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
def make():
    """Clone the hyphen sources (fetch only; there is no build step here)."""
    print("[fetch]: hyphen")
    hyphen_dir = base.get_script_dir() + "/../../core/Common/3dParty/hyphen"
    saved_cwd = os.getcwd()
    os.chdir(hyphen_dir)

    if not base.is_dir("hyphen"):
        base.cmd("git", ["clone", "https://github.com/hunspell/hyphen"])

    os.chdir(saved_cwd)
    return
|
||||
|
||||
@ -5,12 +5,13 @@ sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import icu_android
|
||||
|
||||
def make():
|
||||
print("[fetch & build]: icu")
|
||||
|
||||
if (config.option("module") == "mobile") and (-1 == config.option("platform").find("ios")):
|
||||
return
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
icu_android.make()
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
|
||||
old_cur = os.getcwd()
|
||||
@ -23,6 +24,9 @@ def make():
|
||||
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
platformToolset = "v140"
|
||||
if (config.option("vs-version") == "2019"):
|
||||
platformToolset = "v142"
|
||||
need_platforms = []
|
||||
if (-1 != config.option("platform").find("win_64")):
|
||||
need_platforms.append("win_64")
|
||||
@ -36,12 +40,9 @@ def make():
|
||||
compile_bat = []
|
||||
compile_bat.append("setlocal")
|
||||
compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
|
||||
compile_bat.append("call MSBuild.exe icu/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=v140 /p:Platform=" + ("Win32" if base.platform_is_32(platform) else "X64"))
|
||||
compile_bat.append("call MSBuild.exe icu/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + ("Win32" if base.platform_is_32(platform) else "X64"))
|
||||
compile_bat.append("endlocal")
|
||||
base.run_as_bat(compile_bat)
|
||||
#base.vcvarsall_start("x64" if ("win_64" == platform) else "x86")
|
||||
#base.cmd("MSBuild.exe", ["icu/source/allinone/allinone.sln", "/p:Configuration=Release", "/p:PlatformToolset=v140", "/p:Platform=" + ("X64" if ("win_64" == platform) else "Win32")])
|
||||
#base.vcvarsall_end()
|
||||
bin_dir = "icu/bin64/" if ("win_64" == platform) else "icu/bin/"
|
||||
lib_dir = "icu/lib64/" if ("win_64" == platform) else "icu/lib/"
|
||||
base.create_dir(platform + "/build")
|
||||
@ -52,39 +53,56 @@ def make():
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
platform = ""
|
||||
if ("linux" == base.host_platform()):
|
||||
platform = "linux_64"
|
||||
if not base.is_dir(platform + "/build"):
|
||||
base.replaceInFile("./icu/source/i18n/digitlst.cpp", "xlocale", "locale")
|
||||
if not base.is_file("./icu/source/i18n/digitlst.cpp.bak"):
|
||||
base.copy_file("./icu/source/i18n/digitlst.cpp", "./icu/source/i18n/digitlst.cpp.bak")
|
||||
base.replaceInFile("./icu/source/i18n/digitlst.cpp", "xlocale", "locale")
|
||||
if base.is_dir(base_dir + "/linux_64"):
|
||||
base.delete_dir(base_dir + "/linux_64")
|
||||
if base.is_dir(base_dir + "/linux_arm64"):
|
||||
base.delete_dir(base_dir + "/linux_arm64")
|
||||
|
||||
if not base.is_dir(base_dir + "/linux_64"):
|
||||
base.create_dir(base_dir + "/icu/cross_build")
|
||||
os.chdir("icu/cross_build")
|
||||
base.cmd("./../source/runConfigureICU", ["Linux", "--prefix=" + base_dir + "/icu/cross_build_install"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
base.create_dir(base_dir + "/linux_64")
|
||||
base.create_dir(base_dir + "/linux_64/build")
|
||||
base.copy_file(base_dir + "/icu/cross_build_install/lib/libicudata.so." + icu_major + "." + icu_minor, base_dir + "/linux_64/build/libicudata.so." + icu_major)
|
||||
base.copy_file(base_dir + "/icu/cross_build_install/lib/libicuuc.so." + icu_major + "." + icu_minor, base_dir + "/linux_64/build/libicuuc.so." + icu_major)
|
||||
base.copy_dir(base_dir + "/icu/cross_build_install/include", base_dir + "/linux_64/build/include")
|
||||
|
||||
if config.check_option("platform", "linux_arm64") and not base.is_dir(base_dir + "/linux_arm64") and not base.is_os_arm():
|
||||
base.create_dir(base_dir + "/icu/linux_arm64")
|
||||
os.chdir(base_dir + "/icu/linux_arm64")
|
||||
base_arm_tool_dir = base.get_prefix_cross_compiler_arm64()
|
||||
base.cmd("./../source/configure", ["--host=arm-linux", "--prefix=" + base_dir + "/icu/linux_arm64_install", "--with-cross-build=" + base_dir + "/icu/cross_build",
|
||||
"CC=" + base_arm_tool_dir + "gcc", "CXX=" + base_arm_tool_dir + "g++", "AR=" + base_arm_tool_dir + "ar", "RANLIB=" + base_arm_tool_dir + "ranlib"])
|
||||
base.cmd("make", ["-j4"])
|
||||
base.cmd("make", ["install"], True)
|
||||
base.create_dir(base_dir + "/linux_arm64")
|
||||
base.create_dir(base_dir + "/linux_arm64/build")
|
||||
base.copy_file(base_dir + "/icu/linux_arm64_install/lib/libicudata.so." + icu_major + "." + icu_minor, base_dir + "/linux_arm64/build/libicudata.so." + icu_major)
|
||||
base.copy_file(base_dir + "/icu/linux_arm64_install/lib/libicuuc.so." + icu_major + "." + icu_minor, base_dir + "/linux_arm64/build/libicuuc.so." + icu_major)
|
||||
base.copy_dir(base_dir + "/icu/linux_arm64_install/include", base_dir + "/linux_arm64/build/include")
|
||||
|
||||
os.chdir("../..")
|
||||
|
||||
if ("mac" == base.host_platform()):
|
||||
platform = "mac_64"
|
||||
if not base.is_dir(platform + "/build"):
|
||||
if not base.is_file("./icu/source/tools/pkgdata/pkgdata.cpp.bak"):
|
||||
base.copy_file("./icu/source/tools/pkgdata/pkgdata.cpp", "./icu/source/tools/pkgdata/pkgdata.cpp.bak")
|
||||
base.replaceInFile("./icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
|
||||
|
||||
if (-1 != config.option("platform").find("ios")):
|
||||
if not base.is_dir("build"):
|
||||
base.bash("./icu_ios")
|
||||
elif ("" != platform) and not base.is_dir(platform + "/build"):
|
||||
base.create_dir(platform)
|
||||
os.chdir("icu/source")
|
||||
base.cmd("./runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX"])
|
||||
old_dest_dir = base.get_env("DESTDIR")
|
||||
base.set_env("DESTDIR", base_dir + "/" + platform)
|
||||
base.cmd("make", ["install"])
|
||||
if ("" == old_dest_dir):
|
||||
os.environ.pop("DESTDIR")
|
||||
else:
|
||||
base.set_env("DEST_DIR", old_dest_dir)
|
||||
os.chdir("../..")
|
||||
base.create_dir(platform + "/build")
|
||||
if ("linux_64" == platform):
|
||||
base.copy_file("icu/source/lib/libicudata.so." + icu_major + "." + icu_minor, platform + "/build/libicudata.so." + icu_major)
|
||||
base.copy_file("icu/source/lib/libicuuc.so." + icu_major + "." + icu_minor, platform + "/build/libicuuc.so." + icu_major)
|
||||
elif ("mac_64" == platform):
|
||||
base.copy_file("icu/source/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", platform + "/build/libicudata." + icu_major + ".dylib")
|
||||
base.copy_file("icu/source/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", platform + "/build/libicuuc." + icu_major + ".dylib")
|
||||
# mac
|
||||
if (-1 != config.option("platform").find("mac_")) and not base.is_dir("mac_64/build"):
|
||||
base.cmd_in_dir(base_dir + "/../../../../build_tools/scripts/core_common/modules", "python", ["icu_mac.py"])
|
||||
|
||||
# ios
|
||||
if (-1 != config.option("platform").find("ios")):
|
||||
if not base.is_dir("build"):
|
||||
base.bash("./icu_ios")
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
172
scripts/core_common/modules/icu_android.py
Executable file
172
scripts/core_common/modules/icu_android.py
Executable file
@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
# Working directory of the Android ICU build inside the core tree.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"

# NOTE(review): name contains a typo ("toolshains") and the value does not
# appear to be used below (build_arch installs into current_dir + "/toolchain/...");
# confirm before renaming or removing.
toolshains_dir = current_dir + "/toolchains"
# ICU release being built (58.2).
icu_major = "58"
icu_minor = "2"
# False -> static .a libs plus an icudt .dat data archive; True -> shared .so libs.
icu_is_shared = False

# Original PATH, saved so build_arch can prepend a toolchain bin dir and restore it.
current_path = base.get_env("PATH")

# Per-ABI NDK settings: "arch" is the make-standalone-toolchain name,
# "bin" is the compiler triple prefix (CC = <bin>-clang, AR = <bin>-ar, ...).
platforms = {
    "arm64" : {
        "arch" : "aarch64-linux-android",
        "bin" : "aarch64-linux-android"
    },
    "arm" : {
        "arch" : "arm-linux-androideabi",
        "bin" : "arm-linux-androideabi"
    },
    "x86_64" : {
        "arch" : "x86_64-linux-android",
        "bin" : "x86_64-linux-android"
    },
    "x86" : {
        "arch" : "x86-linux-android",
        "bin" : "i686-linux-android"
    }
}
|
||||
|
||||
def build_arch(arch, api_version):
    """Build ICU for one Android ABI using a standalone NDK toolchain.

    arch        -- key into the module-level `platforms` dict ("arm64", "arm",
                   "x86_64" or "x86")
    api_version -- Android API level string for make-standalone-toolchain.sh
    """
    print("icu build: " + arch + " ----------------------------------------")

    # Start from a clean per-arch build directory.
    if base.is_dir(current_dir + "/icu/" + arch):
        base.delete_dir(current_dir + "/icu/" + arch)
    base.create_dir(current_dir + "/icu/" + arch)
    os.chdir(current_dir + "/icu/" + arch)

    # Generate a standalone toolchain for this ABI / API level.
    base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
        "--platform=android-" + api_version,
        "--install-dir=" + current_dir + "/toolchain/" + arch,
        "--toolchain=" + platforms[arch]["arch"],
        "--force"
    ])

    # Put this toolchain's compilers first on PATH for the configure/make below.
    base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)

    # !!!MASK!!! placeholders are replaced with the ABI's compiler triple below.
    command_args = "--prefix=" + current_dir + "/build_tmp/" + arch + " --host=!!!MASK!!! --with-cross-build=" + current_dir + "/icu/cross_build CFLAGS=-Os CXXFLAGS=--std=c++11 CC=!!!MASK!!!-clang CXX=!!!MASK!!!-clang++ AR=!!!MASK!!!-ar RANLIB=!!!MASK!!!-ranlib"
    if not icu_is_shared:
        # Static build: PIC archives and the ICU data packaged as a .dat archive.
        command_args += " --enable-static --enable-shared=no --with-data-packaging=archive CFLAGS=-fPIC CXXFLAGS=-fPIC"
    command_args = command_args.replace("!!!MASK!!!", platforms[arch]["bin"])

    base.cmd("../source/configure", command_args.split())
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])

    # Restore the original PATH and return to the module's working directory.
    base.set_env("PATH", current_path)
    os.chdir(current_dir)

    return
|
||||
|
||||
def make():
    """Fetch ICU 58.2 and build it for the four Android ABIs.

    Output goes to current_dir + "/build": a shared include/ directory plus one
    lib directory per ABI (arm64_v8a, armv7, x86_64, x86). Depending on
    icu_is_shared the libs are soname-patched .so files or static .a files plus
    the icudt58l.dat data archive. No-op when the build dir already exists.
    """
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)

    # Already built -- nothing to do.
    if base.is_dir(current_dir + "/build"):
        return

    current_dir_old = os.getcwd()

    print("[fetch & build]: icu_android")
    os.chdir(current_dir)

    if not base.is_dir("icu"):
        base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
        if ("linux" == base.host_platform()):
            # glibc has no xlocale.h; ICU 58 sources still include it.
            base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
        #if ("mac" == base.host_platform()):
        #  base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")

    # Cross-compiling ICU requires a native host build of the same version.
    if not base.is_dir(current_dir + "/icu/cross_build"):
        base.create_dir(current_dir + "/icu/cross_build")
        os.chdir(current_dir + "/icu/cross_build")
        base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
            "--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
        base.cmd("make", ["-j4"])
        base.cmd("make", ["install"], True)

    os.chdir(current_dir)

    # One cross build per ABI (API 21 for 64-bit ABIs, API 16 for 32-bit).
    build_arch("arm64", "21")
    build_arch("arm", "16")
    build_arch("x86_64","21")
    build_arch("x86", "16")

    os.chdir(current_dir)

    base.create_dir(current_dir + "/build")
    # Headers are arch-independent; take them from the arm64 install.
    base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")

    if icu_is_shared:
        base.create_dir(current_dir + "/build/arm64_v8a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")

        base.create_dir(current_dir + "/build/armv7")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")

        base.create_dir(current_dir + "/build/x86_64")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")

        base.create_dir(current_dir + "/build/x86")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")

        # patch elf information
        # Build patchelf from source, then strip the ".58" version suffix from
        # each lib's soname and from libicuuc's dependency on libicudata, so the
        # libs can be loaded by their unversioned names.
        os.chdir(current_dir + "/build")
        base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
        os.chdir("./patchelf")
        base.cmd("./bootstrap.sh")
        base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
        base.cmd("make")
        base.cmd("make", ["install"])

        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../arm64_v8a/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../arm64_v8a/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../arm64_v8a/libicuuc.so"])

        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../armv7/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../armv7/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../armv7/libicuuc.so"])

        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86_64/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86_64/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86_64/libicuuc.so"])

        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../x86/libicudata.so"])
        base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../x86/libicuuc.so"])
        base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../x86/libicuuc.so"])

        base.delete_dir(current_dir + "/build/patchelf")

    if not icu_is_shared:
        base.create_dir(current_dir + "/build/arm64_v8a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.a", current_dir + "/build/arm64_v8a/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.a", current_dir + "/build/arm64_v8a/libicuuc.a")
        base.copy_file(current_dir + "/icu/arm64/data/out/icudt58l.dat", current_dir + "/build/arm64_v8a/icudt58l.dat")

        base.create_dir(current_dir + "/build/armv7")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.a", current_dir + "/build/armv7/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.a", current_dir + "/build/armv7/libicuuc.a")
        base.copy_file(current_dir + "/icu/arm/data/out/icudt58l.dat", current_dir + "/build/armv7/icudt58l.dat")

        base.create_dir(current_dir + "/build/x86_64")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.a", current_dir + "/build/x86_64/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.a", current_dir + "/build/x86_64/libicuuc.a")
        base.copy_file(current_dir + "/icu/x86_64/data/out/icudt58l.dat", current_dir + "/build/x86_64/icudt58l.dat")

        base.create_dir(current_dir + "/build/x86")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.a", current_dir + "/build/x86/libicudata.a")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.a", current_dir + "/build/x86/libicuuc.a")
        base.copy_file(current_dir + "/icu/x86/data/out/icudt58l.dat", current_dir + "/build/x86/icudt58l.dat")

    os.chdir(current_dir_old)
    return
|
||||
98
scripts/core_common/modules/icu_mac.py
Executable file
98
scripts/core_common/modules/icu_mac.py
Executable file
@ -0,0 +1,98 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
|
||||
def change_icu_defs(current_dir, arch):
    """Patch icudefs.mk in `current_dir` to target `arch`, keeping a backup.

    Injects -arch (and, for arm64, the macOS SDK sysroot) plus a 10.12
    deployment target into CFLAGS/CXXFLAGS/LDFLAGS. restore_icu_defs()
    undoes the edit from the .back copy made here.
    """
    icudef_file = current_dir + "/icudefs.mk"
    icudef_file_old = current_dir + "/icudefs.mk.back"

    param = "-arch x86_64"
    if arch == "arm64":
        param = "-arch arm64 -isysroot " + base.find_mac_sdk()

    param += " -mmacosx-version-min=10.12"

    # Keep a pristine copy so the edits can be undone later.
    base.copy_file(icudef_file, icudef_file_old)

    base.replaceInFile(icudef_file, "CFLAGS = ", "CFLAGS = " + param + " ")
    base.replaceInFile(icudef_file, "CXXFLAGS = ", "CXXFLAGS = " + param + " ")
    # "RPATHLDFLAGS = " ends with "LDFLAGS = ", so temporarily rename it to
    # keep the LDFLAGS substitution below from also rewriting it, then rename
    # it back. The order of these three calls is essential.
    base.replaceInFile(icudef_file, "RPATHLDFLAGS =", "RPATHLDFLAGS2 =")
    base.replaceInFile(icudef_file, "LDFLAGS = ", "LDFLAGS = " + param + " ")
    base.replaceInFile(icudef_file, "RPATHLDFLAGS2 =", "RPATHLDFLAGS =")

    return
|
||||
|
||||
def restore_icu_defs(current_dir):
    """Put back the pristine icudefs.mk saved by change_icu_defs()."""
    makefile = current_dir + "/icudefs.mk"
    backup = current_dir + "/icudefs.mk.back"

    base.delete_file(makefile)
    base.copy_file(backup, makefile)
    base.delete_file(backup)
    return
|
||||
|
||||
# Flat build script: builds ICU 58.2 for macOS x86_64 and (cross) arm64, then
# assembles mac_64/build and mac_arm64/build with headers and versioned dylibs.

# ICU release being built (58.2).
icu_major = "58"
icu_minor = "2"

current_dir_old = os.getcwd()
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"

os.chdir(current_dir)

# Native x86_64 build; also serves as the --with-cross-build base for arm64.
if not base.is_dir(current_dir + "/mac_cross_64"):
    base.create_dir(current_dir + "/mac_cross_64")
os.chdir(current_dir + "/mac_cross_64")

base.cmd("../icu/source/runConfigureICU", ["MacOSX",
    "--prefix=" + current_dir + "/mac_cross_64", "CFLAGS=-Os CXXFLAGS=--std=c++11"])

change_icu_defs(current_dir + "/mac_cross_64", "x86_64")

base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)

restore_icu_defs(current_dir + "/mac_cross_64")

os.chdir(current_dir)

# Cross-compile for arm64 using the x86_64 build above as the host tools.
os.chdir(current_dir + "/icu/source")

base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm_64",
    "--with-cross-build=" + current_dir + "/mac_cross_64", "VERBOSE=1"])

change_icu_defs(current_dir + "/icu/source", "arm64")

base.cmd("make", ["-j4"])
base.cmd("make", ["install"])

restore_icu_defs(current_dir + "/icu/source")

os.chdir(current_dir)

# Rebuild the output dirs from scratch.
if base.is_dir(current_dir + "/mac_64"):
    base.delete_dir(current_dir + "/mac_64")

if base.is_dir(current_dir + "/mac_arm64"):
    base.delete_dir(current_dir + "/mac_arm64")

base.create_dir(current_dir + "/mac_64")
base.create_dir(current_dir + "/mac_64/build")

base.create_dir(current_dir + "/mac_arm64")
base.create_dir(current_dir + "/mac_arm64/build")

# Copy headers and the versioned dylibs (renamed to the major-only version).
base.copy_dir(current_dir + "/mac_cross_64/include", current_dir + "/mac_64/build/include")
base.copy_file(current_dir + "/mac_cross_64/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicudata." + icu_major + ".dylib")
base.copy_file(current_dir + "/mac_cross_64/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicuuc." + icu_major + ".dylib")

base.copy_dir(current_dir + "/mac_arm_64/include", current_dir + "/mac_arm64/build/include")
base.copy_file(current_dir + "/mac_arm_64/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicudata." + icu_major + ".dylib")
base.copy_file(current_dir + "/mac_arm_64/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicuuc." + icu_major + ".dylib")

# Intermediate install prefixes are no longer needed.
base.delete_dir(current_dir + "/mac_cross_64")
base.delete_dir(current_dir + "/mac_arm_64")

os.chdir(current_dir_old)
||||
204
scripts/core_common/modules/ixwebsocket.py
Normal file
204
scripts/core_common/modules/ixwebsocket.py
Normal file
@ -0,0 +1,204 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import base
|
||||
import os
|
||||
import config
|
||||
from distutils.version import LooseVersion, StrictVersion
|
||||
|
||||
# Root of the ixwebsocket third-party tree inside core.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/ixwebsocket"

# cmake executable to invoke; replaced with the Android SDK's bundled cmake
# when building for android (see make()).
CMAKE = "cmake"
|
||||
|
||||
def find_last_version(arr_input, base_directory):
    """Return the highest-versioned entry of `arr_input` that contains bin/cmake.

    arr_input      -- candidate directory names (e.g. os.listdir of the SDK
                      cmake directory, where each entry is a version string)
    base_directory -- directory the candidates live in

    Raises RuntimeError when no candidate contains bin/cmake (previously this
    crashed with an opaque IndexError on `arr[0]`).
    """
    candidates = [name for name in arr_input
                  if base.is_file(base_directory + "/" + name + "/bin/cmake")]
    if not candidates:
        raise RuntimeError("no cmake installation found under " + base_directory)
    # LooseVersion orders dotted version strings numerically ("3.10" > "3.9").
    return max(candidates, key=LooseVersion)
|
||||
|
||||
def build_arch(platform, arch, params, is_debug=False):
    """Configure, build and install IXWebSocket for one platform/arch pair.

    platform -- "android", "ios", "mac", "linux", "windows" or "windows_debug";
                also the output subdir under IXWebSocket/build/
    arch     -- target arch name; install dir is build/<platform>/<arch>
    params   -- extra cmake arguments (generator, toolchain file, ABI, ...)
    is_debug -- Windows only: build the Debug configuration instead of Release
    """
    print("ixwebsocket build: " + platform + "....." + arch + " ----------------------------------------")

    # Fresh output dir for this arch plus a throwaway cmake cache dir.
    if base.is_dir(current_dir + "/IXWebSocket/build/"+ platform + "/" + arch):
        base.delete_dir(current_dir + "/IXWebSocket/build/" + platform + "/" + arch)
    base.create_dir(current_dir + "/IXWebSocket/build/" + platform + "/" + arch)
    cache_dir = current_dir + "/IXWebSocket/build/" + platform + "/cache"
    base.create_dir(cache_dir)
    os.chdir(cache_dir)

    libext = "a"
    prefix = "/"
    zlib = "1"
    if (0 == platform.find("windows")):
        # Windows: no zlib, .lib extension, install straight into the arch dir.
        zlib = "0"
        libext = "lib"
        prefix = cache_dir + "/../" + arch

    # OpenSSL output layout: .../openssl/build/<platform>/<arch> for ios and
    # android, .../openssl/build/<arch> for everything else.
    path = platform
    if(platform == "ios" or platform == "android"):
        path += "/"
    else:
        path = ""

    # Configure against the matching prebuilt OpenSSL (paths are relative to
    # cache_dir, five levels below the 3dParty directory).
    base.cmd(CMAKE, ["../../..",
        "-DUSE_WS=0", "-DUSE_ZLIB=" + zlib, "-DUSE_TLS=1", "-DUSE_OPEN_SSL=1",
        "-DOPENSSL_ROOT_DIR=" + cache_dir + "/../../../../../openssl/build/" + path + arch,
        "-DOPENSSL_INCLUDE_DIR=" + cache_dir + "/../../../../../openssl/build/" + path + arch + "/include",
        "-DOPENSSL_CRYPTO_LIBRARY=" + cache_dir + "/../../../../../openssl/build/" + path + arch + "/lib/libcrypto." + libext,
        "-DOPENSSL_SSL_LIBRARY=" + cache_dir + "/../../../../../openssl/build/" + path + arch + "/lib/libssl." + libext,
        "-DCMAKE_INSTALL_PREFIX:PATH=" + prefix] + params)

    # Drive the build with the tool matching the generator chosen in `params`.
    if(-1 != platform.find("ios") or -1 != platform.find("mac")):
        base.cmd(CMAKE, ["--build", ".", "--config", "Release"])
        base.cmd(CMAKE, ["--install", ".", "--config", "Release", "--prefix", cache_dir + "/../" + arch])
    elif(-1 != platform.find("android") or -1 != platform.find("linux")):
        base.cmd("make", ["-j4"])
        base.cmd("make", ["DESTDIR=" + cache_dir + "/../" + arch, "install"])
    elif(-1 != platform.find("windows")):
        conf = "Debug" if is_debug else "Release"
        base.cmd(CMAKE, ["--build", ".", "--target", "install", "--config", conf])

    base.delete_dir(cache_dir)
    os.chdir(current_dir)

    return
|
||||
|
||||
def make():
    """Fetch IXWebSocket and build it for the configured platform(s).

    Android builds use the SDK-bundled cmake with the NDK toolchain file;
    ios/mac builds use the leetal/ios-cmake toolchain (plus a lipo'd universal
    lib on iOS); linux_64 uses plain Makefiles; Windows builds produce both
    Release and Debug libs per requested arch. A platform whose output dir
    (IXWebSocket/build/<platform>) already exists is skipped.

    Fixes vs. previous revision:
      * win_32 debug generator was "Visual Studio14 2015" (missing space);
      * cmake bootstrap on mac ran base.cmd("brew install", ...) with
        "brew install" as a single program name;
      * param_apple_ios used a mutable default argument.
    """
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)

    print("[fetch & build]: ixwebsocket")

    current_dir_old = os.getcwd()

    os.chdir(current_dir)

    if not base.is_dir(current_dir + "/IXWebSocket"):
        base.cmd("git", ["clone", "https://github.com/machinezone/IXWebSocket"])

    # build for platform
    if (-1 != config.option("platform").find("android")):
        if base.is_dir(current_dir + "/IXWebSocket/build/android"):
            os.chdir(current_dir_old)
            return

        os.chdir(current_dir + "/IXWebSocket")

        global CMAKE

        # Use the newest cmake bundled with the Android SDK, not the host cmake.
        CMAKE_TOOLCHAIN_FILE = base.get_env("ANDROID_NDK_ROOT") + "/build/cmake/android.toolchain.cmake"
        CMAKE_DIR = base.get_android_sdk_home() + "/cmake/"
        CMAKE = CMAKE_DIR + find_last_version(os.listdir(CMAKE_DIR), CMAKE_DIR) + "/bin/cmake"

        def param_android(arch, api):
            # Common cmake arguments for one Android ABI / API level.
            return ["-G","Unix Makefiles", "-DANDROID_NATIVE_API_LEVEL=" + api, "-DANDROID_ABI=" + arch, "-DANDROID_TOOLCHAIN=clang", "-DANDROID_NDK=" + base.get_env("ANDROID_NDK_ROOT"), "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE, "-DCMAKE_MAKE_PROGRAM=make"]

        build_arch("android", "arm64-v8a", param_android("arm64-v8a", "21"))
        build_arch("android", "armeabi-v7a", param_android("armeabi-v7a", "16"))
        build_arch("android", "x86_64", param_android("x86_64", "21"))
        build_arch("android", "x86", param_android("x86", "16"))

    elif (-1 != config.option("platform").find("ios") or -1 != config.option("platform").find("mac")):
        platform = "ios" if -1 != config.option("platform").find("ios") else "mac"
        if base.is_dir(current_dir + "/IXWebSocket/build/" + platform):
            os.chdir(current_dir_old)
            return

        if not base.is_dir(current_dir + "/ios-cmake"):
            base.cmd("git", ["clone", "https://github.com/leetal/ios-cmake"])

        CMAKE_TOOLCHAIN_FILE = current_dir + "/ios-cmake/ios.toolchain.cmake"

        # NOTE(review): `cmake` with no arguments exits non-zero even when it is
        # installed, so this check can trigger an unnecessary install -- confirm.
        os_cmd = 'cmake'
        if os.system(os_cmd) != 0:
            # fixed: the executable is "brew"; "install" is its first argument
            # (previously "brew install" was passed as a single program name).
            base.cmd("brew", ["install", "cmake"])

        os.chdir(current_dir + "/IXWebSocket")

        def param_apple(platform, arch):
            # Xcode generator arguments for a macOS target.
            return ["-G","Xcode", "-DDEPLOYMENT_TARGET=10", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]

        def param_apple_ios(platform, arch, params=None):
            # Xcode generator arguments for an iOS target; `params` are
            # prepended extras. fixed: no mutable default argument.
            if params is None:
                params = []
            return params + ["-G","Xcode", "-DDEPLOYMENT_TARGET=11", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]

        if(platform == "ios"):
            #build_arch("ios", "armv7", param_apple("OS", "armv7"))
            build_arch("ios", "arm64", param_apple_ios("OS64", "arm64"))
            #build_arch("ios", "i386", param_apple_ios("SIMULATOR", "i386"))
            build_arch("ios", "x86_64", param_apple_ios("SIMULATOR64", "x86_64", ["-DCMAKE_CXX_FLAGS=-std=c++11"]))
        else:
            build_arch("mac", "mac_arm64", param_apple("MAC_ARM64", "arm64"))
            build_arch("mac", "mac_64", param_apple("MAC", "x86_64"))

        os.chdir(current_dir)

        if(platform == "ios"):
            # Assemble a universal (fat) static lib plus headers for iOS.
            base.create_dir(current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal/include")
            base.create_dir(current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal/lib")

            #copy include
            prefix_dir = current_dir + "/IXWebSocket/build/ios/"
            postfix_dir = ""
            if base.is_dir(prefix_dir + "arm64/usr"):
                postfix_dir = "/usr"

            # Headers are identical across arches; use the first arch found.
            if base.is_dir(prefix_dir + "armv7" + postfix_dir + "/include"):
                base.cmd("cp", [ "-r", prefix_dir + "armv7" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])
            elif base.is_dir(prefix_dir + "armv64" + postfix_dir + "/include"):
                base.cmd("cp", [ "-r", prefix_dir + "armv64" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])
            elif base.is_dir(prefix_dir + "i386" + postfix_dir + "/include"):
                base.cmd("cp", [ "-r", prefix_dir + "i386" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])
            elif base.is_dir(prefix_dir + "x86_64" + postfix_dir + "/include"):
                base.cmd("cp", [ "-r", prefix_dir + "x86_64" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])

            # Create fat lib
            if (True):
                # Only arm64 + x86_64 are built above; the else branch matches
                # the commented-out armv7/i386 builds.
                base.cmd("lipo", ["IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
                    "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
                    "-create", "-output",
                    "IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
            else:
                base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
                    "IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
                    "-create", "-output",
                    "IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])

    elif (-1 != config.option("platform").find("linux")):
        if base.is_dir(current_dir + "/IXWebSocket/build/linux"):
            os.chdir(current_dir_old)
            return

        #will support when openssl x86 will support
        #if (-1 != config.option("platform").find("linux_32")):
        #  build_arch("linux", "linux_32", ["-G","Unix Makefiles", "-DCMAKE_MAKE_PROGRAM=make", "-DCMAKE_C_FLAGS=-m32", "-DCMAKE_CXX_FLAGS=-m32"])
        if (-1 != config.option("platform").find("linux_64")):
            build_arch("linux", "linux_64", ["-G","Unix Makefiles", "-DCMAKE_MAKE_PROGRAM=make"])

    elif ("windows" == base.host_platform()):
        if base.is_dir(current_dir + "/IXWebSocket/build/windows"):
            os.chdir(current_dir_old)
            return

        vsVersion = "14 2015"
        if (config.option("vs-version") == "2019"):
            vsVersion = "16 2019"

        if (-1 != config.option("platform").find("win_32")):
            build_arch("windows", "win_32", ["-G","Visual Studio " + vsVersion, "-A", "Win32"])
            # fixed: the missing space after "Visual Studio" produced the
            # invalid generator name "Visual Studio14 2015" for the debug build.
            build_arch("windows_debug", "win_32", ["-G","Visual Studio " + vsVersion, "-A", "Win32"], True)
        if (-1 != config.option("platform").find("win_64")):
            # NOTE(review): the "<generator> Win64" form is only valid for the
            # VS 2015 generator; "Visual Studio 16 2019" requires "-A x64"
            # instead -- confirm before enabling 2019 win_64 builds.
            build_arch("windows", "win_64", ["-G","Visual Studio " + vsVersion + " Win64"])
            build_arch("windows_debug", "win_64", ["-G","Visual Studio " + vsVersion + " Win64"], True)

    os.chdir(current_dir_old)
    return
|
||||
@ -5,23 +5,29 @@ sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import platform
|
||||
import openssl_mobile
|
||||
|
||||
def clean():
|
||||
if base.is_dir("openssl"):
|
||||
base.delete_dir("openssl")
|
||||
base.delete_dir_with_access_error("openssl")
|
||||
if base.is_dir("build"):
|
||||
base.delete_dir("build")
|
||||
return
|
||||
|
||||
def make():
|
||||
if ("android" == base.host_platform() or "ios" == config.option("platform")):
|
||||
return
|
||||
|
||||
print("[fetch & build]: openssl")
|
||||
|
||||
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
|
||||
openssl_mobile.make()
|
||||
return
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
base.common_check_version("openssl", "1", clean)
|
||||
base.common_check_version("openssl", "3", clean)
|
||||
|
||||
if not base.is_dir("openssl"):
|
||||
base.cmd("git", ["clone", "--depth=1", "--branch", "OpenSSL_1_1_1f", "https://github.com/openssl/openssl.git"])
|
||||
@ -80,8 +86,40 @@ def make():
|
||||
base.cmd("make", ["install"])
|
||||
# TODO: support x86
|
||||
|
||||
if (-1 != config.option("platform").find("linux_arm64")) and not base.is_dir("../build/linux_arm64"):
|
||||
if ("x86_64" != platform.machine()):
|
||||
base.copy_dir("../build/linux_64", "../build/linux_arm64")
|
||||
else:
|
||||
cross_compiler_arm64 = config.option("arm64-toolchain-bin")
|
||||
if ("" == cross_compiler_arm64):
|
||||
cross_compiler_arm64 = "/usr/bin"
|
||||
cross_compiler_arm64_prefix = cross_compiler_arm64 + "/" + base.get_prefix_cross_compiler_arm64()
|
||||
base.cmd("./Configure", ["linux-aarch64", "--cross-compile-prefix=" + cross_compiler_arm64_prefix, "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
|
||||
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
|
||||
base.cmd("make", [], True)
|
||||
base.cmd("make", ["install"], True)
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
|
||||
base.cmd("./config", ["no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64"])
|
||||
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
|
||||
base.cmd("make", ["build_libs", "install"])
|
||||
|
||||
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_arm64"):
|
||||
os.chdir(base_dir)
|
||||
base.cmd("git", ["clone", "--depth=1", "--branch", "OpenSSL_1_1_1f", "https://github.com/openssl/openssl.git", "openssl2"])
|
||||
os.chdir(base_dir + "/openssl2")
|
||||
replace1 = "\"darwin64-x86_64-cc\" => {"
|
||||
replace2 = "\"darwin64-arm64-cc\" => {\n\
|
||||
inherit_from => [ \"darwin-common\", asm(\"aarch64_asm\") ],\n\
|
||||
CFLAGS => add(\"-Wall\"),\n\
|
||||
cflags => add(\"-arch arm64 -isysroot " + base.find_mac_sdk() + "\"),\n\
|
||||
lib_cppflags => add(\"-DL_ENDIAN\"),\n\
|
||||
bn_ops => \"SIXTY_FOUR_BIT_LONG\",\n\
|
||||
perlasm_scheme => \"macosx\",\n\
|
||||
},\n\
|
||||
\"darwin64-x86_64-cc\" => {"
|
||||
base.replaceInFile(base_dir + "/openssl2/Configurations/10-main.conf", replace1, replace2)
|
||||
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
|
||||
base.cmd("make", ["build_libs", "install"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
|
||||
21
scripts/core_common/modules/openssl_mobile.py
Executable file
21
scripts/core_common/modules/openssl_mobile.py
Executable file
@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import base
|
||||
import config
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def make():
    """Run the shipped shell scripts to build OpenSSL for mobile targets.

    Invokes build-android-openssl.sh / build-ios-openssl.sh inside
    core/Common/3dParty/openssl when the requested platform includes
    android / ios and the corresponding ./build/<target> directory does
    not exist yet. Restores the working directory before returning.
    """
    openssl_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
    saved_cwd = os.getcwd()
    os.chdir(openssl_dir)
    # The android build script needs ANDROID_HOME; set it unconditionally.
    base.set_env("ANDROID_HOME", base.get_android_sdk_home())

    targets = config.option("platform")
    if (targets.find("android") != -1) and not base.is_dir("./build/android"):
        subprocess.call(["./build-android-openssl.sh"])

    if (targets.find("ios") != -1) and not base.is_dir("./build/ios"):
        subprocess.call(["./build-ios-openssl.sh"])

    os.chdir(saved_cwd)
    return
|
||||
16
scripts/core_common/modules/socket_io.py
Normal file
16
scripts/core_common/modules/socket_io.py
Normal file
@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def make():
    """Fetch socket.io-client-cpp (with submodules) into core/Common/3dParty/socketio."""
    sio_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
    repo_dir = sio_dir + "/socket.io-client-cpp"
    # Already cloned: nothing to do.
    if base.is_dir(repo_dir):
        return
    base.cmd_in_dir(sio_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
    base.cmd_in_dir(repo_dir, "git", ["submodule", "init"])
    base.cmd_in_dir(repo_dir, "git", ["submodule", "update"])
    return
|
||||
66
scripts/core_common/modules/socketrocket.py
Normal file
66
scripts/core_common/modules/socketrocket.py
Normal file
@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import config
|
||||
|
||||
# Absolute path of the SocketRocket third-party checkout, resolved
# relative to this script's location.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketrocket"
|
||||
|
||||
def buildIOS():
    """Build SocketRocket static libraries for iOS and package them.

    Side effects: runs xcodebuild/lipo relative to the current working
    directory (the caller chdirs into the SocketRocket checkout first);
    writes .xcarchive bundles and an xcframework under <current_dir>/build
    and a fat static library under <current_dir>/build/ios/lib.
    """
    # Build for iphone
    # Archive the device slice (bitcode off, distribution-ready, install not skipped).
    base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-devices.xcarchive", "-sdk", "iphoneos", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
    # Plain Release build for the same SDK, this time with bitcode embedded.
    base.cmd("xcodebuild", ["-sdk", "iphoneos", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])

    # Build for simulator
    base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-simulators.xcarchive", "-sdk", "iphonesimulator", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
    base.cmd("xcodebuild", ["-sdk", "iphonesimulator", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])

    # Package xcframework from the two archived slices.
    base.cmd("xcodebuild", ["-create-xcframework", "-library", current_dir + "/build/SocketRocket-devices.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-library", current_dir + "/build/SocketRocket-simulators.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-output", current_dir + "/build/SocketRocket.xcframework"])

    # Remove arm64 for simulator for SDK 14: a device arm64 slice and a
    # simulator arm64 slice cannot coexist in one fat lipo archive.
    base.cmd("lipo", ["-remove", "arm64", "-output", "build/Release-iphonesimulator/libSocketRocket.a", "build/Release-iphonesimulator/libSocketRocket.a"])

    base.create_dir(current_dir + "/build/ios/lib")

    # Create fat lib from the (now arm64-free) simulator lib + device lib.
    # NOTE(review): the output name "libSoсketRocket.a" contains a Cyrillic
    # "с" (U+0441) instead of a Latin "c" — confirm downstream .pri/linker
    # settings reference this exact byte sequence before changing it.
    base.cmd("lipo", ["./build/Release-iphonesimulator/libSocketRocket.a", "./build/Release-iphoneos/libSocketRocket.a", "-create", "-output",
        "./build/ios/lib/libSoсketRocket.a"])

    return
|
||||
|
||||
def buildMacOS():
    """Build SocketRocket for macOS and split the universal lib per arch.

    Produces thin static libraries under build/mac_64/lib (x86_64) and
    build/mac_arm64/lib (arm64), then deletes the intermediate universal
    build/Release library. Runs relative to the current working directory.
    """
    # Build universal Release lib with bitcode embedded.
    # (The original comment said "Build for iphone"; this uses the macosx SDK.)
    base.cmd("xcodebuild", ["-sdk", "macosx", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])

    base.create_dir(current_dir + "/build/mac_64/lib")
    base.create_dir(current_dir + "/build/mac_arm64/lib")

    # Split the universal binary into per-architecture thin libraries.
    # NOTE(review): "libSoсketRocket.a" below contains a Cyrillic "с"
    # (U+0441), not a Latin "c" — confirm consumers expect this exact name.
    base.cmd("lipo", ["build/Release/libSocketRocket.a", "-thin", "x86_64", "-output", "build/mac_64/lib/libSoсketRocket.a"])
    base.cmd("lipo", ["build/Release/libSocketRocket.a", "-thin", "arm64", "-output", "build/mac_arm64/lib/libSoсketRocket.a"])

    # The universal intermediate is no longer needed.
    base.delete_file("build/Release/libSocketRocket.a")

    return
|
||||
|
||||
def make():
    """Build SocketRocket for mac/ios platforms, skipping cached builds.

    No-op unless the configured platform contains "mac" or "ios". The
    build is skipped when the expected output directories already exist.
    Restores the working directory before returning.
    """
    if (-1 == config.option("platform").find("mac") and -1 == config.option("platform").find("ios")):
        return

    current_dir_old = os.getcwd()

    print("[build]: socketrocket")
    os.chdir(current_dir)

    if (-1 != config.option("platform").find("mac")):
        # BUGFIX: the check previously looked for "build/mac_arm_64", but
        # buildMacOS() creates "build/mac_arm64" — the cache check never
        # matched, so macOS rebuilt SocketRocket on every run.
        if not base.is_dir(current_dir + "/build/mac_64") or not base.is_dir(current_dir + "/build/mac_arm64"):
            buildMacOS()
    elif (-1 != config.option("platform").find("ios")):
        if not base.is_dir(current_dir + "/build/ios"):
            buildIOS()

    os.chdir(current_dir_old)
    return
|
||||
@ -6,6 +6,7 @@ import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
import v8_89
|
||||
|
||||
def clean():
|
||||
if base.is_dir("depot_tools"):
|
||||
@ -25,10 +26,14 @@ def clean():
|
||||
def is_main_platform():
|
||||
if (config.check_option("platform", "win_64") or config.check_option("platform", "win_32")):
|
||||
return True
|
||||
if (config.check_option("platform", "linux_64") or config.check_option("platform", "linux_32")):
|
||||
if (config.check_option("platform", "linux_64") or config.check_option("platform", "linux_32") or config.check_option("platform", "linux_arm64")):
|
||||
return True
|
||||
if config.check_option("platform", "mac_64"):
|
||||
return True
|
||||
if config.check_option("platform", "ios"):
|
||||
return True
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_xp_platform():
|
||||
@ -37,40 +42,43 @@ def is_xp_platform():
|
||||
return False
|
||||
|
||||
def is_use_clang():
|
||||
get_gcc_version = "gcc -dumpfullversion -dumpversion"
|
||||
popen = subprocess.Popen(get_gcc_version, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
gcc_version = 4
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
gcc_version_str = stdout.strip().decode("utf-8")
|
||||
gcc_version_major = gcc_version_str.split(".")[0]
|
||||
gcc_version = int(gcc_version_major)
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
gcc_version = base.get_gcc_version()
|
||||
|
||||
is_clang = "false"
|
||||
if (gcc_version >= 6):
|
||||
if (gcc_version >= 6000):
|
||||
is_clang = "true"
|
||||
|
||||
print("gcc major version: " + str(gcc_version) + ", use clang:" + is_clang)
|
||||
print("gcc version: " + str(gcc_version) + ", use clang:" + is_clang)
|
||||
return is_clang
|
||||
|
||||
def make():
|
||||
if config.option("module") == "mobile":
|
||||
return
|
||||
|
||||
if not is_main_platform():
|
||||
make_xp()
|
||||
return
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/v8"
|
||||
if ("ios" == config.option("platform")):
|
||||
return
|
||||
|
||||
print("[fetch & build]: v8")
|
||||
if (-1 != config.option("platform").find("android")):
|
||||
base.cmd_in_dir(base_dir + "/android", "python", ["./make.py"])
|
||||
if (-1 == config.option("platform").find("linux")) and (-1 == config.option("platform").find("mac")) and (-1 == config.option("platform").find("win")):
|
||||
return
|
||||
|
||||
if ("mac" == base.host_platform()) and (-1 == config.option("config").find("use_v8")):
|
||||
return
|
||||
|
||||
use_v8_89 = False
|
||||
if (-1 != config.option("config").lower().find("v8_version_89")):
|
||||
use_v8_89 = True
|
||||
|
||||
if (use_v8_89):
|
||||
v8_89.make()
|
||||
return
|
||||
|
||||
print("[fetch & build]: v8")
|
||||
old_env = dict(os.environ)
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/v8"
|
||||
old_cur = os.getcwd()
|
||||
os.chdir(base_dir)
|
||||
|
||||
@ -144,7 +152,13 @@ def make():
|
||||
if ("windows" == base.host_platform()):
|
||||
base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt")
|
||||
if ("mac" == base.host_platform()):
|
||||
base.replaceInFile("v8/build/config/mac/mac_sdk.gni", "if (mac_sdk_version != mac_sdk_min_build_override", "if (false && mac_sdk_version != mac_sdk_min_build_override")
|
||||
base.replaceInFile("v8/build/config/mac/mac_sdk.gni", "if (mac_sdk_version != mac_sdk_min_build_override", "if (false && mac_sdk_version != mac_sdk_min_build_override")
|
||||
base.replaceInFile("v8/build/mac/find_sdk.py", "^MacOSX(10\\.\\d+)\\.sdk$", "^MacOSX(1\\d\\.\\d+)\\.sdk$")
|
||||
|
||||
if (11003 <= base.get_mac_sdk_version_number()):
|
||||
base.copy_dir("v8/third_party/llvm-build/Release+Asserts/include", "v8/third_party/llvm-build/Release+Asserts/__include")
|
||||
base.delete_dir("v8/third_party/llvm-build/Release+Asserts/include")
|
||||
base.replaceInFile("v8/build/config/mac/BUILD.gn", "\"-mmacosx-version-min=$mac_deployment_target\",", "\"-mmacosx-version-min=$mac_deployment_target\",\n \"-Wno-deprecated-declarations\",")
|
||||
|
||||
# --------------------------------------------------------------------------
|
||||
# build
|
||||
@ -182,9 +196,10 @@ def make():
|
||||
base.cmd("ninja", ["-C", "out.gn/win_32/release"])
|
||||
|
||||
os.chdir(old_cur)
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
|
||||
make_xp()
|
||||
|
||||
return
|
||||
|
||||
def make_xp():
|
||||
@ -192,6 +207,7 @@ def make_xp():
|
||||
return
|
||||
|
||||
print("[fetch & build]: v8_xp")
|
||||
old_env = dict(os.environ)
|
||||
|
||||
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/v8/v8_xp"
|
||||
old_cur = os.getcwd()
|
||||
@ -213,7 +229,6 @@ def make_xp():
|
||||
if base.is_file("depot_tools/cipd.ps1"):
|
||||
base.replaceInFile("depot_tools/cipd.ps1", "windows-386", "windows-amd64")
|
||||
|
||||
old_path = os.environ["PATH"]
|
||||
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
|
||||
base_dir + "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin",
|
||||
config.option("vs-path") + "/../Common7/IDE",
|
||||
@ -276,6 +291,7 @@ def make_xp():
|
||||
base.copy_files("v8/build/Debug/lib/*", "win_32/debug/")
|
||||
base.copy_file("v8/build/Debug/icudt.dll", "win_32/debug/icudt.dll")
|
||||
|
||||
os.environ["PATH"] = old_path
|
||||
os.chdir(old_cur)
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
return
|
||||
|
||||
130
scripts/core_common/modules/v8_89.py
Normal file
130
scripts/core_common/modules/v8_89.py
Normal file
@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def make_args(args, platform, is_64=True, is_debug=False):
    """Build a gn '--args="..."' string for the given platform/bitness/config.

    `args` is not mutated. Flag order matches the original implementation:
    cpu flags, then is_debug, then per-platform clang/sysroot flags.
    """
    q = "\\\""  # quote escaped for the shell, as it must appear in --args

    cpu = "x64" if is_64 else "x86"
    flags = args[:] + ["target_cpu=" + q + cpu + q,
                       "v8_target_cpu=" + q + cpu + q]

    if platform == "linux_arm64":
        # arm64 replaces the x64/x86 cpu flags entirely and forces a sysroot.
        flags = args[:] + ["target_cpu=" + q + "arm64" + q,
                           "v8_target_cpu=" + q + "arm64" + q,
                           "use_sysroot=true"]

    flags.append("is_debug=true" if is_debug else "is_debug=false")

    if platform == "linux":
        flags.append("is_clang=true")
        flags.append("use_sysroot=false")
    if platform == "windows":
        flags.append("is_clang=false")

    return "--args=\"" + " ".join(flags) + "\""
|
||||
|
||||
def ninja_windows_make(args, is_64=True, is_debug=False):
    """Run gn + ninja for one Windows configuration.

    Works around the v8_wrappers target: temporarily patches the generated
    obj/v8_wrappers.ninja so wrappers.cc is actually compiled and linked,
    builds, then restores the pristine ninja file.
    """
    out_dir = "out.gn/" + ("win_64/" if is_64 else "win_32/") + ("debug" if is_debug else "release")

    base.cmd2("gn", ["gen", out_dir, make_args(args, "windows", is_64, is_debug)])

    wrappers_ninja = "./" + out_dir + "/obj/v8_wrappers.ninja"
    # Keep a pristine copy; the generated file is patched in place below.
    base.copy_file(wrappers_ninja, wrappers_ninja + ".bak")
    base.replaceInFile(wrappers_ninja, "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
    base.replaceInFile(wrappers_ninja, "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")

    # Build the patched wrappers target first, then everything else.
    base.cmd("ninja", ["-C", out_dir, "v8_wrappers"])
    base.cmd("ninja", ["-C", out_dir])

    # Restore the unpatched ninja file.
    base.delete_file(wrappers_ninja)
    base.move_file(wrappers_ninja + ".bak", wrappers_ninja)
    return
||||
|
||||
def make():
    """Fetch and build V8 branch 8.9 for every platform enabled in config.

    Clones depot_tools and v8 under core/Common/3dParty/v8_89, then runs
    gn/ninja once per requested platform. Restores the working directory
    and the entire process environment on exit, since depot_tools mutates
    PATH and other environment variables.
    """
    # Snapshot environment and cwd so all mutations below can be undone.
    old_env = dict(os.environ)
    old_cur = os.getcwd()

    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/v8_89"
    if not base.is_dir(base_dir):
        base.create_dir(base_dir)

    os.chdir(base_dir)
    if not base.is_dir("depot_tools"):
        base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])

    # depot_tools binaries (fetch, gclient, gn, ninja) must win on PATH.
    os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]

    if ("windows" == base.host_platform()):
        # Use the locally installed MSVS toolchain, not Google's internal one.
        base.set_env("DEPOT_TOOLS_WIN_TOOLCHAIN", "0")
        base.set_env("GYP_MSVS_VERSION", config.option("vs-version"))

    if not base.is_dir("v8"):
        base.cmd("./depot_tools/fetch", ["v8"], True)
        if ("windows" == base.host_platform()):
            # The v8 checkout contains paths longer than MAX_PATH on Windows.
            os.chdir("v8")
            base.cmd("git", ["config", "--system", "core.longpaths", "true"])
            os.chdir("../")
        # Pin the checkout to the 8.9 release branch, then force-sync deps.
        base.cmd("./depot_tools/gclient", ["sync", "-r", "remotes/branch-heads/8.9"], True)
        base.cmd("gclient", ["sync", "--force"], True)

    if ("windows" == base.host_platform()):
        # Link against the dynamic CRT to match the rest of the build.
        base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt")

    if not base.is_file("v8/src/base/platform/wrappers.cc"):
        # Provide a translation unit for wrappers.h so v8_wrappers can link.
        base.writeFile("v8/src/base/platform/wrappers.cc", "#include \"src/base/platform/wrappers.h\"\n")

    os.chdir("v8")

    # Common gn arguments for all platforms: one static monolithic library,
    # no external startup data, no bundled libc++, warnings not fatal.
    gn_args = ["v8_static_library=true",
               "is_component_build=false",
               "v8_monolithic=true",
               "v8_use_external_startup_data=false",
               "use_custom_libcxx=false",
               "treat_warnings_as_errors=false"]

    if config.check_option("platform", "linux_64"):
        base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")])
        base.cmd("ninja", ["-C", "out.gn/linux_64"])

    if config.check_option("platform", "linux_32"):
        base.cmd2("gn", ["gen", "out.gn/linux_32", make_args(gn_args, "linux", False)])
        base.cmd("ninja", ["-C", "out.gn/linux_32"])

    if config.check_option("platform", "linux_arm64"):
        # Cross build: fetch the arm64 sysroot before generating.
        base.cmd("build/linux/sysroot_scripts/install-sysroot.py", ["--arch=arm64"], False)
        base.cmd2("gn", ["gen", "out.gn/linux_arm64", make_args(gn_args, "linux_arm64", False)])
        base.cmd("ninja", ["-C", "out.gn/linux_arm64"])

    if config.check_option("platform", "mac_64"):
        base.cmd2("gn", ["gen", "out.gn/mac_64", make_args(gn_args, "mac")])
        base.cmd("ninja", ["-C", "out.gn/mac_64"])

    if config.check_option("platform", "win_64"):
        # Debug build only when the "debug" config is requested; both debug
        # and release are skipped if v8_monolith.lib already exists.
        if (-1 != config.option("config").lower().find("debug")):
            if not base.is_file("out.gn/win_64/debug/obj/v8_monolith.lib"):
                ninja_windows_make(gn_args, True, True)

        if not base.is_file("out.gn/win_64/release/obj/v8_monolith.lib"):
            ninja_windows_make(gn_args)

    if config.check_option("platform", "win_32"):
        if (-1 != config.option("config").lower().find("debug")):
            if not base.is_file("out.gn/win_32/debug/obj/v8_monolith.lib"):
                ninja_windows_make(gn_args, False, True)

        if not base.is_file("out.gn/win_32/release/obj/v8_monolith.lib"):
            ninja_windows_make(gn_args, False)

    # Undo the cwd and environment changes made above / by depot_tools.
    os.chdir(old_cur)
    os.environ.clear()
    os.environ.update(old_env)
||||
16
scripts/core_common/modules/websocket.py
Executable file
16
scripts/core_common/modules/websocket.py
Executable file
@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../..')
|
||||
import config
|
||||
import base
|
||||
import ixwebsocket
|
||||
import socketrocket
|
||||
|
||||
# Path of the qmake config (.pri) for the core WebSocket component.
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
|
||||
|
||||
def make():
    """Build the websocket backends: IXWebSocket, then SocketRocket."""
    for backend in (ixwebsocket, socketrocket):
        backend.make()
    return
|
||||
@ -5,7 +5,6 @@ import base
|
||||
import deploy_desktop
|
||||
import deploy_builder
|
||||
import deploy_server
|
||||
import deploy_develop
|
||||
import deploy_core
|
||||
import deploy_mobile
|
||||
|
||||
@ -16,8 +15,6 @@ def make():
|
||||
deploy_builder.make()
|
||||
if config.check_option("module", "server"):
|
||||
deploy_server.make()
|
||||
if config.check_option("module", "develop"):
|
||||
deploy_develop.make()
|
||||
if config.check_option("module", "core"):
|
||||
deploy_core.make()
|
||||
if config.check_option("module", "mobile"):
|
||||
|
||||
@ -34,19 +34,26 @@ def make():
|
||||
# x2t
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
if ("ios" == platform):
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
|
||||
else:
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
|
||||
|
||||
if (native_platform == "linux_64"):
|
||||
base.generate_check_linux_system(git_dir + "/build_tools", root_dir)
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll")
|
||||
@ -64,29 +71,16 @@ def make():
|
||||
if isWindowsXP:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir, "doctrenderer")
|
||||
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/xp/doctrenderer.lib", root_dir + "/doctrenderer.lib")
|
||||
base.copy_files(core_dir + "/Common/3dParty/v8/v8_xp/" + platform + "/release/icudt*.dll", root_dir + "/")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
|
||||
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", root_dir + "/")
|
||||
else:
|
||||
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", root_dir + "/icudtl.dat")
|
||||
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
|
||||
|
||||
# app
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
|
||||
base.copy_dir(git_dir + "/DocumentBuilder/empty", root_dir + "/empty")
|
||||
base.copy_dir(git_dir + "/DocumentBuilder/samples", root_dir + "/samples")
|
||||
|
||||
# html
|
||||
base.create_dir(root_dir + "/HtmlFileInternal")
|
||||
if (False == isWindowsXP) and (0 != platform.find("mac")) and (0 != platform.find("ios")):
|
||||
base.copy_exe(core_build_dir + "/lib/" + platform_postfix, root_dir + "/HtmlFileInternal", "HtmlFileInternal")
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/build/*", root_dir + "/HtmlFileInternal")
|
||||
if (0 == platform.find("win")):
|
||||
base.delete_file(root_dir + "/HtmlFileInternal/cef_sandbox.lib")
|
||||
base.delete_file(root_dir + "/HtmlFileInternal/libcef.lib")
|
||||
base.copy_dir(git_dir + "/document-templates/new/en-US", root_dir + "/empty")
|
||||
|
||||
# js
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", root_dir + "/sdkjs")
|
||||
@ -101,9 +95,12 @@ def make():
|
||||
base.replaceInFile(root_dir + "/include/docbuilder.h", "Q_DECL_EXPORT", "BUILDING_DOCBUILDER")
|
||||
|
||||
if ("win_64" == platform):
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/x64/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_64/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_64/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
|
||||
|
||||
elif ("win_32" == platform):
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/Win32/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_32/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
|
||||
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_32/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
|
||||
|
||||
# correct ios frameworks
|
||||
if ("ios" == platform):
|
||||
@ -111,6 +108,7 @@ def make():
|
||||
|
||||
if (0 == platform.find("mac")):
|
||||
base.mac_correct_rpath_x2t(root_dir)
|
||||
|
||||
base.mac_correct_rpath_docbuilder(root_dir)
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -24,18 +24,23 @@ def make():
|
||||
base.create_dir(archive_dir)
|
||||
|
||||
platform = native_platform
|
||||
platform_postfix = platform + base.qt_dst_postfix()
|
||||
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "HtmlFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform, archive_dir, "UnicodeConverter")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform, archive_dir, "x2t")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")
|
||||
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", archive_dir + "/sdkjs")
|
||||
base.create_dir(archive_dir + "/sdkjs/vendor")
|
||||
@ -44,26 +49,20 @@ def make():
|
||||
|
||||
if ("windows" == base.host_platform()):
|
||||
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*.dll", archive_dir + "/")
|
||||
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", archive_dir + "/")
|
||||
else:
|
||||
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*", archive_dir + "/")
|
||||
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", archive_dir + "/")
|
||||
base.copy_v8_files(core_dir, archive_dir, platform)
|
||||
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform, archive_dir, "allfontsgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform, archive_dir, "allthemesgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform, archive_dir, "standardtester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allfontsgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allthemesgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "standardtester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2ttester")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
|
||||
|
||||
base.create_dir(archive_dir + "/HtmlFileInternal")
|
||||
|
||||
base.copy_exe(core_build_dir + "/lib/" + platform, archive_dir + "/HtmlFileInternal", "HtmlFileInternal")
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/build/*", archive_dir + "/HtmlFileInternal")
|
||||
if (0 == platform.find("win")):
|
||||
base.delete_file(archive_dir + "/HtmlFileInternal/cef_sandbox.lib")
|
||||
base.delete_file(archive_dir + "/HtmlFileInternal/libcef.lib")
|
||||
|
||||
if base.is_file(archive_dir + "/core.7z"):
|
||||
base.delete_file(archive_dir + "/core.7z")
|
||||
base.archive_folder(archive_dir, archive_dir + "/core.7z")
|
||||
if base.is_file(archive_dir + ".7z"):
|
||||
base.delete_file(archive_dir + ".7z")
|
||||
base.archive_folder(archive_dir + "/*", archive_dir + ".7z")
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -2,6 +2,8 @@
|
||||
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import platform
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../out"
|
||||
@ -42,24 +44,32 @@ def make():
|
||||
# x2t
|
||||
base.create_dir(root_dir + "/converter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
if ("ios" == platform):
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "x2t")
|
||||
else:
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "x2t")
|
||||
|
||||
if (native_platform == "linux_64"):
|
||||
base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter")
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/converter/icudt58.dll")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/converter/icuuc58.dll")
|
||||
base.copy_file(git_dir + "/desktop-apps/common/converter/package.config", root_dir + "/converter/package.config")
|
||||
|
||||
if (0 == platform.find("linux")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/converter/libicudata.so.58")
|
||||
@ -72,19 +82,12 @@ def make():
|
||||
# doctrenderer
|
||||
if isWindowsXP:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir + "/converter", "doctrenderer")
|
||||
base.copy_files(core_dir + "/Common/3dParty/v8/v8_xp/" + platform + "/release/icudt*.dll", root_dir + "/converter/")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "doctrenderer")
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", root_dir + "/converter/")
|
||||
else:
|
||||
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", root_dir + "/converter/icudtl.dat")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "doctrenderer")
|
||||
base.copy_v8_files(core_dir, root_dir + "/converter", platform, isWindowsXP)
|
||||
|
||||
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop")
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/converter/empty", root_dir + "/converter/empty")
|
||||
|
||||
if (False == isWindowsXP) and (0 != platform.find("mac")) and (0 != platform.find("ios")):
|
||||
base.copy_exe(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFileInternal")
|
||||
base.copy_dir(git_dir + "/document-templates/new", root_dir + "/converter/empty")
|
||||
|
||||
# dictionaries
|
||||
base.create_dir(root_dir + "/dictionaries")
|
||||
@ -154,13 +157,50 @@ def make():
|
||||
elif (0 == platform.find("linux")):
|
||||
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors", root_dir + "/DesktopEditors")
|
||||
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
|
||||
if ("" != base.get_env("VIDEO_PLAYER_VLC_DIR")):
|
||||
vlc_dir = git_dir + "/desktop-sdk/ChromiumBasedEditors/videoplayerlib/vlc/"
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlc.dll", root_dir + "/libvlc.dll")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlccore.dll", root_dir + "/libvlccore.dll")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.dll", root_dir + "/VLCQtCore.dll")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.dll", root_dir + "/VLCQtWidgets.dll")
|
||||
else:
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlc.so", root_dir + "/libvlc.so")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlc.so.5", root_dir + "/libvlc.so.5")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so", root_dir + "/libvlccore.so")
|
||||
base.copy_file(vlc_dir + platform + "/bin/libvlccore.so.8", root_dir + "/libvlccore.so.8")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtCore.so", root_dir + "/VLCQtCore.so")
|
||||
base.copy_file(vlc_dir + platform + "/bin/VLCQtWidgets.so", root_dir + "/VLCQtWidgets.so")
|
||||
|
||||
if isWindowsXP:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer/xp", root_dir, "videoplayer")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform + "/mediaplayer", root_dir, "videoplayer")
|
||||
|
||||
base.copy_dir(vlc_dir + platform + "/bin/plugins", root_dir + "/plugins")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
|
||||
|
||||
base.create_dir(root_dir + "/editors")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
|
||||
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
|
||||
|
||||
# desktopeditors-help
|
||||
root_help_dir = root_dir + "-help"
|
||||
if (base.is_dir(root_help_dir)):
|
||||
base.delete_dir(root_help_dir)
|
||||
for i in ["documenteditor", "presentationeditor", "spreadsheeteditor"]:
|
||||
base.copy_dir(
|
||||
base_dir + "/js/" + branding + "/desktop/web-apps/apps/%s/main/resources/help" % i,
|
||||
root_help_dir + "/editors/web-apps/apps/%s/main/resources/help" % i)
|
||||
|
||||
if ("1" != config.option("preinstalled-help") and not isWindowsXP):
|
||||
# remove help from install until web-apps containes help
|
||||
base.delete_dir(root_dir + "/editors/web-apps/apps/documenteditor/main/resources/help")
|
||||
base.delete_dir(root_dir + "/editors/web-apps/apps/presentationeditor/main/resources/help")
|
||||
base.delete_dir(root_dir + "/editors/web-apps/apps/spreadsheeteditor/main/resources/help")
|
||||
|
||||
base.create_dir(root_dir + "/editors/sdkjs-plugins")
|
||||
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
|
||||
# remove some default plugins
|
||||
@ -178,18 +218,29 @@ def make():
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt", root_dir + "/editors/sdkjs-plugins", "advanced2", True)
|
||||
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/common/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}", root_dir + "/editors/sdkjs-plugins/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}")
|
||||
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/engine/database/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}", root_dir + "/editors/sdkjs-plugins/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}")
|
||||
|
||||
if (0 != platform.find("mac")):
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
|
||||
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
|
||||
|
||||
base.copy_file(base_dir + "/js/" + branding + "/desktop/index.html", root_dir + "/index.html")
|
||||
base.copy_file(git_dir + "/desktop-apps/common/loginpage/addon/externalcloud.json", root_dir + "/editors/externalcloud.json")
|
||||
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
|
||||
|
||||
isUseJSC = False
|
||||
if (0 == platform.find("mac")):
|
||||
file_size_doctrenderer = os.path.getsize(root_dir + "/converter/libdoctrenderer.dylib")
|
||||
print("file_size_doctrenderer: " + str(file_size_doctrenderer))
|
||||
if (file_size_doctrenderer < 5*1024*1024):
|
||||
isUseJSC = True
|
||||
|
||||
if isUseJSC:
|
||||
base.delete_file(root_dir + "/converter/icudtl.dat")
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_lib(git_dir + "/desktop-apps/win-linux/3dparty/WinSparkle/" + platform, root_dir, "WinSparkle")
|
||||
base.delete_file(root_dir + "/cef_sandbox.lib")
|
||||
base.delete_file(root_dir + "/libcef.lib")
|
||||
|
||||
isMacArmPlaformOnIntel = False
|
||||
if (platform == "mac_arm64") and not base.is_os_arm():
|
||||
isMacArmPlaformOnIntel = True
|
||||
|
||||
# all themes generate ----
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allfontsgen")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allthemesgen")
|
||||
@ -197,17 +248,27 @@ def make():
|
||||
if (0 == platform.find("mac")):
|
||||
base.mac_correct_rpath_desktop(root_dir)
|
||||
|
||||
themes_params = []
|
||||
if ("" != config.option("themesparams")):
|
||||
themes_params = ["--params=\"" + config.option("themesparams") + "\""]
|
||||
base.cmd_exe(root_dir + "/converter/allfontsgen", ["--use-system=\"1\"", "--input=\"" + root_dir + "/fonts\"", "--input=\"" + git_dir + "/core-fonts\"", "--allfonts=\"" + root_dir + "/converter/AllFonts.js\"", "--selection=\"" + root_dir + "/converter/font_selection.bin\""])
|
||||
base.cmd_exe(root_dir + "/converter/allthemesgen", ["--converter-dir=\"" + root_dir + "/converter\"", "--src=\"" + root_dir + "/editors/sdkjs/slide/themes\"", "--allfonts=\"AllFonts.js\"", "--output=\"" + root_dir + "/editors/sdkjs/common/Images\""] + themes_params)
|
||||
if isMacArmPlaformOnIntel:
|
||||
sdkjs_dir = root_dir + "/editors/sdkjs"
|
||||
end_find_platform = sdkjs_dir.rfind("/mac_arm64/")
|
||||
sdkjs_dir_mac64 = sdkjs_dir[0:end_find_platform] + "/mac_64/" + sdkjs_dir[end_find_platform+11:]
|
||||
base.delete_dir(sdkjs_dir)
|
||||
base.copy_dir(sdkjs_dir_mac64, sdkjs_dir)
|
||||
else:
|
||||
themes_params = []
|
||||
if ("" != config.option("themesparams")):
|
||||
themes_params = ["--params=\"" + config.option("themesparams") + "\""]
|
||||
base.cmd_exe(root_dir + "/converter/allfontsgen", ["--use-system=\"1\"", "--input=\"" + root_dir + "/fonts\"", "--input=\"" + git_dir + "/core-fonts\"", "--allfonts=\"" + root_dir + "/converter/AllFonts.js\"", "--selection=\"" + root_dir + "/converter/font_selection.bin\""])
|
||||
base.cmd_exe(root_dir + "/converter/allthemesgen", ["--converter-dir=\"" + root_dir + "/converter\"", "--src=\"" + root_dir + "/editors/sdkjs/slide/themes\"", "--allfonts=\"AllFonts.js\"", "--output=\"" + root_dir + "/editors/sdkjs/common/Images\""] + themes_params)
|
||||
base.delete_file(root_dir + "/converter/AllFonts.js")
|
||||
base.delete_file(root_dir + "/converter/font_selection.bin")
|
||||
base.delete_file(root_dir + "/converter/fonts.log")
|
||||
|
||||
base.delete_exe(root_dir + "/converter/allfontsgen")
|
||||
base.delete_exe(root_dir + "/converter/allthemesgen")
|
||||
base.delete_file(root_dir + "/converter/AllFonts.js")
|
||||
base.delete_file(root_dir + "/converter/font_selection.bin")
|
||||
base.delete_file(root_dir + "/editors/sdkjs/slide/sdk-all.cache")
|
||||
|
||||
if not isUseJSC:
|
||||
base.delete_file(root_dir + "/editors/sdkjs/slide/sdk-all.cache")
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -3,6 +3,11 @@
|
||||
import config
|
||||
import base
|
||||
|
||||
def exclude_arch(directory, frameworks):
|
||||
for lib in frameworks:
|
||||
base.cmd("lipo", ["-remove", "arm64", directory + "/" + lib + ".framework/" + lib, "-o", directory + "/" + lib + ".framework/" + lib])
|
||||
return
|
||||
|
||||
def make():
|
||||
base_dir = base.get_script_dir() + "/../out"
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
@ -34,16 +39,24 @@ def make():
|
||||
|
||||
# x2t
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
|
||||
|
||||
if (0 == platform.find("win") or 0 == platform.find("linux") or 0 == platform.find("mac")):
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
|
||||
else:
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
|
||||
|
||||
# icu
|
||||
if (0 == platform.find("win")):
|
||||
@ -58,6 +71,11 @@ def make():
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib")
|
||||
|
||||
if (0 == platform.find("android")):
|
||||
#base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicudata.so", root_dir + "/libicudata.so")
|
||||
#base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicuuc.so", root_dir + "/libicuuc.so")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/icudt58l.dat", root_dir + "/icudt58l.dat")
|
||||
|
||||
# js
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
|
||||
|
||||
@ -78,17 +96,24 @@ def make():
|
||||
# js
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
|
||||
# app
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
|
||||
base.copy_dir(git_dir + "/DocumentBuilder/empty", root_dir + "/empty")
|
||||
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
|
||||
libs_dir = root_dir + "/lib"
|
||||
base.create_dir(libs_dir + "/arm64-v8a")
|
||||
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.so", libs_dir + "/arm64-v8a")
|
||||
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.so.*", libs_dir + "/arm64-v8a")
|
||||
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.dat", libs_dir + "/arm64-v8a")
|
||||
base.create_dir(libs_dir + "/armeabi-v7a")
|
||||
base.copy_files(base_dir + "/android_armv7/" + branding + "/mobile/*.so", libs_dir + "/armeabi-v7a")
|
||||
base.copy_files(base_dir + "/android_armv7/" + branding + "/mobile/*.so.*", libs_dir + "/armeabi-v7a")
|
||||
base.copy_files(base_dir + "/android_armv7/" + branding + "/mobile/*.dat", libs_dir + "/armeabi-v7a")
|
||||
base.create_dir(libs_dir + "/x86")
|
||||
base.copy_files(base_dir + "/android_x86/" + branding + "/mobile/*.so", libs_dir + "/x86")
|
||||
base.copy_files(base_dir + "/android_x86/" + branding + "/mobile/*.so.*", libs_dir + "/x86")
|
||||
base.copy_files(base_dir + "/android_x86/" + branding + "/mobile/*.dat", libs_dir + "/x86")
|
||||
base.create_dir(libs_dir + "/x86_64")
|
||||
base.copy_files(base_dir + "/android_x86_64/" + branding + "/mobile/*.so", libs_dir + "/x86_64")
|
||||
base.copy_files(base_dir + "/android_x86_64/" + branding + "/mobile/*.so.*", libs_dir + "/x86_64")
|
||||
base.copy_files(base_dir + "/android_x86_64/" + branding + "/mobile/*.dat", libs_dir + "/x86_64")
|
||||
break
|
||||
|
||||
return
|
||||
|
||||
@ -32,7 +32,8 @@ def make():
|
||||
continue
|
||||
|
||||
root_dir = base_dir + ("/" + native_platform + "/" + branding + "/documentserver")
|
||||
root_dir_snap = root_dir + '-snap'
|
||||
root_dir_snap = root_dir + '-snap/var/www/onlyoffice/documentserver'
|
||||
root_dir_snap_example = root_dir_snap + '-example'
|
||||
if (base.is_dir(root_dir)):
|
||||
base.delete_dir(root_dir)
|
||||
base.create_dir(root_dir)
|
||||
@ -57,11 +58,6 @@ def make():
|
||||
base.create_dir(build_server_dir + '/Metrics/node_modules/modern-syslog/build/Release')
|
||||
base.copy_file(bin_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
|
||||
|
||||
base.create_dir(build_server_dir + '/SpellChecker')
|
||||
base.copy_exe(bin_server_dir + "/SpellChecker", build_server_dir + '/SpellChecker', "spellchecker")
|
||||
base.create_dir(build_server_dir + '/SpellChecker/node_modules/nodehun/build/Release')
|
||||
base.copy_file(bin_server_dir + "/SpellChecker/node_modules/nodehun/build/Release/nodehun.node", build_server_dir + '/SpellChecker/node_modules/nodehun/build/Release/nodehun.node')
|
||||
|
||||
|
||||
qt_dir = base.qt_setup(native_platform)
|
||||
platform = native_platform
|
||||
@ -76,17 +72,24 @@ def make():
|
||||
base.create_dir(converter_dir)
|
||||
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "kernel")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "kernel_network")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "UnicodeConverter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "graphics")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfWriter")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfReader")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DjVuFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "XpsFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlFile2")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlRenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "doctrenderer")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "Fb2File")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "EpubFile")
|
||||
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DocxRenderer")
|
||||
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", converter_dir + "/cmap.bin")
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "x2t")
|
||||
|
||||
if (native_platform == "linux_64"):
|
||||
base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
|
||||
|
||||
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server")
|
||||
|
||||
# icu
|
||||
@ -101,34 +104,32 @@ def make():
|
||||
if (0 == platform.find("mac")):
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", converter_dir + "/libicudata.58.dylib")
|
||||
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", converter_dir + "/libicuuc.58.dylib")
|
||||
|
||||
if (0 == platform.find("win")):
|
||||
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", converter_dir + "/")
|
||||
else:
|
||||
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", converter_dir + "/icudtl.dat")
|
||||
|
||||
base.copy_v8_files(core_dir, converter_dir, platform)
|
||||
|
||||
# builder
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "docbuilder")
|
||||
base.copy_dir(git_dir + "/DocumentBuilder/empty", converter_dir + "/empty")
|
||||
|
||||
# html
|
||||
base.create_dir(converter_dir + "/HtmlFileInternal")
|
||||
base.copy_exe(core_build_dir + "/lib/" + platform_postfix, converter_dir + "/HtmlFileInternal", "HtmlFileInternal")
|
||||
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/build/*", converter_dir + "/HtmlFileInternal")
|
||||
if (0 == platform.find("win")):
|
||||
base.delete_file(root_dir + "/HtmlFileInternal/cef_sandbox.lib")
|
||||
base.delete_file(root_dir + "/HtmlFileInternal/libcef.lib")
|
||||
base.copy_dir(git_dir + "/document-templates/new/en-US", converter_dir + "/empty")
|
||||
|
||||
# js
|
||||
js_dir = root_dir
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
|
||||
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
|
||||
|
||||
# add embed worker code
|
||||
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
|
||||
|
||||
# plugins
|
||||
base.create_dir(js_dir + "/sdkjs-plugins")
|
||||
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins")
|
||||
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins")
|
||||
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
|
||||
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
|
||||
base.create_dir(js_dir + "/sdkjs-plugins/v1")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", js_dir + "/sdkjs-plugins/v1/plugins.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", js_dir + "/sdkjs-plugins/v1/plugins-ui.js")
|
||||
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", js_dir + "/sdkjs-plugins/v1/plugins.css")
|
||||
base.support_old_versions_plugins(js_dir + "/sdkjs-plugins")
|
||||
|
||||
base.clone_marketplace_plugin(root_dir + "/sdkjs-plugins")
|
||||
|
||||
# tools
|
||||
tools_dir = root_dir + "/server/tools"
|
||||
@ -137,11 +138,11 @@ def make():
|
||||
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allthemesgen")
|
||||
|
||||
branding_dir = server_dir + "/branding"
|
||||
if("" != config.option("branding")):
|
||||
if("" != config.option("branding") and "onlyoffice" != config.option("branding")):
|
||||
branding_dir = git_dir + '/' + config.option("branding") + '/server'
|
||||
|
||||
#dictionaries
|
||||
spellchecker_dictionaries = build_server_dir + '/SpellChecker/dictionaries'
|
||||
spellchecker_dictionaries = root_dir + '/dictionaries'
|
||||
spellchecker_dictionaries_files = server_dir + '/../dictionaries/*_*'
|
||||
base.create_dir(spellchecker_dictionaries)
|
||||
base.copy_files(spellchecker_dictionaries_files, spellchecker_dictionaries)
|
||||
@ -163,6 +164,12 @@ def make():
|
||||
base.create_dir(core_fonts)
|
||||
base.copy_dir_content(core_fonts_files, core_fonts, "", ".git")
|
||||
|
||||
#document-templates
|
||||
document_templates_files = server_dir + '/../document-templates'
|
||||
document_templates = build_server_dir + '/../document-templates'
|
||||
base.copy_dir(document_templates_files + '/new', document_templates + '/new')
|
||||
base.copy_dir(document_templates_files + '/sample', document_templates + '/sample')
|
||||
|
||||
#license
|
||||
license_file1 = server_dir + '/LICENSE.txt'
|
||||
license_file2 = server_dir + '/3rd-Party.txt'
|
||||
@ -193,6 +200,8 @@ def make():
|
||||
|
||||
# snap
|
||||
if (0 == platform.find("linux")):
|
||||
if (base.is_dir(root_dir_snap)):
|
||||
base.delete_dir(root_dir_snap)
|
||||
base.create_dir(root_dir_snap)
|
||||
base.copy_dir(root_dir, root_dir_snap)
|
||||
base.copy_dir(bin_server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
|
||||
@ -202,11 +211,13 @@ def make():
|
||||
base.copy_dir(bin_server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
|
||||
base.copy_dir(bin_server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
|
||||
base.delete_file(root_dir_snap + '/server/FileConverter/converter')
|
||||
base.copy_dir(bin_server_dir + '/SpellChecker/node_modules', root_dir_snap + '/server/SpellChecker/node_modules')
|
||||
base.copy_dir(bin_server_dir + '/SpellChecker/sources', root_dir_snap + '/server/SpellChecker/sources')
|
||||
base.delete_file(root_dir_snap + '/server/SpellChecker/spellchecker')
|
||||
base.copy_dir(bin_server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
|
||||
base.copy_dir(bin_server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
|
||||
if (base.is_dir(root_dir_snap_example)):
|
||||
base.delete_dir(root_dir_snap_example)
|
||||
base.create_dir(root_dir_snap_example)
|
||||
base.copy_dir(bin_example_dir + '/..', root_dir_snap_example)
|
||||
base.delete_file(root_dir_snap + '/example/nodejs/example')
|
||||
|
||||
return
|
||||
|
||||
|
||||
@ -3,6 +3,10 @@
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
import json
|
||||
|
||||
def get_core_url(arch, branch):
|
||||
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
|
||||
|
||||
def make():
|
||||
git_dir = base.get_script_dir() + "/../.."
|
||||
@ -20,11 +24,15 @@ def make():
|
||||
arch = "x86"
|
||||
arch2 = "_32"
|
||||
|
||||
url = "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + config.option("branch") + "/latest/" + arch + "/core.7z"
|
||||
url = get_core_url(arch, config.option("branch"))
|
||||
data_url = base.get_file_last_modified_url(url)
|
||||
if (data_url == "" and config.option("branch") != "develop"):
|
||||
url = get_core_url(arch, "develop")
|
||||
data_url = base.get_file_last_modified_url(url)
|
||||
|
||||
old_data_url = base.readFile("./core.7z.data")
|
||||
|
||||
if (old_data_url != data_url):
|
||||
if (data_url != "" and old_data_url != data_url):
|
||||
print("-----------------------------------------------------------")
|
||||
print("Downloading core last version... --------------------------")
|
||||
print("-----------------------------------------------------------")
|
||||
@ -32,8 +40,6 @@ def make():
|
||||
base.delete_file("./core.7z")
|
||||
if (base.is_dir("./core")):
|
||||
base.delete_dir("./core")
|
||||
if (base.is_dir("./HtmlFileInternal")):
|
||||
base.delete_dir("./HtmlFileInternal")
|
||||
base.download(url, "./core.7z")
|
||||
|
||||
print("-----------------------------------------------------------")
|
||||
@ -50,13 +56,21 @@ def make():
|
||||
platform = base.host_platform() + arch2
|
||||
|
||||
base.copy_files("./core/*", "./")
|
||||
else:
|
||||
print("-----------------------------------------------------------")
|
||||
print("Core is up to date. ---------------------------------------")
|
||||
print("-----------------------------------------------------------")
|
||||
|
||||
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/")
|
||||
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
|
||||
|
||||
if base.is_dir(git_dir + "/fonts"):
|
||||
base.delete_dir(git_dir + "/fonts")
|
||||
base.create_dir(git_dir + "/fonts")
|
||||
if not base.is_dir(git_dir + "/sdkjs-plugins"):
|
||||
base.create_dir(git_dir + "/sdkjs-plugins")
|
||||
|
||||
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
|
||||
base.clone_marketplace_plugin(git_dir + "/sdkjs-plugins")
|
||||
|
||||
if not base.is_dir(git_dir + "/fonts"):
|
||||
base.create_dir(git_dir + "/fonts")
|
||||
|
||||
if ("mac" == base.host_platform()):
|
||||
base.mac_correct_rpath_x2t("./")
|
||||
@ -79,28 +93,62 @@ def make():
|
||||
#base.cmd_exe("./allthemesgen", ["--converter-dir=\"" + git_dir + "/server/FileConverter/bin\"", "--src=\"" + git_dir + "/sdkjs/slide/themes\"", "--output=\"" + git_dir + "/sdkjs/common/Images\"", "--postfix=android", "--params=280,224"])
|
||||
|
||||
# add directories to open directories
|
||||
data_local_devel = "{\n"
|
||||
data_local_devel += "\"services\": {\n"
|
||||
data_local_devel += "\"CoAuthoring\": {\n"
|
||||
data_local_devel += "\"server\": {\n"
|
||||
data_local_devel += "\"static_content\": {\n"
|
||||
is_exist_addons = False
|
||||
for addon in config.sdkjs_addons:
|
||||
data_local_devel += ("\"/" + config.sdkjs_addons[addon] + "\" : { \"path\": \"../../../" + config.sdkjs_addons[addon] + "\" },\n")
|
||||
is_exist_addons = True
|
||||
for addon in config.web_apps_addons:
|
||||
data_local_devel += ("\"/" + config.web_apps_addons[addon] + "\" : { \"path\": \"../../../" + config.web_apps_addons[addon] + "\" },\n")
|
||||
is_exist_addons = True
|
||||
if is_exist_addons:
|
||||
data_local_devel = data_local_devel[:-2]
|
||||
data_local_devel += "\n"
|
||||
data_local_devel += "}\n"
|
||||
data_local_devel += "}\n"
|
||||
data_local_devel += "}\n"
|
||||
data_local_devel += "}\n"
|
||||
data_local_devel += "}\n"
|
||||
base.writeFile(git_dir + "/server/Common/config/local-development-" + base.host_platform() + ".json", data_local_devel)
|
||||
addon_base_path = "../../"
|
||||
server_config = {}
|
||||
static_content = {}
|
||||
sql = {}
|
||||
|
||||
server_addons = []
|
||||
if (config.option("server-addons") != ""):
|
||||
server_addons = config.option("server-addons").rsplit(", ")
|
||||
if ("server-lockstorage" in server_addons):
|
||||
server_config["editorDataStorage"] = "editorDataRedis"
|
||||
|
||||
sdkjs_addons = []
|
||||
if (config.option("sdkjs-addons") != ""):
|
||||
sdkjs_addons = config.option("sdkjs-addons").rsplit(", ")
|
||||
for addon in sdkjs_addons:
|
||||
static_content["/" + addon] = {"path": addon_base_path + addon}
|
||||
|
||||
web_apps_addons = []
|
||||
if (config.option("web-apps-addons") != ""):
|
||||
web_apps_addons = config.option("web-apps-addons").rsplit(", ")
|
||||
for addon in web_apps_addons:
|
||||
static_content["/" + addon] = {"path": addon_base_path + addon}
|
||||
|
||||
if (config.option("external-folder") != ""):
|
||||
external_folder = config.option("external-folder")
|
||||
static_content["/sdkjs"] = {"path": addon_base_path + external_folder + "/sdkjs"}
|
||||
static_content["/web-apps"] = {"path": addon_base_path + external_folder + "/web-apps"}
|
||||
|
||||
if (config.option("sql-type") != ""):
|
||||
sql["type"] = config.option("sql-type")
|
||||
if (config.option("db-port") != ""):
|
||||
sql["dbPort"] = config.option("db-port")
|
||||
if (config.option("db-user") != ""):
|
||||
sql["dbUser"] = config.option("db-user")
|
||||
if (config.option("db-pass") != ""):
|
||||
sql["dbPass"] = config.option("db-pass")
|
||||
|
||||
server_config["static_content"] = static_content
|
||||
|
||||
json_file = git_dir + "/server/Common/config/local-development-" + base.host_platform() + ".json"
|
||||
base.writeFile(json_file, json.dumps({"services": {"CoAuthoring": {"server": server_config, "sql": sql}}}, indent=2))
|
||||
|
||||
#site url
|
||||
example_config = {}
|
||||
if (base.host_platform() == "linux"):
|
||||
example_config["port"] = 3000
|
||||
else:
|
||||
example_config["port"] = 80
|
||||
example_config["siteUrl"] = "http://" + config.option("siteUrl") + ":8000/"
|
||||
example_config["apiUrl"] = "web-apps/apps/api/documents/api.js"
|
||||
example_config["preloaderUrl"] = "web-apps/apps/api/documents/cache-scripts.html"
|
||||
json_dir = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/"
|
||||
json_file = json_dir + "/local-development-" + base.host_platform() + ".json"
|
||||
if base.is_exist(json_dir):
|
||||
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
|
||||
|
||||
os.chdir(old_cur)
|
||||
return
|
||||
|
||||
988
scripts/develop/dependence.py
Normal file
988
scripts/develop/dependence.py
Normal file
@ -0,0 +1,988 @@
|
||||
import sys
|
||||
sys.path.append('vendor')
|
||||
sys.path.append('..')
|
||||
import os
|
||||
import base
|
||||
import subprocess
|
||||
import config
|
||||
|
||||
host_platform = base.host_platform()
|
||||
|
||||
if (sys.version_info[0] >= 3):
|
||||
unicode = str
|
||||
|
||||
if (host_platform == 'windows'):
|
||||
import libwindows
|
||||
if (sys.version_info[0] >= 3):
|
||||
import winreg
|
||||
else:
|
||||
import _winreg as winreg
|
||||
|
||||
class CDependencies:
  """Accumulates the actions discovered by the dependency checks.

  install/uninstall hold program identifiers (duplicates skipped),
  removepath holds stale directories to delete, and sqlPath records the
  location of a validated SQL server, if one was found.
  """

  def __init__(self):
    self.install = []      # program ids to install
    self.uninstall = []    # program ids to uninstall first
    self.removepath = []   # stale paths to remove
    self.sqlPath = ''      # detected SQL server location ('' = none)

  def append(self, oCdependencies):
    """Merge another CDependencies into this one, skipping duplicates."""
    for entry in oCdependencies.install:
      self.append_install(entry)
    for entry in oCdependencies.uninstall:
      self.append_uninstall(entry)
    for entry in oCdependencies.removepath:
      self.append_removepath(entry)

    # the most recently detected SQL path wins
    if oCdependencies.sqlPath != '':
      self.sqlPath = oCdependencies.sqlPath

  def append_install(self, item):
    """Queue `item` for installation unless already queued."""
    if item not in self.install:
      self.install.append(item)

  def append_uninstall(self, item):
    """Queue `item` for removal unless already queued."""
    if item not in self.uninstall:
      self.uninstall.append(item)

  def append_removepath(self, item):
    """Queue path `item` for deletion unless already queued."""
    if item not in self.removepath:
      self.removepath.append(item)

  def get_install(self):
    """Return install.py argv pairs: ['--install', id, ...]."""
    args = []
    for entry in self.install:
      args.extend(['--install', entry])
    return args

  def get_uninstall(self):
    """Return install.py argv pairs: ['--uninstall', id, ...]."""
    args = []
    for entry in self.uninstall:
      args.extend(['--uninstall', entry])
    return args

  def get_removepath(self):
    """Return install.py argv pairs: ['--remove-path', path, ...]."""
    args = []
    for entry in self.removepath:
      args.extend(['--remove-path', entry])
    return args
|
||||
|
||||
def check__docker_dependencies():
  """Check/install the dependencies needed inside a docker-style build
  (Node.js and 7z), elevating via libwindows.sudo on Windows.

  Returns False when the Visual C++ components are missing on Windows and
  True on mac.  NOTE(review): on the normal windows/linux path the function
  falls off the end and implicitly returns None — callers testing the
  result for truthiness will see that as failure; confirm intended.
  """
  if (host_platform == 'windows' and not check_vc_components()):
    return False
  if (host_platform == 'mac'):
    return True

  checksResult = CDependencies()
  checksResult.append(check_nodejs())
  checksResult.append(check_7z())
  if (len(checksResult.install) > 0):
    # install.py argv order: uninstalls first, then path removals, then installs
    install_args = ['install.py']
    install_args += checksResult.get_uninstall()
    install_args += checksResult.get_removepath()
    install_args += checksResult.get_install()
    base_dir = base.get_script_dir(__file__)
    install_args[0] = './scripts/develop/' + install_args[0]
    if (host_platform == 'windows'):
      # installing programs requires admin rights on Windows
      code = libwindows.sudo(unicode(sys.executable), install_args)
    elif (host_platform == 'linux'):
      get_updates()
      base.cmd_in_dir(base_dir + "/../../", 'python', install_args, False)
|
||||
|
||||
def check_dependencies():
  """Check every build dependency (git, curl, node, npm, 7z, java, erlang,
  rabbitmq, grunt-cli, SQL server, optional redis), run install.py with
  elevated rights for anything missing, then validate the SQL configuration.

  Returns False when the Visual C++ components are missing on Windows,
  True on mac, otherwise the result of check_MySQLConfig / check_postgreConfig.
  """
  if (host_platform == 'windows' and not check_vc_components()):
    return False
  if (host_platform == 'mac'):
    return True

  checksResult = CDependencies()

  checksResult.append(check_git())
  if (host_platform == 'linux'):
    checksResult.append(check_curl())
    checksResult.append(check_nodejs())
    checksResult.append(check_npm())
    checksResult.append(check_7z())

  checksResult.append(check_java())
  checksResult.append(check_erlang())
  checksResult.append(check_rabbitmq())
  checksResult.append(check_gruntcli())

  if (host_platform == 'windows'):
    checksResult.append(check_nodejs())

  # MySQL is only checked on Windows; every other configuration uses PostgreSQL
  if (config.option("sql-type") == 'mysql' and host_platform == 'windows'):
    checksResult.append(check_mysqlServer())
  else:
    checksResult.append(check_postgreSQL())

  # redis is only required when the server-lockstorage addon is enabled
  server_addons = []
  if (config.option("server-addons") != ""):
    server_addons = config.option("server-addons").rsplit(", ")
  if ("server-lockstorage" in server_addons):
    checksResult.append(check_redis())

  if (len(checksResult.install) > 0):
    # install.py argv order: uninstalls first, then path removals, then installs
    install_args = ['install.py']
    install_args += checksResult.get_uninstall()
    install_args += checksResult.get_removepath()
    install_args += checksResult.get_install()
    install_args[0] = './scripts/develop/' + install_args[0]
    if (host_platform == 'windows'):
      # installing programs requires admin rights on Windows
      code = libwindows.sudo(unicode(sys.executable), install_args)
    elif (host_platform == 'linux'):
      get_updates()
      base.cmd('python', install_args, False)

  check_npmPath()
  if (config.option("sql-type") == 'mysql' and host_platform == 'windows'):
    return check_MySQLConfig(checksResult.sqlPath)
  return check_postgreConfig(checksResult.sqlPath)
|
||||
|
||||
def check_pythonPath():
  """Make sure the running interpreter's prefix directory is on PATH."""
  current_path = base.get_env('PATH')
  if current_path.find(sys.exec_prefix) == -1:
    base.set_env('PATH', sys.exec_prefix + os.pathsep + current_path)
|
||||
|
||||
def check_npmPath():
  """On Windows, prepend the global npm bin dir (%AppData%\\npm) to PATH."""
  if host_platform != 'windows':
    return None
  npm_dir = os.environ['AppData'] + '\\npm'
  current_path = base.get_env('PATH')
  if current_path.find(npm_dir) == -1:
    base.set_env('PATH', npm_dir + os.pathsep + current_path)
|
||||
|
||||
def check_gitPath():
  """Prepend the directory containing git.exe (Program Files install) to PATH."""
  git_exe = (base.find_file(os.path.join(os.environ['PROGRAMW6432'], 'Git\\cmd'), 'git.exe')
             or base.find_file(os.path.join(os.environ['ProgramFiles(x86)'], 'Git\\cmd'), 'git.exe'))
  git_dir = base.get_script_dir(git_exe)
  current_path = base.get_env('PATH')
  if current_path.find(git_dir) == -1:
    base.set_env('PATH', git_dir + os.pathsep + current_path)
|
||||
|
||||
def check_git():
  """Return a CDependencies requesting a Git install when `git --version` fails."""
  dependence = CDependencies()
  base.print_info('Check installed Git')

  # any stderr output from the probe means git is absent or broken
  probe_stderr = base.run_command('git --version')['stderr']
  if probe_stderr != '':
    print('Git not found')
    dependence.append_install('Git')
  else:
    print('Git is installed')
  return dependence
|
||||
|
||||
def check_nodejs():
  """Check that Node.js is installed and its version lies in [14.14, 14.x].

  Returns a CDependencies with the install (and, for an out-of-range
  version, uninstall) actions needed to bring Node.js into the supported
  range.  The installer id differs per platform ('Node.js' vs 'NodeJs').
  """
  dependence = CDependencies()

  base.print_info('Check installed Node.js')
  nodejs_version = base.run_command('node -v')['stdout']
  if (nodejs_version == ''):
    print('Node.js not found')
    if (host_platform == 'windows'):
      dependence.append_install('Node.js')
    elif (host_platform == 'linux'):
      dependence.append_install('NodeJs')
    return dependence

  # 'v14.17.0\n' -> (14, 17)
  nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
  nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
  print('Installed Node.js version: ' + nodejs_version[1:])

  # Supported range 14.14 .. 14.x expressed as version tuples; the original
  # parsed constant strings at runtime and printed the warning twice when
  # the major version was out of range.
  nodejs_min_version = (14, 14)
  nodejs_max_version = (14, float("inf"))

  cur_version = (nodejs_cur_version_major, nodejs_cur_version_minor)
  isNeedReinstall = cur_version < nodejs_min_version or cur_version > nodejs_max_version

  if (isNeedReinstall):
    print('Installed Node.js version must be 14.14 to 14.x')
    if (host_platform == 'windows'):
      dependence.append_uninstall('Node.js')
      dependence.append_install('Node.js')
    elif (host_platform == 'linux'):
      dependence.append_uninstall('nodejs')
      dependence.append_install('NodeJs')

    return dependence

  print('Installed Node.js is valid')
  return dependence
|
||||
|
||||
def check_java():
  """Require a 64-bit Java runtime; schedule an install otherwise."""
  dependence = CDependencies()

  base.print_info('Check installed Java')
  # `java -version` writes its banner to stderr
  version_banner = base.run_command('java -version')['stderr']

  if version_banner.find('64-Bit') != -1:
    print('Installed Java is valid')
    return dependence

  # distinguish "wrong bitness" from "not installed" for the log only
  if version_banner.find('32-Bit') != -1:
    print('Installed Java must be x64')
  else:
    print('Java not found')

  dependence.append_install('Java')
  return dependence
|
||||
|
||||
def get_erlang_path_to_bin():
  """Return ERLANG_HOME\\bin on Windows ('' when ERLANG_HOME is unset);
  on other platforms return '' (erl is expected on PATH)."""
  if host_platform != 'windows':
    return ''
  home = os.getenv("ERLANG_HOME", "")
  return home + "\\bin" if home != "" else home
|
||||
def check_erlang():
  """Check that a 64-bit Erlang runtime is present; otherwise schedule
  Erlang AND RabbitMQ for (re)installation, since RabbitMQ depends on it."""
  dependence = CDependencies()
  base.print_info('Check installed Erlang')

  erlangBitness = ""
  erlang_path_home = get_erlang_path_to_bin()
  # ERLANG_HOME points at a missing directory: schedule a clean uninstall only
  if base.is_exist(erlang_path_home) == False and host_platform == 'windows':
    dependence.append_uninstall('Erlang')
    dependence.append_uninstall('RabbitMQ')
    return dependence

  if ("" != erlang_path_home or host_platform != 'windows'):
    # wordsize of 8 bytes identifies a 64-bit Erlang build
    erlangBitness = base.run_command_in_dir(erlang_path_home, 'erl -eval "erlang:display(erlang:system_info(wordsize)), halt()." -noshell')['stdout']

  if (erlangBitness == '8'):
    print("Installed Erlang is valid")
    return dependence

  print('Need Erlang with bitness x64')

  if (host_platform == 'windows'):
    # the stale broker database must also go, or the reinstall reuses it
    dependence.append_removepath(os.environ['AppData'] + '\\RabbitMQ\\db')
    dependence.append_uninstall('Erlang')
    dependence.append_uninstall('RabbitMQ')
  else:
    dependence.append_uninstall('erlang')
    dependence.append_uninstall('rabbitmq-server')
  dependence.append_install('Erlang')
  dependence.append_install('RabbitMQ')

  return dependence
|
||||
|
||||
def check_rabbitmq():
  """Check for a RabbitMQ service; when absent, schedule an Erlang +
  RabbitMQ reinstall (the two are always installed together)."""
  dependence = CDependencies()
  base.print_info('Check installed RabbitMQ')

  if (host_platform == 'windows'):
    # service presence is checked through the service control manager
    result = base.run_command('sc query RabbitMQ')['stdout']
    if (result.find('RabbitMQ') != -1):
      print('RabbitMQ is installed')
      return dependence
  elif (host_platform == 'linux'):
    # NOTE(review): any stdout from `service status` is treated as "running" — confirm
    result = base.run_command('service rabbitmq-server status')['stdout']
    if (result != ''):
      print('Installed RabbitMQ is valid')
      return dependence

  print('RabbitMQ not found')

  if (host_platform == 'windows'):
    # drop the old broker database so the fresh install starts clean
    dependence.append_removepath(os.environ['AppData'] + '\\RabbitMQ\\db')
    dependence.append_uninstall('Erlang')
    dependence.append_uninstall('RabbitMQ')
  else:
    dependence.append_uninstall('erlang')
    dependence.append_uninstall('rabbitmq-server')
  dependence.append_install('Erlang')
  dependence.append_install('RabbitMQ')

  return dependence
|
||||
|
||||
def find_redis(base_path):
  """Locate redis-cli.exe under <base_path>/Redis (None when absent)."""
  search_dir = os.path.join(base_path, 'Redis')
  return base.find_file(search_dir, 'redis-cli.exe')
|
||||
|
||||
def check_redis():
  """Validate the Redis server install and its configured port.

  Checks, in order: presence (registry / service), service state, the
  redis-cli binary, a live `info server` probe, and finally that the
  server's tcp_port matches the PORT= value in install_params['Redis'].
  Any failure schedules an uninstall + reinstall.
  """
  dependence = CDependencies()
  base.print_info('Check Redis server')

  if (host_platform == 'windows'):
    if (len(get_programUninstalls('Redis on Windows')) == 0):
      print('Redis not found')
      dependence.append_install('RedisServer')
      return dependence

    # installed but the service exists and is stopped: treat as broken
    checkService = base.run_command('sc query Redis')['stdout']
    if (checkService.find('Redis') != -1) and (checkService.find('STOPPED') != -1):
      print('Installed Redis is not valid')
      dependence.append_uninstall('Redis on Windows')
      dependence.append_install('RedisServer')
      return dependence

    redis_cli = find_redis(os.environ['PROGRAMW6432']) or find_redis(os.environ['ProgramFiles(x86)'])
  elif (host_platform == 'linux'):
    # NOTE(review): empty stderr from `service status` is treated as
    # "service absent" — confirm this matches the init system in use
    checkService = base.run_command('service redis-server status')['stderr']
    if (checkService == ''):
      print('Redis not found')
      dependence.append_install('Redis')
      return dependence
    redis_cli = 'redis-cli'

  if (redis_cli == None):
    print('Redis not found in default folder')
    dependence.append_uninstall('Redis on Windows')
    dependence.append_install('RedisServer')
    return dependence

  result = base.run_command('"' + redis_cli + '"' + ' info server')['stdout']
  if (result == ''):
    print('Redis client is invalid')
    if (host_platform == 'windows'):
      dependence.append_uninstall('Redis on Windows')
      dependence.append_install('RedisServer')
    else:
      dependence.append_uninstall('redis-server')
      dependence.append_install('Redis')
    return dependence

  # compare the live server port against the PORT= value from install_params
  info = result.split('tcp_port:')[1]
  tcp_port = info.split('\r', 1)[0]
  config_port = install_params['Redis'].split('PORT=', 1)[1]
  config_port = config_port.split(' ', 1)[0]
  if (tcp_port != config_port):
    print('Invalid Redis port, need reinstall')
    if (host_platform == 'windows'):
      dependence.append_uninstall('Redis on Windows')
      dependence.append_install('RedisServer')
    else:
      dependence.append_uninstall('redis-server')
      dependence.append_install('Redis')
    return dependence

  print('Installed Redis is valid')
  return dependence
|
||||
|
||||
def check_npm():
  """Check npm availability via its default help output; install when absent."""
  dependence = CDependencies()
  base.print_info('Check installed Npm')

  if base.run_command('npm')['stdout'] != '':
    print('Npm is installed')
  else:
    print('Npm not found')
    dependence.append_install('Npm')

  return dependence
|
||||
|
||||
def check_vc_components():
  """Ensure the Microsoft Visual C++ 2015+ x64 redistributable is present,
  installing it immediately when missing.

  Returns True when the component is present or the install succeeded.
  """
  base.print_info('Check Visual C++ components')
  result = True
  if (len(get_programUninstalls('Microsoft Visual C++ 2015-')) == 0):
    print('Microsoft Visual C++ 2015-20** Redistributable (x64) not found')
    result = installProgram('VC2019x64') and result

  # BUGFIX: the original printed the success message unconditionally,
  # even when installProgram had just failed
  if (result):
    print('Installed Visual C++ components is valid')
  return result
|
||||
|
||||
def check_gruntcli():
  """Look for grunt-cli among globally installed npm packages; install when missing."""
  dependence = CDependencies()

  base.print_info('Check installed Grunt-Cli')
  global_packages = base.run_command('npm list -g --depth=0')['stdout']

  if global_packages.find('grunt-cli') == -1:
    print('Grunt-Cli not found')
    dependence.append_install('GruntCli')
  else:
    print('Installed Grunt-Cli is valid')

  return dependence
|
||||
|
||||
def check_buildTools():
  """Use vswhere to detect Visual Studio Build Tools with the C++ x86/x64 toolset."""
  dependence = CDependencies()

  base.print_info('Check installed Build Tools')
  display_name = base.run_command('vswhere -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property DisplayName')['stdout']
  if not display_name:
    print('Build Tools not found')
    dependence.append_install('BuildTools')
  else:
    print('Installed Build Tools is valid')

  return dependence
|
||||
|
||||
def check_curl():
  """Schedule a Curl install when `curl -V` produces no output."""
  dependence = CDependencies()
  base.print_info('Check installed Curl')

  version_out = base.run_command('curl -V')['stdout']
  if not version_out:
    dependence.append_install('Curl')

  return dependence
|
||||
|
||||
def check_7z():
  """Schedule a 7z install when running `7z` produces no output."""
  dependence = CDependencies()
  base.print_info('Check installed 7z')

  banner = base.run_command('7z')['stdout']
  if not banner:
    dependence.append_install('7z')

  return dependence
|
||||
|
||||
def check_gh():
  """Return True when the GitHub CLI (`gh`) is available on PATH."""
  base.print_info('Check installed GitHub CLI')

  installed = base.run_command('gh --version')['stdout'] != ''

  if installed:
    base.print_info('GitHub CLI is installed')
  else:
    base.print_info('GitHub CLI not found')
    # ToDo install

  return installed
|
||||
|
||||
def check_gh_auth():
  """Return True when `gh auth status` reports a logged-in session."""
  base.print_info('Check auth for GitHub CLI')

  status_stderr = base.run_command('gh auth status')['stderr']
  logged_in = status_stderr.find('not logged') == -1

  if logged_in:
    base.print_info('GitHub CLI logged in to github')
  else:
    base.print_info('GitHub CLI not logged in to github')

  return logged_in
|
||||
|
||||
def get_mysql_path_to_bin(mysqlPath = ''):
  """Return the mysql client bin directory.

  On Windows, append 'bin' to the given install dir (defaulting to the
  MySQL Server 8.0 location under Program Files); on other platforms the
  argument is returned unchanged.
  """
  if host_platform != 'windows':
    return mysqlPath
  install_dir = mysqlPath if mysqlPath != '' else os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
  return install_dir + 'bin'
|
||||
def get_mysqlLoginSrting():
  """Build the mysql CLI login prefix from the configured credentials."""
  creds = install_params['MySQLServer']
  return 'mysql -u ' + creds['user'] + ' -p' + creds['pass']
|
||||
def get_mysqlServersInfo():
  """Read installed MySQL Server entries from the Windows registry
  (32-bit view of HKLM\\SOFTWARE\\MySQL AB).

  Returns a list of dicts with 'Location', 'Version' and 'DataLocation';
  empty when the keys are missing (no install, or non-Windows host).
  """
  arrInfo = []

  try:
    aReg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    aKey = winreg.OpenKey(aReg, "SOFTWARE\\", 0, winreg.KEY_READ | winreg.KEY_WOW64_32KEY)

    asubkey = winreg.OpenKey(aKey, 'MySQL AB')
    count_subkey = winreg.QueryInfoKey(asubkey)[0]

    for i in range(count_subkey):
      MySQLsubkey_name = winreg.EnumKey(asubkey, i)
      if (MySQLsubkey_name.find('MySQL Server') != - 1):
        MySQLsubkey = winreg.OpenKey(asubkey, MySQLsubkey_name)
        dictInfo = {}
        dictInfo['Location'] = winreg.QueryValueEx(MySQLsubkey, 'Location')[0]
        dictInfo['Version'] = winreg.QueryValueEx(MySQLsubkey, 'Version')[0]
        dictInfo['DataLocation'] = winreg.QueryValueEx(MySQLsubkey, 'DataLocation')[0]
        arrInfo.append(dictInfo)
  # best-effort scan: any registry error yields whatever was collected so far
  except:
    pass

  return arrInfo
|
||||
def check_mysqlServer():
  """Find a MySQL server reachable with the configured credentials and
  listening on the configured port.

  On non-Windows a single `mysql` probe is made; on Windows every
  registry-listed install is probed.  Sets dependence.sqlPath to the
  validated location, otherwise schedules an uninstall/reinstall (plus
  removal of stale data dirs on Windows).
  """
  base.print_info('Check MySQL Server')
  dependence = CDependencies()
  mysqlLoginSrt = get_mysqlLoginSrting()
  connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'

  if (host_platform != 'windows'):
    # cheap login probe first; only query the port when login works
    result = os.system(mysqlLoginSrt + ' -e "exit"')
    if (result == 0):
      connectionResult = base.run_command(connectionString)['stdout']
      if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
        print('MySQL configuration is valid')
        dependence.sqlPath = 'mysql'
        return dependence
    print('Valid MySQL Server not found')
    dependence.append_install('MySQLServer')
    dependence.append_uninstall('mysql-server')
    return dependence

  arrInfo = get_mysqlServersInfo()
  for info in arrInfo:
    if (base.is_dir(info['Location']) == False):
      continue

    mysql_full_name = 'MySQL Server ' + info['Version'] + ' '

    connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
    if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
      print(mysql_full_name + 'configuration is valid')
      dependence.sqlPath = info['Location']
      return dependence
    print(mysql_full_name + 'configuration is not valid')

  print('Valid MySQL Server not found')
  dependence.append_uninstall('MySQL Server')
  dependence.append_uninstall('MySQL Installer')
  dependence.append_install('MySQLInstaller')
  dependence.append_install('MySQLServer')

  # schedule removal of leftover per-version data dirs so the reinstall starts clean
  MySQLData = os.environ['ProgramData'] + '\\MySQL\\'
  if base.is_exist(MySQLData) == False:
    return dependence

  dir = os.listdir(MySQLData)
  for path in dir:
    if (path.find('MySQL Server') != -1) and (base.is_file(MySQLData + path) == False):
      dependence.append_removepath(MySQLData + path)

  return dependence
|
||||
def check_MySQLConfig(mysqlPath = ''):
  """Ensure the 'onlyoffice' database exists and the configured user
  authenticates with mysql_native_password.

  Runs createdb.sql when the database is missing and switches the auth
  plugin when needed.  Returns True when both checks end up satisfied.
  """
  result = True
  mysqlLoginSrt = get_mysqlLoginSrting()
  mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)

  if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
    print('Database onlyoffice not found')
    creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
    result = execMySQLScript(mysql_path_to_bin, creatdb_path)
  # NOTE(review): presumably the server's driver requires the legacy
  # mysql_native_password plugin (MySQL 8 defaults to caching_sha2_password) — confirm
  if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
    print('Password encryption is not valid')
    result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result

  return result
|
||||
def execMySQLScript(mysql_path_to_bin, scriptPath):
  """Feed the SQL script at `scriptPath` into the mysql client.

  Returns True when the client exits with code 0, False otherwise.
  """
  print('Execution ' + scriptPath)
  # reuse the login string (the original computed it, then discarded it and
  # called get_mysqlLoginSrting() a second time)
  mysqlLoginSrt = get_mysqlLoginSrting()

  code = base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' < "' + scriptPath + '"')
  if (code != 0):
    print('Execution failed!')
    return False
  print('Execution completed')
  return True
|
||||
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
  """Switch the configured MySQL user to the `sEncrypt` auth plugin.

  Returns True on success, False when the ALTER USER statement fails.
  """
  print('Setting MySQL password encrypting...')

  creds = install_params['MySQLServer']
  statement = ("ALTER USER '" + creds['user'] + "'@'localhost' IDENTIFIED WITH "
               + sEncrypt + " BY '" + creds['pass'] + "';")
  exit_code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + statement + '"')
  if exit_code != 0:
    print('Setting password encryption failed!')
    return False

  print('Setting password encryption completed')
  return True
|
||||
def uninstall_mysqlserver():
  """Best-effort purge of a Linux MySQL install: stop the service, remove
  packages, data/config/log dirs and the mysql user/group.

  NOTE(review): `os.system(...) and code` keeps 0 (success) once any step
  returns 0, so the returned code reports success when ANY step succeeded
  rather than when all did — confirm this is intended.
  """
  code = os.system('yes | sudo systemctl stop mysqld')
  code = os.system('sudo apt-get remove --purge mysql* -y') and code
  code = os.system('sudo rm -Rf /var/lib/mysql/') and code
  code = os.system('sudo rm -Rf /etc/mysql/') and code
  code = os.system('sudo rm -rf /var/log/mysql') and code
  code = os.system('sudo deluser --remove-home mysql') and code
  code = os.system('sudo delgroup mysql') and code

  return code
|
||||
|
||||
def get_postrgre_path_to_bin(postgrePath = ''):
  """Return the psql bin directory.

  On Windows, append '\\bin' to the given install dir (defaulting to the
  PostgreSQL 13 location under Program Files); on other platforms the
  argument is returned unchanged.
  """
  if host_platform != 'windows':
    return postgrePath
  install_dir = postgrePath if postgrePath != '' else os.environ['PROGRAMW6432'] + '\\PostgreSQL\\13'
  return install_dir + '\\bin'
|
||||
def get_postgreLoginSrting(userName):
  """Build the psql login prefix for `userName`; on non-Windows the
  password is passed inline via PGPASSWORD and the host is forced to localhost."""
  if host_platform == 'windows':
    prefix, suffix = '', ' '
  else:
    prefix = 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" '
    suffix = ' -hlocalhost '
  return prefix + 'psql -U' + userName + suffix
|
||||
def get_postgreSQLInfoByFlag(flag):
  """Read PostgreSQL installs from the Windows registry view selected by
  `flag` (winreg.KEY_WOW64_32KEY or KEY_WOW64_64KEY).

  Returns a list of dicts with 'Location', 'Version' and 'DataLocation';
  empty when the key is absent (no install, or non-Windows host).
  """
  arrInfo = []

  try:
    aReg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    aKey = winreg.OpenKey(aReg, "SOFTWARE\\PostgreSQL\\Installations", 0, winreg.KEY_READ | flag)

    count_subkey = winreg.QueryInfoKey(aKey)[0]

    for i in range(count_subkey):
      PostgreSQLsubkey_name = winreg.EnumKey(aKey, i)
      PostgreSQLsubkey = winreg.OpenKey(aKey, PostgreSQLsubkey_name)
      dictInfo = {}
      dictInfo['Location'] = winreg.QueryValueEx(PostgreSQLsubkey, 'Base Directory')[0]
      dictInfo['Version'] = winreg.QueryValueEx(PostgreSQLsubkey, 'CLT_Version')[0]
      dictInfo['DataLocation'] = winreg.QueryValueEx(PostgreSQLsubkey, 'Data Directory')[0]
      arrInfo.append(dictInfo)
  # best-effort scan: any registry error yields whatever was collected so far
  except:
    pass

  return arrInfo
|
||||
def get_postgreSQLInfo():
  """Collect PostgreSQL installs from both 32- and 64-bit registry views."""
  installs = get_postgreSQLInfoByFlag(winreg.KEY_WOW64_32KEY)
  installs += get_postgreSQLInfoByFlag(winreg.KEY_WOW64_64KEY)
  return installs
|
||||
def check_postgreSQL():
  """Find a PostgreSQL server listening on the configured port.

  On Linux a single psql probe is made; on Windows every registry-listed
  install is probed.  Sets dependence.sqlPath to the validated location,
  otherwise schedules an uninstall/reinstall (plus data-dir removal on
  Windows).
  """
  base.print_info('Check PostgreSQL')

  dependence = CDependencies()

  postgreLoginSrt = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
  connectionString = postgreLoginSrt + ' -c "SELECT setting FROM pg_settings WHERE name = ' + "'port'" + ';"'

  if (host_platform == 'linux'):
    # login probe (\q quits immediately), then compare the reported port
    result = os.system(postgreLoginSrt + ' -c "\q"')
    connectionResult = base.run_command(connectionString)['stdout']

    if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
      print('Valid PostgreSQL not found!')
      dependence.append_install('PostgreSQL')
      dependence.append_uninstall('PostgreSQL')
    else:
      print('PostreSQL is installed')
      dependence.sqlPath = 'psql'
    return dependence

  arrInfo = get_postgreSQLInfo()
  # psql picks the password up from the environment on Windows
  base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
  for info in arrInfo:
    if (base.is_dir(info['Location']) == False):
      continue

    postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
    connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']

    if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
      print(postgre_full_name + 'configuration is valid')
      dependence.sqlPath = info['Location']
      return dependence
    print(postgre_full_name + 'configuration is not valid')

  print('Valid PostgreSQL not found')

  dependence.append_uninstall('PostgreSQL')
  dependence.append_install('PostgreSQL')

  # stale data dirs would make a reinstalled cluster pick up old settings
  for info in arrInfo:
    dependence.append_removepath(info['DataLocation'])

  return dependence
|
||||
def check_postgreConfig(postgrePath = ''):
  """Ensure the PostgreSQL role, database and schema used by the server exist.

  Creates the configured user and database when missing, loads createdb.sql
  into an empty database, resets a wrong password, and grants the user full
  privileges.  Returns True when every step performed succeeded.
  """
  result = True
  if (host_platform == 'windows'):
    # psql picks the password up from the environment on Windows
    base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])

  rootUser = install_params['PostgreSQL']['root']
  dbUser = install_params['PostgreSQL']['dbUser']
  dbName = install_params['PostgreSQL']['dbName']
  dbPass = install_params['PostgreSQL']['dbPass']
  postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
  postgreLoginRoot = get_postgreLoginSrting(rootUser)
  postgreLoginDbUser = get_postgreLoginSrting(dbUser)
  creatdb_path = base.get_script_dir() + "/../../server/schema/postgresql/createdb.sql"

  if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "\du ' + dbUser + '"')['stdout'].find(dbUser) != -1):
    print('User ' + dbUser + ' is exist')
    # a failed trivial login (\q) means the stored password is wrong
    if (os.system(postgreLoginDbUser + '-c "\q"') != 0):
      print('Invalid user password!')
      base.print_info('Changing password...')
      result = change_userPass(dbUser, dbPass, postgre_path_to_bin) and result
  else:
    print('User ' + dbUser + ' not exist!')
    base.print_info('Creating ' + dbName + ' user...')
    result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result

  if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
    print('Database ' + dbName + ' not found')
    base.print_info('Creating ' + dbName + ' database...')
    # BUGFIX: keep earlier failures — the original dropped the accumulated
    # `result` on this branch
    result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin) and result
  else:
    # NOTE(review): '7559 kB' is presumably the size of an empty database — confirm
    if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "SELECT pg_size_pretty(pg_database_size(' + "'" + dbName + "'" + '));"')['stdout'].find('7559 kB') != -1):
      print('Database ' + dbName + ' not configured')
      base.print_info('Configuring ' + dbName + ' database...')
      # BUGFIX: configureDb takes (userName, dbName, scriptPath, bin); the
      # original omitted the user argument and raised TypeError on this path
      result = configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin) and result
    print('Database ' + dbName + ' is valid')

  if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "\l+ ' + dbName + '"')['stdout'].find(dbUser +'=CTc/' + rootUser) == -1):
    print('User ' + dbUser + ' has no database privileges!')
    base.print_info('Setting database privileges for user ' + dbUser + '...')
    result = set_dbPrivilegesForUser(dbUser, dbName, postgre_path_to_bin) and result
  print('User ' + dbUser + ' has database privileges')

  return result
|
||||
def create_postgreDb(dbName, postgre_path_to_bin = ''):
  """Create database `dbName` as the postgres root user; True on success."""
  root_login = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
  exit_code = base.exec_command_in_dir(postgre_path_to_bin, root_login + '-c "CREATE DATABASE ' + dbName +';"')
  return exit_code == 0
|
||||
def set_dbPrivilegesForUser(userName, dbName, postgre_path_to_bin = ''):
  """Grant `userName` all privileges on `dbName`; True on success."""
  root_login = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
  grant_cmd = root_login + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + userName + ';"'
  return base.exec_command_in_dir(postgre_path_to_bin, grant_cmd) == 0
|
||||
def create_postgreUser(userName, userPass, postgre_path_to_bin = ''):
  """Create role `userName` with password `userPass`; True on success."""
  root_login = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
  create_cmd = root_login + '-c "CREATE USER ' + userName + ' WITH password ' + "'" + userPass + "'" + ';"'
  return base.exec_command_in_dir(postgre_path_to_bin, create_cmd) == 0
|
||||
def change_userPass(userName, userPass, postgre_path_to_bin = ''):
  """Reset the password of role `userName`; True on success."""
  root_login = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
  alter_cmd = root_login + '-c "ALTER USER ' + userName + " WITH PASSWORD '" + userPass + "';" + '"'
  return base.exec_command_in_dir(postgre_path_to_bin, alter_cmd) == 0
|
||||
def configureDb(userName, dbName, scriptPath, postgre_path_to_bin = ''):
  """Run the SQL script at `scriptPath` against `dbName` as `userName`.

  Returns True when psql exits with code 0, False otherwise.
  """
  print('Execution ' + scriptPath)
  login = get_postgreLoginSrting(userName)

  if base.exec_command_in_dir(postgre_path_to_bin, login + ' -d ' + dbName + ' -f "' + scriptPath + '"') != 0:
    print('Execution failed!')
    return False

  print('Execution completed')
  return True
|
||||
def uninstall_postgresql():
  """Best-effort purge of a Linux PostgreSQL install (packages, data,
  logs, config, postgres user/group), then kill whatever still listens
  on the default port 5432.

  NOTE(review): as in uninstall_mysqlserver, the `and code` chaining makes
  the return value 0 once any step succeeded — confirm intended.
  """
  code = os.system('sudo DEBIAN_FRONTEND=noninteractive apt-get purge --auto-remove postgresql* -y')
  code = os.system('sudo rm -rf /var/lib/postgresql/') and code
  code = os.system('sudo rm -rf /var/log/postgresql/') and code
  code = os.system('sudo rm -rf /etc/postgresql/') and code
  code = os.system('sudo userdel -r postgres') and code
  code = os.system('sudo groupdel postgres') and code
  os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp 5432')['stdout'])

  return code
|
||||
|
||||
def get_programUninstallsByFlag(sName, flag):
    """Collect Windows uninstall command lines for programs matching sName.

    Scans HKLM\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall in
    the registry view selected by flag (KEY_WOW64_32KEY / KEY_WOW64_64KEY)
    and returns the UninstallString of every entry whose DisplayName
    contains sName.
    """
    info = []
    aReg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
    aKey = winreg.OpenKey(aReg, "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall", 0, winreg.KEY_READ | flag)
    count_subkey = winreg.QueryInfoKey(aKey)[0]

    for i in range(count_subkey):
        try:
            asubkey_name = winreg.EnumKey(aKey, i)
            asubkey = winreg.OpenKey(aKey, asubkey_name)
            progName = winreg.QueryValueEx(asubkey, 'DisplayName')[0]

            if (progName.find(sName) != -1):
                info.append(winreg.QueryValueEx(asubkey, 'UninstallString')[0])

        # Entries without DisplayName/UninstallString raise OSError; skip
        # them. The original bare 'except:' also swallowed KeyboardInterrupt
        # and SystemExit, which should propagate.
        except OSError:
            pass

    return info
|
||||
def get_programUninstalls(sName):
    """Return uninstall strings for sName from both 32- and 64-bit registry views."""
    result = []
    for view in (winreg.KEY_WOW64_32KEY, winreg.KEY_WOW64_64KEY):
        result += get_programUninstallsByFlag(sName, view)
    return result
|
||||
|
||||
def uninstallProgram(sName):
    # Uninstall program sName for the current host OS. Returns True on
    # success (or when nothing had to be done), False otherwise.
    base.print_info("Uninstalling all versions " + sName + "...")
    info = ''
    code = 0
    if (host_platform == 'windows'):
        unInfo = get_programUninstalls(sName)
        # NOTE(review): 'info' is overwritten on every iteration but only
        # executed once after the loop, so only the LAST registry entry is
        # actually uninstalled even though the log says "all versions" --
        # confirm whether multiple installs need handling.
        for info in unInfo:
            info = info.replace('"', '')
            if (base.is_file(info) == False):
                # Not a plain executable: assume an msiexec command line;
                # force uninstall (/x) in quiet mode (/qn).
                info = info.replace('/I', '/x').replace('/i', '/x') + ' /qn'
            else:
                if (sName in uninstall_params):
                    info = '"' + info + '" ' + uninstall_params[sName]
                else:
                    # Default to NSIS-style silent uninstall.
                    info = '"' + info + '" /S'
    elif (host_platform == 'linux'):
        if (sName in uninstall_special):
            # Program has a dedicated uninstall routine.
            code = uninstall_special[sName]()
        else:
            info = 'sudo apt-get remove --purge ' + sName + '* -y && ' + 'sudo apt-get autoremove -y && ' + 'sudo apt-get autoclean'

    if (info != ''):
        print("Uninstalling " + sName + "...")
        print(info)

        # Run through the shell so the apt '&&' chains work.
        popen = subprocess.Popen(info, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        popen.communicate()
        code = popen.wait()

    if (code != 0):
        print("Uninstalling was failed!")
        return False

    return True
|
||||
|
||||
def installProgram(sName):
    # Install program sName for the current host OS.
    # Windows: download the installer from downloads_list and run it silently.
    # Linux: apt-install the mapped package.
    # Programs listed in install_special are delegated to their dedicated
    # routine instead. Returns True on success, False otherwise.
    # NOTE(review): 'code' is unbound if host_platform is neither 'windows'
    # nor 'linux' -- the final check would raise NameError then.
    base.print_info("Installing " + sName + "...")
    if (host_platform == 'windows'):
        if (sName in install_special):
            code = install_special[sName]()
        else:
            if (sName not in downloads_list['Windows']):
                print("Url for install not found!")
                return False

            download_url = downloads_list['Windows'][sName]
            file_name = "install."
            # Installer type decides both the file extension and how it is run.
            is_msi = download_url.endswith('msi')
            if is_msi:
                file_name += "msi"
            else:
                file_name += "exe"
            base.download(download_url, file_name)

            base.print_info("Install " + sName + "...")
            install_command = ("msiexec.exe /i " + file_name) if is_msi else file_name

            if (sName in install_params):
                install_command += " " + install_params.get(sName, '')
                if (is_msi == True):
                    # Quiet, no-UI mode for msiexec.
                    install_command += " /qn "
            elif sName not in install_params:
                # No custom parameters: assume an NSIS-style silent installer.
                # (This 'elif' condition is always true when reached -- it is
                # the negation of the 'if' above.)
                install_command += " /S"

            print(install_command)
            code = os.system(install_command)
            base.delete_file(file_name)
    elif (host_platform == 'linux'):
        if (sName in install_special):
            code = install_special[sName]()
        else:
            if (sName not in downloads_list['Linux']):
                print("Program for install not found!")
                return False

            base.print_info("Install " + sName + "...")
            install_command = 'yes | sudo apt install ' + downloads_list['Linux'][sName]
            print(install_command)
            code = os.system(install_command)

    if (code != 0):
        print("Installing was failed!")
        return False

    return True
|
||||
|
||||
def install_gruntcli():
    """Install the grunt-cli npm package globally; returns the shell exit status."""
    check_npmPath()

    command = 'npm install -g grunt-cli'
    if (host_platform == 'windows'):
        return os.system(command)
    # Global npm installs need elevated rights outside Windows.
    return os.system('sudo ' + command)
|
||||
|
||||
def install_mysqlserver():
    """Install MySQL Server for the current platform.

    Returns a shell-style status code (0 on success, non-zero on failure,
    1 on unsupported platforms).
    """
    if (host_platform == 'windows'):
        # Drive the MySQL Installer console in silent mode. The config block
        # is a ';'-separated list; the original string was missing the ';'
        # between the serverid value and enable_tcpip, fusing two options.
        # NOTE(review): serverid is set to the configured port value -- looks
        # suspicious but kept as-is; verify against the installer docs.
        return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + ';enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
    elif (host_platform == 'linux'):
        # Free the MySQL port, open the firewall, pre-seed the root password
        # via debconf, then install non-interactively.
        os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
        code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
        # 'or' keeps the first non-zero status; the previous 'and' chaining
        # let a later success mask an earlier failure.
        code = os.system('sudo apt-get -y install zsh htop') or code
        code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') or code
        code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') or code
        return os.system('yes | sudo apt install mysql-server') or code
    return 1
|
||||
|
||||
def get_updates():
    """Refresh the apt package index; returns the shell exit status."""
    update_command = 'yes | sudo apt-get update'
    return os.system(update_command)
|
||||
|
||||
def install_redis():
    # (Re)install Redis: stop any process bound to the configured Redis port,
    # drop the existing Windows 'Redis' service, then run the regular installer.
    # NOTE(review): netstat/findstr/taskkill/sc are Windows-only commands, yet
    # this function is also registered in install_special for Linux use --
    # confirm it is only ever invoked on Windows.
    base.print_info("Installing Redis...")
    # install_params['Redis'] looks like 'PORT=6379 ...'; take the value of
    # the first KEY=VALUE pair and find the PID listening on that port.
    pid = base.run_command('netstat -ano | findstr ' + install_params['Redis'].split(' ')[0].split('=')[1])['stdout'].split(' ')[-1]
    if (pid != ''):
        os.system('taskkill /F /PID ' + pid)
    os.system('sc delete Redis')

    return installProgram('Redis')
|
||||
|
||||
def install_postgresql():
    """Install PostgreSQL and set the configured superuser password.

    Windows: download and run the EnterpriseDB installer unattended.
    Linux: apt-install and set the postgres password via psql.
    Returns a shell-style status code (0 on success).
    """
    if (host_platform == 'windows'):
        download_url = downloads_list['PostgreSQL']
        file_name = "install.exe"
        base.download(download_url, file_name)
        base.print_info("Install PostgreSQL...")
        install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
    else:
        base.print_info("Install PostgreSQL...")
        install_command = 'sudo apt install postgresql -y'

    print(install_command)
    code = os.system(install_command)

    if (host_platform == 'windows'):
        base.delete_file(file_name)
    else:
        # Set the postgres superuser password. 'or' keeps the first non-zero
        # status; the previous 'and' chaining let a successful psql call mask
        # a failed installation (and vice versa).
        code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') or code

    return code
|
||||
|
||||
def install_nodejs():
    """Register the NodeSource 14.x apt repo and install Node.js.

    Returns the shell exit status of the install command.
    """
    os.system('curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -')
    base.print_info("Install node.js...")
    command = 'yes | sudo apt install nodejs'
    print(command)
    return os.system(command)
|
||||
|
||||
# Installer download URLs (Windows) and apt package names (Linux), keyed by
# the program name passed to installProgram/uninstallProgram.
downloads_list = {
    'Windows': {
        'Git': 'https://github.com/git-for-windows/git/releases/download/v2.29.0.windows.1/Git-2.29.0-64-bit.exe',
        'Node.js': 'https://nodejs.org/download/release/v14.17.6/node-v14.17.6-x64.msi',
        'Java': 'https://javadl.oracle.com/webapps/download/AutoDL?BundleId=242990_a4634525489241b9a9e1aa73d9e118e6',
        'RabbitMQ': 'https://github.com/rabbitmq/rabbitmq-server/releases/download/v3.8.9/rabbitmq-server-3.8.9.exe',
        'Erlang': 'http://erlang.org/download/otp_win64_23.1.exe',
        'VC2019x64': 'https://aka.ms/vs/17/release/vc_redist.x64.exe',
        'MySQLInstaller': 'https://dev.mysql.com/get/Downloads/MySQLInstaller/mysql-installer-web-community-8.0.21.0.msi',
        'BuildTools': 'https://download.visualstudio.microsoft.com/download/pr/11503713/e64d79b40219aea618ce2fe10ebd5f0d/vs_BuildTools.exe',
        'Redis': 'https://github.com/tporadowski/redis/releases/download/v5.0.9/Redis-x64-5.0.9.msi',
        'PostgreSQL': 'https://sbp.enterprisedb.com/getfile.jsp?fileid=12851'
    },
    'Linux': {
        'Git': 'git',
        'Npm': 'npm',
        'Java': 'openjdk-11-jdk',
        'RabbitMQ': 'rabbitmq-server',
        'Redis': 'redis-server',
        'Erlang': 'erlang',
        'Curl': 'curl',
        '7z': 'p7zip-full',
        'PostgreSQL': 'postgresql'
    }
}
|
||||
# Programs whose installation needs a dedicated routine instead of the
# generic download/apt flow (checked first by installProgram).
install_special = {
    'NodeJs': install_nodejs,
    'GruntCli': install_gruntcli,
    'MySQLServer': install_mysqlserver,
    'RedisServer' : install_redis,
    'PostgreSQL': install_postgresql
}
|
||||
# Programs whose removal needs a dedicated routine (used by the Linux path
# of uninstallProgram).
uninstall_special = {
    'MySQLServer': uninstall_mysqlserver,
    'PostgreSQL' : uninstall_postgresql
}
|
||||
# Extra installer command-line switches (strings) or connection defaults
# (dicts) per program.
# NOTE(review): default credentials are hard-coded for the develop
# environment; do not reuse them for anything internet-facing.
install_params = {
    'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
    'Git': '/VERYSILENT /NORESTART',
    'Java': '/s',
    'MySQLServer': {
        'port': '3306',
        'user': 'root',
        'pass': 'onlyoffice',
        'version': '8.0.21'
    },
    'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
    'PostgreSQL': {
        'root': 'postgres',
        'dbPort': '5432',
        'dbName': 'onlyoffice',
        'dbUser': 'onlyoffice',
        'dbPass': 'onlyoffice'
    }
}
|
||||
# Extra uninstaller switches per program (Windows path of uninstallProgram).
uninstall_params = {
    'PostgreSQL': '--mode unattended --unattendedmodeui none'
}
|
||||
|
||||
37
scripts/develop/develop.py
Normal file
37
scripts/develop/develop.py
Normal file
@ -0,0 +1,37 @@
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
sys.path.append('scripts/develop')
|
||||
import base
|
||||
import build_js
|
||||
import build_server
|
||||
import config
|
||||
import dependence
|
||||
import config_server as develop_config_server
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
def build_docker_server():
    """Build the develop server after verifying docker build dependencies."""
    dependence.check__docker_dependencies()
    build_develop_server()
|
||||
|
||||
def build_docker_sdk_web_apps(dir):
    """Build the JS (sdkjs/web-apps) modules in *dir* after verifying docker dependencies."""
    dependence.check__docker_dependencies()
    build_js.build_js_develop(dir)
|
||||
|
||||
def build_develop_server():
    """Build server and JS modules for develop mode and generate the config.

    When a branding is configured and ships its own develop.py, run it too.
    """
    build_server.build_server_develop()
    build_js.build_js_develop(base_dir + "/../../..")
    develop_config_server.make()

    if ("" != config.option("branding")):
        scripts_dir = base_dir + "/../../../" + config.option("branding") + "/build_tools/scripts"
        if base.is_file(scripts_dir + "/develop.py"):
            base.cmd_in_dir(scripts_dir, "python", ["develop.py"], True)
|
||||
|
||||
def make():
    """Entry point: build the develop server when --develop=1 was configured.

    Exits the process: status 1 when dependency checks fail, 0 after a build.
    Returns without exiting when develop mode is not enabled.
    """
    if config.option("develop") != "1":
        return
    if dependence.check_dependencies():
        build_develop_server()
        exit(0)
    exit(1)
|
||||
|
||||
26
scripts/develop/install.py
Normal file
26
scripts/develop/install.py
Normal file
@ -0,0 +1,26 @@
|
||||
import sys
|
||||
sys.path.append('scripts')
|
||||
sys.path.append('scripts/develop')
|
||||
sys.path.append('scripts/develop/vendor')
|
||||
import base
|
||||
import shutil
|
||||
import optparse
|
||||
import dependence
|
||||
|
||||
# Command-line driver: uninstall programs, remove leftover directories and
# install programs -- in that order -- as requested by the repeatable
# --uninstall / --remove-path / --install options.
arguments = sys.argv[1:]

parser = optparse.OptionParser()
parser.add_option("--install", action="append", type="string", dest="install", default=[], help="provides install dependencies")
parser.add_option("--uninstall", action="append", type="string", dest="uninstall", default=[], help="provides uninstall dependencies")
# dest contains a hyphen, so this value is only reachable via vars(options).
parser.add_option("--remove-path", action="append", type="string", dest="remove-path", default=[], help="provides path dependencies to remove")

(options, args) = parser.parse_args(arguments)
configOptions = vars(options)

for item in configOptions["uninstall"]:
    dependence.uninstallProgram(item)
for item in configOptions["remove-path"]:
    if (base.is_dir(item) == True):
        shutil.rmtree(item)
for item in configOptions["install"]:
    dependence.installProgram(item)
|
||||
53
scripts/develop/print_repositories.py
Executable file
53
scripts/develop/print_repositories.py
Executable file
@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../')
|
||||
|
||||
import argparse
|
||||
import config
|
||||
import base
|
||||
import os
|
||||
|
||||
# Build the configure.py argument list from the CLI options, run the
# configuration, then print each repository name (optionally followed by its
# checkout directory relative to the workspace root).
parser = argparse.ArgumentParser(description="Print repositories list.")
parser.add_argument('-P', '--platform', type=str, dest='platform',
                    action='store', default="native", help="Defines platform")
parser.add_argument('-M', '--module', type=str, dest='module',
                    action='store', default="core desktop builder server",
                    help="Defines modules")
parser.add_argument('-B', '--branding', type=str, dest='branding',
                    action='store', help="Defines branding path")
args = parser.parse_args()

config_args = [
    'configure.py',
    '--platform', args.platform,
    '--module', args.module
]
if args.branding != None:
    config_args += ['--branding', args.branding]

base.cmd_in_dir('../../', 'python', config_args)

# parse configuration
config.parse()
config.parse_defaults()

repositories = base.get_repositories()

# Add other repositories
if config.check_option("module", "builder"):
    repositories['document-builder-package'] = [False, False]

if (config.check_option("module", "server")):
    repositories['document-server-package'] = [False, False]
    repositories['Docker-DocumentServer'] = [False, False]

for repo in repositories:
    line = repo
    # repositories[repo][1] holds the checkout directory, or False when the
    # repo has no dedicated directory -- TODO confirm against base.get_repositories.
    repo_dir = repositories[repo][1]
    if repo_dir != False:
        repo_dir = os.path.relpath(repo_dir, base.get_script_dir() + "../../..")
        line += " " + repo_dir
    print(line)

sys.exit(0)
|
||||
60
scripts/develop/release.py
Normal file
60
scripts/develop/release.py
Normal file
@ -0,0 +1,60 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../')
|
||||
|
||||
import os

import base
import dependence
import config
||||
|
||||
def protect_brunch(branch, repo, strict = False):
    # Enable GitHub branch protection on ONLYOFFICE/<repo>:<branch> via the
    # gh CLI. With strict=True nobody may push; otherwise pushes are
    # restricted to the dep-application-development-leads team.
    # Prints gh's stderr when the API call reports an error.
    team = '' if strict else 'dep-application-development-leads'
    # NOTE(review): the JSON payload is echoed without shell quoting; the
    # braces/quotes are assumed to survive base.run_command's shell -- verify.
    command = 'echo {"required_status_checks": null,"enforce_admins":true,"required_pull_request_reviews": null,"restrictions": {"users":[],"teams":["'
    command += team + '"]}} | gh api -X PUT repos/ONLYOFFICE/' + repo + '/branches/' + branch + '/protection --input -'
    result = base.run_command(command)
    if ('' != result['stderr']):
        print(result['stderr'])
    return
|
||||
|
||||
# Source branch for the release pull requests and the branches to merge into.
branch_from = 'release/v6.2.0'
branches_to = ['develop']

platform = base.host_platform()
if ("windows" == platform):
    dependence.check_pythonPath()
    dependence.check_gitPath()

# The gh CLI must be installed and authenticated before creating PRs.
if (dependence.check_gh() != True or dependence.check_gh_auth() != True):
    sys.exit(0)

base.cmd_in_dir('../../', 'python', ['configure.py', '--branding', 'onlyoffice', '--branding-url', 'https://github.com/ONLYOFFICE/onlyoffice.git', '--branch', branch_from, '--module', 'core desktop builder server mobile', '--update', '1', '--update-light', '1', '--clean', '0'])

# parse configuration
config.parse()

base.git_update('onlyoffice')

# correct defaults (the branding repo is already updated)
config.parse_defaults()

repositories = base.get_repositories()

# Add other repositories
repositories['core-ext'] = [True, False]

base.update_repositories(repositories)

repositories['onlyoffice'] = [True, False]

# For every repository with a checkout directory, create pull requests from
# branch_from into each branch of branches_to, running inside that directory.
# NOTE(review): os.getcwd/os.chdir are used below but this script's header
# does not import os -- make sure 'import os' is present at the top.
for repo in repositories:
    current_dir = repositories[repo][1]
    if current_dir != False:
        cur_dir = os.getcwd()
        os.chdir(current_dir)

    base.create_pull_request(branches_to, repo, True, current_dir)

    if current_dir != False:
        os.chdir(cur_dir)

sys.exit(0)
|
||||
145
scripts/develop/run_server.py
Normal file
145
scripts/develop/run_server.py
Normal file
@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../')
|
||||
import os
|
||||
import base
|
||||
import dependence
|
||||
import traceback
|
||||
import develop
|
||||
|
||||
base_dir = base.get_script_dir(__file__)
|
||||
|
||||
def install_module(path):
    """Install node dependencies for the module at *path* via 'npm ci'."""
    base.print_info('Install: ' + path)
    base.cmd_in_dir(path, 'npm', ['ci'])
|
||||
|
||||
def run_module(directory, args=None):
    """Start a node.js process for the module in *directory*.

    args: optional list of arguments for node (e.g. the script file to run).
    The default was a mutable [] -- replaced with the None-sentinel idiom to
    avoid the shared-mutable-default pitfall; callers see identical behavior.
    """
    base.run_nodejs_in_dir(directory, [] if args is None else args)
|
||||
|
||||
def find_rabbitmqctl(base_path):
    """Locate rabbitmqctl.bat under <base_path>/RabbitMQ Server (or None)."""
    search_root = os.path.join(base_path, 'RabbitMQ Server')
    return base.find_file(search_root, 'rabbitmqctl.bat')
|
||||
|
||||
def restart_win_rabbit():
    # Stop and restart the RabbitMQ application to work around the known
    # "Erl.exe high CPU usage" issue on Windows (see the linked thread).
    base.print_info('restart RabbitMQ node to prevent "Erl.exe high CPU usage every Monday morning on Windows" https://groups.google.com/forum/#!topic/rabbitmq-users/myl74gsYyYg')
    # rabbitmqctl.bat may live under either 64-bit or 32-bit Program Files.
    rabbitmqctl = find_rabbitmqctl(os.environ['PROGRAMW6432']) or find_rabbitmqctl(os.environ['ProgramFiles(x86)'])
    if rabbitmqctl is not None:
        base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
        base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
    else:
        base.print_info('Missing rabbitmqctl.bat')
|
||||
|
||||
def start_mac_services():
    # Restart/start the backing services the document server needs on macOS.
    base.print_info('Restart MySQL Server')
    base.run_process(['mysql.server', 'restart'])
    base.print_info('Start RabbitMQ Server')
    base.run_process(['rabbitmq-server'])
    # base.print_info('Start Redis')
    # base.run_process(['redis-server'])
|
||||
|
||||
def start_linux_services():
    # Restart the backing services the document server needs on Linux.
    # NOTE(review): this function is re-defined later in this file by a stub
    # that only prints, so this implementation is shadowed and never runs --
    # the duplicate definition should be removed.
    base.print_info('Restart MySQL Server')
    os.system('sudo service mysql restart')
    base.print_info('Restart RabbitMQ Server')
    os.system('sudo service rabbitmq-server restart')
|
||||
|
||||
def run_integration_example():
    """Launch the document-server-integration nodejs example if it is checked out."""
    example_dir = base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs'
    if base.is_exist(example_dir):
        base.cmd_in_dir(example_dir, 'python', ['run-develop.py'])
|
||||
|
||||
def start_linux_services():
    """Restart the backing services the document server needs on Linux.

    This duplicate definition shadows the earlier start_linux_services whose
    restart commands were lost here (only the log line remained), so on Linux
    the services were never actually restarted. Restore the real restarts so
    the shadowing definition is functional.
    """
    base.print_info('Restart MySQL Server')
    os.system('sudo service mysql restart')
    base.print_info('Restart RabbitMQ Server')
    os.system('sudo service rabbitmq-server restart')
|
||||
|
||||
def make_start():
    """Prepare host services (MySQL/RabbitMQ etc.) for a develop run."""
    base.configure_common_apps()

    host = base.host_platform()
    if host == "windows":
        dependence.check_pythonPath()
        dependence.check_gitPath()
        restart_win_rabbit()
    elif host == "mac":
        start_mac_services()
    elif host == "linux":
        start_linux_services()
|
||||
|
||||
def make_configure(args):
    # Configure and build the server modules for develop mode on the current
    # git branch; extra CLI args are forwarded to configure.py.
    platform = base.host_platform()
    branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']

    base.print_info('Build modules')
    if ("linux" == platform):
        base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
    else:
        # Non-linux hosts additionally pass the MySQL connection defaults.
        base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)

    base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
|
||||
def make_install():
    """Start the integration example and npm-install the server modules.

    (Removed an unused 'platform' local that called base.host_platform()
    needlessly; folded the repeated install_module calls into one loop.)
    """
    run_integration_example()

    base.create_dir(base_dir + '/../../../server/App_Data')

    # Install node dependencies for every server component.
    for module in ('DocService', 'Common', 'FileConverter'):
        install_module(base_dir + '/../../../server/' + module)
|
||||
|
||||
def make_run():
    # Configure the node environment and launch the server processes.
    platform = base.host_platform()
    # NODE_ENV is e.g. 'development-linux'; presumably selects the matching
    # config profile under Common/config -- confirm against the node-config setup.
    base.set_env('NODE_ENV', 'development-' + platform)
    base.set_env('NODE_CONFIG_DIR', '../Common/config')

    # FileConverter needs the native core libraries on the loader path.
    if ("mac" == platform):
        base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
    elif ("linux" == platform):
        base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')

    run_module(base_dir + '/../../../server/DocService', ['sources/server.js'])
    #run_module(base_dir + '/../../../server/DocService', ['sources/gc.js'])
    run_module(base_dir + '/../../../server/FileConverter', ['sources/convertermaster.js'])
    #run_module(base_dir + '/../../../server/SpellChecker', ['sources/server.js'])
|
||||
|
||||
def run_docker_server(args = []):
    # Full docker-server flow: prepare host services, build, install modules.
    # Exceptions are caught and the console held open so the user can read
    # the traceback before the window closes. 'args' is currently unused.
    try:
        make_start()
        develop.build_docker_server()
        make_install()

    except SystemExit:
        input("Ignoring SystemExit. Press Enter to continue...")
        exit(0)
    except KeyboardInterrupt:
        pass
    except:
        input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
|
||||
|
||||
def run_docker_sdk_web_apps(dir):
    # Build the sdkjs/web-apps part for docker develop mode. Exceptions are
    # caught and the console held open so the user can read the traceback.
    try:
        develop.build_docker_sdk_web_apps(dir)

    except SystemExit:
        input("Ignoring SystemExit. Press Enter to continue...")
        exit(0)
    except KeyboardInterrupt:
        pass
    except:
        input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
|
||||
|
||||
def make(args = []):
    # Default develop flow: prepare services, configure+build, install, run.
    # 'args' is forwarded to configure.py via make_configure. Exceptions are
    # caught and the console held open so the user can read the traceback.
    try:
        make_start()
        make_configure(args)
        make_install()
        make_run()

    except SystemExit:
        input("Ignoring SystemExit. Press Enter to continue...")
        exit(0)
    except KeyboardInterrupt:
        pass
    except:
        input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
|
||||
|
||||
if __name__ == "__main__":
|
||||
make(sys.argv[1:])
|
||||
|
||||
114
scripts/develop/vendor/libwindows.py
vendored
Normal file
114
scripts/develop/vendor/libwindows.py
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
'''
|
||||
This file is based on the code from https://github.com/JustAMan/pyWinClobber/blob/master/win32elevate.py
|
||||
|
||||
Copyright (c) 2013 by JustAMan at GitHub
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
'''
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
import ctypes
|
||||
from ctypes.wintypes import HANDLE, BOOL, DWORD, HWND, HINSTANCE, HKEY
|
||||
from ctypes import c_ulong, c_char_p, c_int, c_void_p
|
||||
PHANDLE = ctypes.POINTER(HANDLE)
|
||||
PDWORD = ctypes.POINTER(DWORD)
|
||||
|
||||
# kernel32!CloseHandle -- releases a Windows kernel object handle.
CloseHandle = ctypes.windll.kernel32.CloseHandle
CloseHandle.argtypes = (HANDLE, )
CloseHandle.restype = BOOL

# Token access mask constant (value from winnt.h; unused in this snippet).
TOKEN_READ = 0x20008
|
||||
|
||||
class ShellExecuteInfo(ctypes.Structure):
    # ctypes mirror of the Win32 SHELLEXECUTEINFO structure consumed by
    # ShellExecuteEx. Field order and types must match the native ANSI layout.
    _fields_ = [('cbSize', DWORD),
                ('fMask', c_ulong),
                ('hwnd', HWND),
                ('lpVerb', c_char_p),
                ('lpFile', c_char_p),
                ('lpParameters', c_char_p),
                ('lpDirectory', c_char_p),
                ('nShow', c_int),
                ('hInstApp', HINSTANCE),
                ('lpIDList', c_void_p),
                ('lpClass', c_char_p),
                ('hKeyClass', HKEY),
                ('dwHotKey', DWORD),
                ('hIcon', HANDLE),
                ('hProcess', HANDLE)]

    def __init__(self, **kw):
        """Initialize cbSize to the structure size and set any fields given as keywords."""
        ctypes.Structure.__init__(self)
        self.cbSize = ctypes.sizeof(self)
        for field_name, field_value in kw.items():
            setattr(self, field_name, field_value)
|
||||
|
||||
PShellExecuteInfo = ctypes.POINTER(ShellExecuteInfo)

# shell32!ShellExecuteExA -- ANSI variant, matching the c_char_p fields above.
ShellExecuteEx = ctypes.windll.Shell32.ShellExecuteExA
ShellExecuteEx.argtypes = (PShellExecuteInfo, )
ShellExecuteEx.restype = BOOL

# kernel32!WaitForSingleObject -- blocks until the handle is signaled or timeout.
WaitForSingleObject = ctypes.windll.kernel32.WaitForSingleObject
WaitForSingleObject.argtypes = (HANDLE, DWORD)
WaitForSingleObject.restype = DWORD

# SW_HIDE = 0
SW_SHOW = 5  # show the spawned window normally
# Keep hProcess open after ShellExecuteEx returns so we can wait on it.
SEE_MASK_NOCLOSEPROCESS = 0x00000040
SEE_MASK_NO_CONSOLE = 0x00008000
INFINITE = -1  # no timeout for WaitForSingleObject

ELEVATE_MARKER = 'win32elevate_marker_parameter'
|
||||
|
||||
|
||||
def wait_and_close_handle(process_handle):
    '''
    Waits till spawned process finishes and closes the handle for it
    '''
    # Block with no timeout, then release the kernel handle.
    WaitForSingleObject(process_handle, INFINITE)
    CloseHandle(process_handle)
|
||||
|
||||
def sudo(executable, params=None):
    '''
    This will re-run current Python script requesting to elevate administrative rights.

    Launches *executable* with *params* via ShellExecuteEx using the 'runas'
    verb (triggers the UAC elevation prompt) and blocks until it exits.
    Raises NotImplementedError when std streams are redirected, or
    ctypes.WinError on ShellExecuteEx failure.
    '''
    if not params:
        params = []

    execute_info = ShellExecuteInfo(
        fMask=SEE_MASK_NOCLOSEPROCESS | SEE_MASK_NO_CONSOLE,
        hwnd=None,
        lpVerb=b'runas',
        lpFile=executable.encode('utf-8'),
        lpParameters=subprocess.list2cmdline(params).encode('utf-8'),
        lpDirectory=None,
        nShow=SW_SHOW
    )

    # Redirected std streams cannot be forwarded into the elevated process.
    if not all(stream.isatty() for stream in (sys.stdin, sys.stdout, sys.stderr)):
        # TODO: Some streams were redirected, we need to manually work them
        raise NotImplementedError("Redirection is not supported")

    if not ShellExecuteEx(ctypes.byref(execute_info)):
        raise ctypes.WinError()

    # Wait for the elevated process to finish and release its handle.
    wait_and_close_handle(execute_info.hProcess)
|
||||
|
||||
|
||||
__all__ = ('sudo')
|
||||
38
scripts/package_branding.py
Normal file
38
scripts/package_branding.py
Normal file
@ -0,0 +1,38 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import package_utils as utils
|
||||
|
||||
# Branding constants consumed by the package_* build scripts.
onlyoffice = True
company_name = "ONLYOFFICE"
company_name_l = company_name.lower()
publisher_name = "Ascensio System SIA"
# Code-signing certificate name passed to the installer build scripts.
cert_name = "Ascensio System SIA"

# Deployment target for the aws s3 uploads.
s3_bucket = "repo-doc-onlyoffice-com"
s3_region = "eu-west-1"

if utils.is_windows():
    desktop_product_name = "Desktop Editors"
    desktop_product_name_s = desktop_product_name.replace(" ","")
    desktop_package_name = company_name + "_" + desktop_product_name_s
    desktop_vcredist_list = ["2022"]
    # Update-changes file basenames per language.
    desktop_update_changes_list = {
        "en": "changes",
        "ru": "changes_ru"
    }

if utils.is_macos():
    desktop_package_name = "ONLYOFFICE"
    desktop_build_dir = "desktop-apps/macos"
    desktop_branding_dir = "desktop-apps/macos"
    desktop_updates_dir = "build/update"
    desktop_changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
    # Base URL for Sparkle appcast updates on macOS.
    sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"

builder_product_name = "Document Builder"

if utils.is_linux():
    # Package formats produced per product on Linux.
    desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"]
    builder_make_targets = ["deb", "rpm"] # tar
    server_make_targets = ["deb", "rpm", "tar"]
|
||||
172
scripts/package_builder.py
Normal file
172
scripts/package_builder.py
Normal file
@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
    """Entry point: build the Document Builder packages for the host OS."""
    utils.log_h1("BUILDER")
    if utils.is_windows():
        make_windows()
        return
    if utils.is_linux():
        make_linux()
        return
    utils.log("Unsupported host OS")
||||
|
||||
def aws_s3_upload(files, key, ptype=None):
    # Upload each file to s3://<bucket>/builder/<key> with a public-read ACL.
    # When ptype is given, deploy metadata is recorded for every successful
    # upload. Returns True only if every upload succeeded; False for an
    # empty/None file list.
    if not files:
        return False
    ret = True
    key = "builder/" + key
    for file in files:
        args = ["aws"]
        # Optional custom endpoint (e.g. an S3-compatible storage).
        if hasattr(branding, "s3_endpoint_url"):
            args += ["--endpoint-url=" + branding.s3_endpoint_url]
        args += [
            "s3", "cp", "--no-progress", "--acl", "public-read",
            file, "s3://" + branding.s3_bucket + "/" + key
        ]
        if common.os_family == "windows":
            upload = utils.cmd(*args, verbose=True)
        else:
            upload = utils.sh(" ".join(args), verbose=True)
        ret &= upload
        if upload and ptype is not None:
            full_key = key
            # A key ending in '/' denotes a prefix; append the file basename.
            if full_key.endswith("/"): full_key += utils.get_basename(file)
            utils.add_deploy_data(
                "builder", ptype, file, full_key,
                branding.s3_bucket, branding.s3_region
            )
    return ret
|
||||
|
||||
def make_windows():
    # Assemble the Windows builder package: copy build artifacts into
    # build\app and produce a portable zip and an Inno Setup installer.
    # The computed artifact names are shared with make_zip/make_inno via
    # module globals.
    global inno_file, zip_file, suffix, key_prefix
    utils.set_cwd("document-builder-package")

    prefix = common.platforms[common.platform]["prefix"]
    company = branding.company_name.lower()
    product = branding.builder_product_name.replace(" ","").lower()
    source_dir = "..\\build_tools\\out\\%s\\%s\\%s" % (prefix, company, product)
    package_name = company + "_" + product
    package_version = common.version + "." + common.build
    # Platform id -> architecture suffix used in artifact file names.
    suffix = {
        "windows_x64": "x64",
        "windows_x86": "x86",
        "windows_x64_xp": "x64_xp",
        "windows_x86_xp": "x86_xp"
    }[common.platform]
    zip_file = "%s_%s_%s.zip" % (package_name, package_version, suffix)
    inno_file = "%s_%s_%s.exe" % (package_name, package_version, suffix)

    if common.clean:
        utils.log_h2("builder clean")
        utils.delete_dir("build")

    utils.log_h2("copy arifacts")
    utils.create_dir("build\\app")
    utils.copy_dir_content(source_dir, "build\\app\\")

    make_zip()
    make_inno()

    utils.set_cwd(common.workspace_dir)
    return
|
||||
|
||||
def make_zip():
    # Pack build\app into the portable zip artifact and optionally deploy it.
    utils.log_h2("builder zip build")
    utils.log_h3(zip_file)

    # 'creates' presumably lets utils.cmd skip the step when the artifact
    # already exists -- confirm against package_utils.
    ret = utils.cmd("7z", "a", "-y", zip_file, ".\\app\\*",
        chdir="build", creates="build\\" + zip_file, verbose=True)
    utils.set_summary("builder zip build", ret)

    if common.deploy and ret:
        utils.log_h2("builder zip deploy")
        ret = aws_s3_upload(
            ["build\\" + zip_file], "win/generic/%s/" % common.channel, "Portable"
        )
        utils.set_summary("builder zip deploy", ret)
    return
|
||||
|
||||
def make_inno():
    # Build the Inno Setup installer via make_inno.ps1 and optionally deploy.
    utils.log_h2("builder inno build")
    utils.log_h3(inno_file)

    args = [
        "-Arch " + suffix,
        "-Version " + common.version,
        "-Build " + common.build
    ]
    if not branding.onlyoffice:
        # Branded builds point the script at the branding package directory.
        args.append("-Branding '..\\..\\%s\\document-builder-package\\exe'" % common.branding)
    if common.sign:
        args.append("-Sign")
        args.append("-CertName '%s'" % branding.cert_name)
    ret = utils.ps1(
        ".\\make_inno.ps1", args, creates="build\\" + inno_file, verbose=True
    )
    utils.set_summary("builder inno build", ret)

    if common.deploy and ret:
        utils.log_h2("builder inno deploy")
        ret = aws_s3_upload(
            ["build\\" + inno_file], "win/inno/%s/" % common.channel, "Installer"
        )
        utils.set_summary("builder inno deploy", ret)
    return
|
||||
|
||||
def make_linux():
|
||||
utils.set_cwd("document-builder-package")
|
||||
|
||||
utils.log_h2("builder build")
|
||||
make_args = branding.builder_make_targets
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"]
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("builder build", ret)
|
||||
|
||||
rpm_arch = "x86_64"
|
||||
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
|
||||
|
||||
if common.deploy:
|
||||
utils.log_h2("builder deploy")
|
||||
if ret:
|
||||
if "tar" in branding.builder_make_targets:
|
||||
utils.log_h2("builder tar deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("tar/*.tar.gz"),
|
||||
"linux/generic/%s/" % common.channel,
|
||||
"Portable"
|
||||
)
|
||||
utils.set_summary("builder tar deploy", ret)
|
||||
if "deb" in branding.builder_make_targets:
|
||||
utils.log_h2("builder deb deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("deb/*.deb"),
|
||||
"linux/debian/%s/" % common.channel,
|
||||
"Debian"
|
||||
)
|
||||
utils.set_summary("builder deb deploy", ret)
|
||||
if "rpm" in branding.builder_make_targets:
|
||||
utils.log_h2("builder rpm deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
|
||||
"linux/rhel/%s/" % common.channel,
|
||||
"CentOS"
|
||||
)
|
||||
utils.set_summary("builder rpm deploy", ret)
|
||||
else:
|
||||
if "tar" in branding.builder_make_targets:
|
||||
utils.set_summary("builder tar deploy", False)
|
||||
if "deb" in branding.builder_make_targets:
|
||||
utils.set_summary("builder deb deploy", False)
|
||||
if "rpm" in branding.builder_make_targets:
|
||||
utils.set_summary("builder rpm deploy", False)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
49
scripts/package_common.py
Normal file
49
scripts/package_common.py
Normal file
@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
platforms = {
|
||||
"windows_x64": { "title": "Windows x64", "prefix": "win_64" },
|
||||
"windows_x64_xp": { "title": "Windows x64 XP", "prefix": "win_64_xp" },
|
||||
"windows_x86": { "title": "Windows x86", "prefix": "win_32" },
|
||||
"windows_x86_xp": { "title": "Windows x86 XP", "prefix": "win_32_xp" },
|
||||
"darwin_x86_64": { "title": "macOS x86_64", "prefix": "mac_64" },
|
||||
"darwin_x86_64_v8": { "title": "macOS x86_64 V8", "prefix": "mac_64" },
|
||||
"darwin_arm64": { "title": "macOS arm64", "prefix": "mac_arm64" },
|
||||
"linux_x86_64": { "title": "Linux x86_64", "prefix": "linux_64" },
|
||||
"linux_aarch64": { "title": "Linux aarch64", "prefix": "linux_arm64" },
|
||||
"android": { "title": "Android" }
|
||||
}
|
||||
|
||||
out_dir = "build_tools/out"
|
||||
tsa_server = "http://timestamp.digicert.com"
|
||||
vcredist_links = {
|
||||
"2022": {
|
||||
"x64": {
|
||||
"url": "https://aka.ms/vs/17/release/vc_redist.x64.exe",
|
||||
"md5": "703bd677778f2a1ba1eb4338bac3b868"
|
||||
},
|
||||
"x86": {
|
||||
"url": "https://aka.ms/vs/17/release/vc_redist.x86.exe",
|
||||
"md5": "732d0ac9cd31b8136f1c8e72efec1636"
|
||||
}
|
||||
},
|
||||
"2015": {
|
||||
"x64": {
|
||||
"url": "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x64.exe",
|
||||
"md5": "27b141aacc2777a82bb3fa9f6e5e5c1c"
|
||||
},
|
||||
"x86": {
|
||||
"url": "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x86.exe",
|
||||
"md5": "1a15e6606bac9647e7ad3caa543377cf"
|
||||
}
|
||||
},
|
||||
"2013": {
|
||||
"x64": {
|
||||
"url": "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x64.exe",
|
||||
"md5": "96b61b8e069832e6b809f24ea74567ba"
|
||||
},
|
||||
"x86": {
|
||||
"url": "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x86.exe",
|
||||
"md5": "0fc525b6b7b96a87523daa7a0013c69d"
|
||||
}
|
||||
}
|
||||
}
|
||||
59
scripts/package_core.py
Normal file
59
scripts/package_core.py
Normal file
@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
|
||||
utils.log_h1("CORE")
|
||||
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
if common.deploy:
|
||||
make_core()
|
||||
return
|
||||
|
||||
def make_core():
|
||||
prefix = common.platforms[common.platform]["prefix"]
|
||||
company = branding.company_name.lower()
|
||||
repos = {
|
||||
"windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." + common.build },
|
||||
"windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." + common.build },
|
||||
"darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build },
|
||||
"darwin_arm64": { "repo": "mac", "arch": "arm", "version": common.version + "-" + common.build },
|
||||
"linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build },
|
||||
}
|
||||
repo = repos[common.platform]
|
||||
branch = utils.get_env("BRANCH_NAME")
|
||||
core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company))
|
||||
dest_version = "%s/core/%s/%s/%s/" % (repo["repo"], branch, repo["version"], repo["arch"])
|
||||
dest_latest = "%s/core/%s/%s/%s/" % (repo["repo"], branch, "latest", repo["arch"])
|
||||
|
||||
if branch is None:
|
||||
utils.log_err("BRANCH_NAME variable is undefined")
|
||||
utils.set_summary("core deploy", False)
|
||||
return
|
||||
if not utils.is_file(core_7z):
|
||||
utils.log_err("core.7z does not exist")
|
||||
utils.set_summary("core deploy", False)
|
||||
return
|
||||
|
||||
utils.log_h2("core deploy")
|
||||
args = ["aws", "s3", "cp", "--acl", "public-read", "--no-progress",
|
||||
core_7z, "s3://" + branding.s3_bucket + "/" + dest_version + "core.7z"]
|
||||
if common.os_family == "windows":
|
||||
ret = utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
ret = utils.sh(" ".join(args), verbose=True)
|
||||
if ret:
|
||||
utils.add_deploy_data("core", "Archive", core_7z, dest_version + "core.7z", branding.s3_bucket, branding.s3_region)
|
||||
args = ["aws", "s3", "sync", "--delete",
|
||||
"--acl", "public-read", "--no-progress",
|
||||
"s3://" + branding.s3_bucket + "/" + dest_version,
|
||||
"s3://" + branding.s3_bucket + "/" + dest_latest]
|
||||
if common.os_family == "windows":
|
||||
ret &= utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
ret &= utils.sh(" ".join(args), verbose=True)
|
||||
utils.set_summary("core deploy", ret)
|
||||
return
|
||||
662
scripts/package_desktop.py
Normal file
662
scripts/package_desktop.py
Normal file
@ -0,0 +1,662 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
|
||||
utils.log_h1("DESKTOP")
|
||||
if utils.is_windows():
|
||||
make_windows()
|
||||
elif utils.is_macos():
|
||||
make_macos()
|
||||
elif utils.is_linux():
|
||||
make_linux()
|
||||
else:
|
||||
utils.log("Unsupported host OS")
|
||||
return
|
||||
|
||||
def aws_s3_upload(files, key, ptype=None):
|
||||
if not files:
|
||||
return False
|
||||
ret = True
|
||||
key = "desktop/" + key
|
||||
for file in files:
|
||||
args = ["aws"]
|
||||
if hasattr(branding, "s3_endpoint_url"):
|
||||
args += ["--endpoint-url=" + branding.s3_endpoint_url]
|
||||
args += [
|
||||
"s3", "cp", "--no-progress", "--acl", "public-read",
|
||||
file, "s3://" + branding.s3_bucket + "/" + key
|
||||
]
|
||||
if common.os_family == "windows":
|
||||
upload = utils.cmd(*args, verbose=True)
|
||||
else:
|
||||
upload = utils.sh(" ".join(args), verbose=True)
|
||||
ret &= upload
|
||||
if upload and ptype is not None:
|
||||
full_key = key
|
||||
if full_key.endswith("/"): full_key += utils.get_basename(file)
|
||||
utils.add_deploy_data(
|
||||
"desktop", ptype, file, full_key,
|
||||
branding.s3_bucket, branding.s3_region
|
||||
)
|
||||
return ret
|
||||
|
||||
#
|
||||
# Windows
|
||||
#
|
||||
|
||||
def make_windows():
|
||||
global package_version, iscc_args, source_dir, source_help_dir, \
|
||||
inno_file, inno_help_file, inno_update_file, advinst_file, zip_file, \
|
||||
arch_list, inno_arch_list
|
||||
utils.set_cwd("desktop-apps\\win-linux\\package\\windows")
|
||||
|
||||
prefix = common.platforms[common.platform]["prefix"]
|
||||
company = branding.company_name.lower()
|
||||
product = branding.desktop_product_name.replace(" ","").lower()
|
||||
package_name = branding.desktop_package_name
|
||||
package_version = common.version + "." + common.build
|
||||
source_dir = "..\\..\\..\\..\\build_tools\\out\\%s\\%s\\%s" % (prefix, company, product)
|
||||
arch_list = {
|
||||
"windows_x64": "x64",
|
||||
"windows_x64_xp": "x64",
|
||||
"windows_x86": "x86",
|
||||
"windows_x86_xp": "x86"
|
||||
}
|
||||
inno_arch_list = {
|
||||
"windows_x64": "64",
|
||||
"windows_x86": "32",
|
||||
"windows_x64_xp": "64",
|
||||
"windows_x86_xp": "32"
|
||||
}
|
||||
suffix = arch_list[common.platform]
|
||||
if common.platform.endswith("_xp"): suffix += "_xp"
|
||||
zip_file = "%s_%s_%s.zip" % (package_name, package_version, suffix)
|
||||
inno_file = "%s_%s_%s.exe" % (package_name, package_version, suffix)
|
||||
inno_help_file = "%s_Help_%s_%s.exe" % (package_name, package_version, suffix)
|
||||
inno_update_file = "update\\editors_update_%s.exe" % suffix
|
||||
advinst_file = "%s_%s_%s.msi" % (package_name, package_version, suffix)
|
||||
|
||||
if common.clean:
|
||||
utils.log_h2("desktop clean")
|
||||
# utils.delete_dir("data\\vcredist")
|
||||
utils.delete_dir("DesktopEditors-cache")
|
||||
utils.delete_files("*.exe")
|
||||
utils.delete_files("*.msi")
|
||||
utils.delete_files("*.aic")
|
||||
utils.delete_files("*.tmp")
|
||||
utils.delete_files("*.zip")
|
||||
utils.delete_files("update\\*.exe")
|
||||
utils.delete_files("update\\*.xml")
|
||||
utils.delete_files("update\\*.html")
|
||||
|
||||
make_zip()
|
||||
|
||||
vcdl = True
|
||||
vcdl &= download_vcredist("2013")
|
||||
vcdl &= download_vcredist("2022")
|
||||
|
||||
if not vcdl:
|
||||
utils.set_summary("desktop inno build", False)
|
||||
# utils.set_summary("desktop inno help build", False)
|
||||
utils.set_summary("desktop inno update build", False)
|
||||
utils.set_summary("desktop advinst build", False)
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
make_inno()
|
||||
# make_inno_help()
|
||||
make_inno_update()
|
||||
|
||||
if common.platform == "windows_x64":
|
||||
make_winsparkle_files()
|
||||
|
||||
if common.platform in ["windows_x64", "windows_x86"]:
|
||||
make_advinst()
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_zip():
|
||||
utils.log_h2("desktop zip build")
|
||||
ret = utils.cmd(
|
||||
"7z", "a", "-y", zip_file, source_dir + "\\*",
|
||||
creates=zip_file, verbose=True
|
||||
)
|
||||
utils.set_summary("desktop zip build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop zip deploy")
|
||||
ret = aws_s3_upload(
|
||||
[zip_file],
|
||||
"win/generic/%s/" % common.channel,
|
||||
"Portable"
|
||||
)
|
||||
utils.set_summary("desktop zip deploy", ret)
|
||||
return
|
||||
|
||||
def download_vcredist(year):
|
||||
utils.log_h2("vcredist " + year + " download")
|
||||
|
||||
arch = arch_list[common.platform]
|
||||
link = common.vcredist_links[year][arch]["url"]
|
||||
md5 = common.vcredist_links[year][arch]["md5"]
|
||||
vcredist_file = "data\\vcredist\\vcredist_%s_%s.exe" % (year, arch)
|
||||
|
||||
utils.log_h2(vcredist_file)
|
||||
utils.create_dir(utils.get_dirname(vcredist_file))
|
||||
ret = utils.download_file(link, vcredist_file, md5, verbose=True)
|
||||
utils.set_summary("vcredist " + year + " download", ret)
|
||||
return ret
|
||||
|
||||
def make_inno():
|
||||
global iscc_args
|
||||
utils.log_h2("desktop inno build")
|
||||
utils.log_h3(inno_file)
|
||||
|
||||
iscc_args = [
|
||||
"/Qp",
|
||||
"/DsAppVersion=" + package_version,
|
||||
"/DDEPLOY_PATH=" + source_dir,
|
||||
"/D_ARCH=" + inno_arch_list[common.platform]
|
||||
]
|
||||
if branding.onlyoffice:
|
||||
iscc_args.append("/D_ONLYOFFICE=1")
|
||||
else:
|
||||
iscc_args.append("/DsBrandingFolder=" + \
|
||||
utils.get_abspath(common.workspace_dir + "\\" + common.branding + "\\desktop-apps"))
|
||||
if common.platform in ["windows_x64_xp", "windows_x86_xp"]:
|
||||
iscc_args.append("/D_WIN_XP=1")
|
||||
if common.sign:
|
||||
iscc_args.append("/DENABLE_SIGNING=1")
|
||||
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + \
|
||||
branding.cert_name + "$q /t " + common.tsa_server + " $f")
|
||||
args = ["iscc"] + iscc_args + ["common.iss"]
|
||||
ret = utils.cmd(*args, creates=inno_file, verbose=True)
|
||||
utils.set_summary("desktop inno build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop inno deploy")
|
||||
ret = aws_s3_upload(
|
||||
[inno_file],
|
||||
"win/inno/%s/%s/" % (common.version, common.build),
|
||||
"Installer"
|
||||
)
|
||||
utils.set_summary("desktop inno deploy", ret)
|
||||
return
|
||||
|
||||
def make_inno_help():
|
||||
utils.log_h2("desktop inno help build")
|
||||
utils.log_h3(inno_help_file)
|
||||
|
||||
args = [
|
||||
"iscc",
|
||||
"/Qp",
|
||||
"/DsAppVersion=" + package_version,
|
||||
"/DDEPLOY_PATH=" + source_help_dir,
|
||||
"/D_ARCH=" + inno_arch_list[common.platform]
|
||||
]
|
||||
if branding.onlyoffice:
|
||||
args.append("/D_ONLYOFFICE=1")
|
||||
else:
|
||||
args.append("/DsBrandingFolder=" + \
|
||||
utils.get_abspath(common.workspace_dir + "\\" + common.branding + "\\desktop-apps"))
|
||||
if common.sign:
|
||||
args.append("/DENABLE_SIGNING=1")
|
||||
args.append("/Sbyparam=signtool.exe sign /v /n $q" + \
|
||||
branding.cert_name + "$q /t " + common.tsa_server + " $f")
|
||||
args.append("help.iss")
|
||||
ret = utils.cmd(*args, creates=inno_help_file, verbose=True)
|
||||
utils.set_summary("desktop inno help build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop inno help deploy")
|
||||
ret = aws_s3_upload(
|
||||
[inno_help_file],
|
||||
"win/inno/%s/%s/" % (common.version, common.build),
|
||||
"Installer"
|
||||
)
|
||||
utils.set_summary("desktop inno help deploy", ret)
|
||||
return
|
||||
|
||||
def make_inno_update():
|
||||
utils.log_h2("desktop inno update build")
|
||||
utils.log_h2(inno_update_file)
|
||||
|
||||
args = ["iscc"] + iscc_args + ["/DTARGET_NAME=" + inno_file, "update_common.iss"]
|
||||
ret = utils.cmd(*args, creates=inno_update_file, verbose=True)
|
||||
utils.set_summary("desktop inno update build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop inno update deploy")
|
||||
ret = aws_s3_upload(
|
||||
[inno_update_file],
|
||||
"win/inno/%s/%s/" % (common.version, common.build),
|
||||
"WinSparkle"
|
||||
)
|
||||
utils.set_summary("desktop inno update deploy", ret)
|
||||
return
|
||||
|
||||
def make_winsparkle_files():
|
||||
utils.log_h2("desktop winsparkle files build")
|
||||
|
||||
if branding.onlyoffice:
|
||||
awk_branding = "update/branding.awk"
|
||||
else:
|
||||
awk_branding = "../../../../" + common.branding + \
|
||||
"/desktop-apps/win-linux/package/windows/update/branding.awk"
|
||||
awk_args = [
|
||||
"-v", "Version=" + common.version,
|
||||
"-v", "Build=" + common.build,
|
||||
"-v", "Branch=" + common.channel,
|
||||
"-v", "Timestamp=" + common.timestamp,
|
||||
"-i", awk_branding
|
||||
]
|
||||
|
||||
appcast = "update/appcast.xml"
|
||||
utils.log_h3(appcast)
|
||||
args = ["env", "LANG=en_US.UTF-8", "awk", "-v", "Prod=1"] + \
|
||||
awk_args + ["-f", "update/appcast.xml.awk"]
|
||||
appcast_result = utils.cmd_output(*args, verbose=True)
|
||||
utils.write_file(appcast, appcast_result)
|
||||
|
||||
appcast_test = "update/appcast-test.xml"
|
||||
utils.log_h3(appcast_test)
|
||||
args = ["env", "LANG=en_US.UTF-8", "awk"] + \
|
||||
awk_args + ["-f", "update/appcast.xml.awk"]
|
||||
appcast_result = utils.cmd_output(*args, verbose=True)
|
||||
utils.write_file(appcast_test, appcast_result)
|
||||
|
||||
if branding.onlyoffice:
|
||||
changes_dir = "update/changes/" + common.version
|
||||
else:
|
||||
changes_dir = "../../../../" + common.branding + \
|
||||
"/desktop-apps/win-linux/package/windows/update/changes/" + common.version
|
||||
for lang, base in branding.desktop_update_changes_list.items():
|
||||
changes = "update/%s.html" % base
|
||||
if lang == "en": encoding = "en_US.UTF-8"
|
||||
elif lang == "ru": encoding = "ru_RU.UTF-8"
|
||||
utils.log_h3(changes)
|
||||
changes_file = "%s/%s.html" % (changes_dir, lang)
|
||||
args = ["env", "LANG=" + encoding, "awk"] + awk_args + \
|
||||
["-f", "update/changes.html.awk", changes_file]
|
||||
|
||||
if utils.is_exist(changes_file):
|
||||
changes_result = utils.cmd_output(*args, verbose=True)
|
||||
utils.write_file(changes, changes_result)
|
||||
else:
|
||||
utils.log("! file not exist: " + changes_file)
|
||||
|
||||
if common.deploy:
|
||||
utils.log_h2("desktop winsparkle files deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("update/*.xml") + utils.glob_path("update/*.html"),
|
||||
"win/inno/%s/%s/" % (common.version, common.build),
|
||||
"WinSparkle"
|
||||
)
|
||||
utils.set_summary("desktop winsparkle files deploy", ret)
|
||||
return
|
||||
|
||||
def make_advinst():
|
||||
utils.log_h2("desktop advinst build")
|
||||
utils.log_h3(advinst_file)
|
||||
|
||||
arch = arch_list[common.platform]
|
||||
|
||||
if not branding.onlyoffice:
|
||||
branding_path = common.workspace_dir + "\\" + common.branding
|
||||
utils.copy_dir_content(
|
||||
branding_path + "\\desktop-apps\\win-linux\\package\\windows\\data", "data", ".bmp")
|
||||
utils.copy_dir_content(
|
||||
branding_path + "\\desktop-apps\\win-linux\\package\\windows\\data", "data", ".png")
|
||||
utils.copy_dir_content(
|
||||
branding_path + "\\desktop-apps\\win-linux\\extras\\projicons\\res",
|
||||
"..\\..\\extras\\projicons\\res", ".ico")
|
||||
utils.copy_file(
|
||||
branding_path + "\\desktop-apps\\win-linux\\package\\windows\\dictionary.ail",
|
||||
"dictionary.ail")
|
||||
utils.copy_file(
|
||||
branding_path + "\\desktop-apps\\common\\package\\license\\eula_" + common.branding + ".rtf",
|
||||
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf")
|
||||
utils.copy_file(
|
||||
branding_path + "\\multimedia\\videoplayer\\icons\\" + common.branding + ".ico",
|
||||
"..\\..\\extras\\projicons\\res\\media.ico")
|
||||
utils.copy_file(
|
||||
branding_path + "\\multimedia\\imageviewer\\icons\\ico\\" + common.branding + ".ico",
|
||||
"..\\..\\extras\\projicons\\res\\gallery.ico")
|
||||
|
||||
aic_content = [";aic"]
|
||||
if not common.sign:
|
||||
aic_content += [
|
||||
"ResetSig"
|
||||
]
|
||||
if arch == "x86":
|
||||
aic_content += [
|
||||
"SetPackageType x86",
|
||||
"SetAppdir -buildname DefaultBuild -path [ProgramFilesFolder][MANUFACTURER_INSTALL_FOLDER]\\[PRODUCT_INSTALL_FOLDER]",
|
||||
'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x64)"',
|
||||
'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x64)"'
|
||||
]
|
||||
if arch == "x64":
|
||||
aic_content += [
|
||||
'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x86)"',
|
||||
'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x86)"'
|
||||
]
|
||||
if branding.onlyoffice:
|
||||
aic_content += [
|
||||
"DelFolder CUSTOM_PATH"
|
||||
]
|
||||
else:
|
||||
utils.replace_in_file('DesktopEditors.aip','(<ROW Property="UpgradeCode" Value=")(.*)("/>)', r'\1%s\3' % (branding.desktop_upgrade_code))
|
||||
aic_content += [
|
||||
"AddUpgradeCode {47EEF706-B0E4-4C43-944B-E5F914B92B79} \
|
||||
-min_ver 7.1.1 -include_min_ver \
|
||||
-max_ver 7.2.2 -include_max_ver \
|
||||
-include_lang 1049 \
|
||||
-property_name UPGRADE_2 -enable_migrate",
|
||||
"DelLanguage 1029 -buildname DefaultBuild",
|
||||
"DelLanguage 1031 -buildname DefaultBuild",
|
||||
"DelLanguage 1041 -buildname DefaultBuild",
|
||||
"DelLanguage 1046 -buildname DefaultBuild",
|
||||
"DelLanguage 2070 -buildname DefaultBuild",
|
||||
"DelLanguage 1060 -buildname DefaultBuild",
|
||||
"DelLanguage 1036 -buildname DefaultBuild",
|
||||
"DelLanguage 3082 -buildname DefaultBuild",
|
||||
"DelLanguage 1033 -buildname DefaultBuild",
|
||||
"SetCurrentFeature ExtendedFeature",
|
||||
"NewSync CUSTOM_PATH " + source_dir + "\\..\\MediaViewer",
|
||||
"UpdateFile CUSTOM_PATH\\ImageViewer.exe " + source_dir + "\\..\\MediaViewer\\ImageViewer.exe",
|
||||
"UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + source_dir + "\\..\\MediaViewer\\VideoPlayer.exe",
|
||||
"SetProperty ASCC_REG_PREFIX=" + branding.ascc_reg_prefix
|
||||
]
|
||||
aic_content += [
|
||||
"AddOsLc -buildname DefaultBuild -arch " + arch,
|
||||
"SetCurrentFeature MainFeature",
|
||||
"NewSync APPDIR " + source_dir,
|
||||
"UpdateFile APPDIR\\DesktopEditors.exe " + source_dir + "\\DesktopEditors.exe",
|
||||
"SetVersion " + package_version,
|
||||
"SetPackageName " + advinst_file + " -buildname DefaultBuild",
|
||||
"Rebuild -buildslist DefaultBuild"
|
||||
]
|
||||
utils.write_file("DesktopEditors.aic", "\r\n".join(aic_content), "utf-8-sig")
|
||||
ret = utils.cmd("AdvancedInstaller.com", "/execute", \
|
||||
"DesktopEditors.aip", "DesktopEditors.aic", verbose=True)
|
||||
utils.set_summary("desktop advinst build", ret)
|
||||
|
||||
if common.deploy and ret:
|
||||
utils.log_h2("desktop advinst deploy")
|
||||
ret = aws_s3_upload(
|
||||
[advinst_file],
|
||||
"win/advinst/%s/" % common.channel,
|
||||
"Installer"
|
||||
)
|
||||
utils.set_summary("desktop advinst deploy", ret)
|
||||
return
|
||||
|
||||
#
|
||||
# macOS
|
||||
#
|
||||
|
||||
def make_macos():
|
||||
global package_name, build_dir, branding_dir, updates_dir, changes_dir, \
|
||||
suffix, lane, scheme, app_version
|
||||
package_name = branding.desktop_package_name
|
||||
build_dir = branding.desktop_build_dir
|
||||
branding_dir = branding.desktop_branding_dir
|
||||
updates_dir = branding.desktop_updates_dir
|
||||
changes_dir = branding.desktop_changes_dir
|
||||
suffix = {
|
||||
"darwin_x86_64": "x86_64",
|
||||
"darwin_x86_64_v8": "v8",
|
||||
"darwin_arm64": "arm"
|
||||
}[common.platform]
|
||||
lane = "release_" + suffix
|
||||
scheme = package_name + "-" + suffix
|
||||
|
||||
utils.set_cwd(branding_dir)
|
||||
|
||||
if common.clean:
|
||||
utils.log("\n=== Clean\n")
|
||||
utils.delete_dir(utils.get_env("HOME") + "/Library/Developer/Xcode/Archives")
|
||||
utils.delete_dir(utils.get_env("HOME") + "/Library/Caches/Sparkle_generate_appcast")
|
||||
|
||||
plist_path = "%s/%s/ONLYOFFICE/Resources/%s-%s/Info.plist" \
|
||||
% (common.workspace_dir, branding.desktop_branding_dir, branding.desktop_package_name, suffix)
|
||||
current_version = utils.sh_output(
|
||||
'/usr/libexec/PlistBuddy -c "Print :CFBundleShortVersionString" ' + plist_path,
|
||||
verbose=True).rstrip()
|
||||
current_build = utils.sh_output(
|
||||
'/usr/libexec/PlistBuddy -c "Print :CFBundleVersion" ' + plist_path,
|
||||
verbose=True).rstrip()
|
||||
app_version = current_version
|
||||
|
||||
appcast_url = branding.sparkle_base_url + "/" + suffix + "/" + branding.desktop_package_name.lower() + ".xml"
|
||||
release_version = utils.sh_output(
|
||||
'curl -Ls ' + appcast_url + ' 2> /dev/null' \
|
||||
+ ' | xmllint --xpath "/rss/channel/item[1]/enclosure/@*[name()=\'sparkle:shortVersionString\']" -' \
|
||||
+ ' | cut -f2 -d\\\"',
|
||||
verbose=True).rstrip()
|
||||
release_build = utils.sh_output(
|
||||
'curl -Ls ' + appcast_url + ' 2> /dev/null' \
|
||||
+ ' | xmllint --xpath "/rss/channel/item[1]/enclosure/@*[name()=\'sparkle:version\']" -' \
|
||||
+ ' | cut -f2 -d\\\"',
|
||||
verbose=True).rstrip()
|
||||
|
||||
utils.log("CURRENT=" + current_version + "(" + current_build + ")" \
|
||||
+ "\nRELEASE=" + release_version + "(" + release_build + ")")
|
||||
|
||||
dmg = make_dmg()
|
||||
if dmg:
|
||||
if int(current_build) > int(release_build):
|
||||
make_sparkle_updates()
|
||||
else:
|
||||
utils.log(release_build + " <= " + current_build)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
|
||||
def make_dmg():
|
||||
utils.log_h2("desktop dmg build")
|
||||
utils.log_h3(scheme)
|
||||
utils.log_h3("build/" + package_name + ".app")
|
||||
dmg = utils.sh(
|
||||
"bundler exec fastlane " + lane + " skip_git_bump:true",
|
||||
verbose=True
|
||||
)
|
||||
utils.set_summary("desktop dmg build", dmg)
|
||||
|
||||
if common.deploy and dmg:
|
||||
utils.log_h2("desktop dmg deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("build/*.dmg"),
|
||||
"mac/%s/%s/%s/" % (suffix, common.version, common.build),
|
||||
"Disk Image"
|
||||
)
|
||||
utils.set_summary("desktop dmg deploy", ret)
|
||||
|
||||
utils.log_h2("desktop zip deploy")
|
||||
ret = aws_s3_upload(
|
||||
["build/%s-%s.zip" % (scheme, app_version)],
|
||||
"mac/%s/%s/%s/" % (suffix, common.version, common.build),
|
||||
"Archive"
|
||||
)
|
||||
utils.set_summary("desktop zip deploy", ret)
|
||||
return dmg
|
||||
|
||||
def make_sparkle_updates():
|
||||
utils.log_h2("desktop sparkle files build")
|
||||
|
||||
zip_filename = scheme + '-' + app_version
|
||||
macos_zip = "build/" + zip_filename + ".zip"
|
||||
updates_storage_dir = "%s/%s/_updates" % (utils.get_env('ARCHIVES_DIR'), scheme)
|
||||
utils.create_dir(updates_dir)
|
||||
utils.copy_file(macos_zip, updates_dir)
|
||||
utils.copy_dir_content(updates_storage_dir, updates_dir, ".zip")
|
||||
utils.copy_file(
|
||||
changes_dir + "/" + app_version + "/ReleaseNotes.html",
|
||||
updates_dir + "/" + zip_filename + ".html"
|
||||
)
|
||||
utils.copy_file(
|
||||
changes_dir + "/" + app_version + "/ReleaseNotesRU.html",
|
||||
updates_dir + "/" + zip_filename + ".ru.html"
|
||||
)
|
||||
|
||||
sparkle_base_url = "%s/%s/updates/" % (branding.sparkle_base_url, suffix)
|
||||
ret = utils.sh(
|
||||
common.workspace_dir \
|
||||
+ "/desktop-apps/macos/Vendor/Sparkle/bin/generate_appcast " \
|
||||
+ updates_dir \
|
||||
+ " --download-url-prefix " + sparkle_base_url \
|
||||
+ " --release-notes-url-prefix " + sparkle_base_url,
|
||||
verbose=True
|
||||
)
|
||||
utils.set_summary("desktop sparkle files build", ret)
|
||||
|
||||
# utils.log_h3("edit sparkle appcast links")
|
||||
# appcast_url = branding.sparkle_base_url + "/" + suffix
|
||||
# appcast = "%s/%s.xml" % (updates_dir, package_name.lower())
|
||||
# for lang, base in update_changes_list.items():
|
||||
# if base == "ReleaseNotes":
|
||||
# utils.replace_in_file(appcast,
|
||||
# r'(<sparkle:releaseNotesLink>.+/).+(\.html</sparkle:releaseNotesLink>)',
|
||||
# "\\1" + base + "\\2")
|
||||
# else:
|
||||
# utils.replace_in_file(appcast,
|
||||
# r'(<sparkle:releaseNotesLink xml:lang="' + lang + r'">).+(\.html</sparkle:releaseNotesLink>)',
|
||||
# "\\1" + base + "\\2")
|
||||
|
||||
utils.log("")
|
||||
utils.log_h3("generate checksums")
|
||||
utils.sh(
|
||||
"md5 *.zip *.delta > md5sums.txt",
|
||||
chdir="build/update", verbose=True
|
||||
)
|
||||
utils.sh(
|
||||
"shasum -a 256 *.zip *.delta > sha256sums.txt",
|
||||
chdir="build/update", verbose=True
|
||||
)
|
||||
|
||||
if common.deploy:
|
||||
utils.log_h2("desktop sparkle files deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("build/update/*.delta") \
|
||||
+ utils.glob_path("build/update/*.xml") \
|
||||
+ utils.glob_path("build/update/*.html"),
|
||||
"mac/%s/%s/%s/" % (suffix, common.version, common.build),
|
||||
"Sparkle"
|
||||
)
|
||||
utils.set_summary("desktop sparkle files deploy", ret)
|
||||
|
||||
utils.log_h2("desktop checksums deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("build/update/*.txt"),
|
||||
"mac/%s/%s/%s/" % (suffix, common.version, common.build),
|
||||
"Checksums"
|
||||
)
|
||||
utils.set_summary("desktop checksums deploy", ret)
|
||||
return
|
||||
|
||||
#
|
||||
# Linux
|
||||
#
|
||||
|
||||
def make_linux():
|
||||
utils.set_cwd("desktop-apps/win-linux/package/linux")
|
||||
|
||||
utils.log_h2("desktop build")
|
||||
make_args = branding.desktop_make_targets
|
||||
if common.platform == "linux_aarch64":
|
||||
make_args += ["-e", "UNAME_M=aarch64"]
|
||||
if not branding.onlyoffice:
|
||||
make_args += ["-e", "BRANDING_DIR=../../../../" + common.branding + "/desktop-apps/win-linux/package/linux"]
|
||||
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
|
||||
utils.set_summary("desktop build", ret)
|
||||
|
||||
rpm_arch = "x86_64"
|
||||
if common.platform == "linux_aarch64": rpm_arch = "aarch64"
|
||||
|
||||
if common.deploy:
|
||||
utils.log_h2("desktop deploy")
|
||||
if ret:
|
||||
utils.log_h2("desktop tar deploy")
|
||||
if "tar" in branding.desktop_make_targets:
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("tar/*.tar.gz") + utils.glob_path("tar/*.tar.xz"),
|
||||
"linux/generic/%s/" % common.channel,
|
||||
"Portable"
|
||||
)
|
||||
utils.set_summary("desktop tar deploy", ret)
|
||||
if "deb" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop deb deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("deb/*.deb"),
|
||||
"linux/debian/%s/" % common.channel,
|
||||
"Debian"
|
||||
)
|
||||
utils.set_summary("desktop deb deploy", ret)
|
||||
if "deb-astra" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop deb-astra deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("deb-astra/*.deb"),
|
||||
"linux/astra/",
|
||||
"Astra Linux Signed"
|
||||
)
|
||||
utils.set_summary("desktop deb-astra deploy", ret)
|
||||
if "rpm" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop rpm deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm") \
|
||||
+ utils.glob_path("rpm/builddir/RPMS/noarch/*.rpm"),
|
||||
"linux/rhel/%s/" % common.channel,
|
||||
"CentOS"
|
||||
)
|
||||
utils.set_summary("desktop rpm deploy", ret)
|
||||
if "suse-rpm" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop suse-rpm deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm") \
|
||||
+ utils.glob_path("suse-rpm/builddir/RPMS/noarch/*.rpm"),
|
||||
"linux/suse/%s/" % common.channel,
|
||||
"SUSE Linux"
|
||||
)
|
||||
utils.set_summary("desktop suse-rpm deploy", ret)
|
||||
if "apt-rpm" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop apt-rpm deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm") \
|
||||
+ utils.glob_path("apt-rpm/builddir/RPMS/noarch/*.rpm"),
|
||||
"linux/altlinux/%s/" % common.channel,
|
||||
"ALT Linux"
|
||||
)
|
||||
utils.set_summary("desktop apt-rpm deploy", ret)
|
||||
if "urpmi" in branding.desktop_make_targets:
|
||||
utils.log_h2("desktop urpmi deploy")
|
||||
ret = aws_s3_upload(
|
||||
utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm") \
|
||||
+ utils.glob_path("urpmi/builddir/RPMS/noarch/*.rpm"),
|
||||
"linux/rosa/%s/" % common.channel,
|
||||
"ROSA"
|
||||
)
|
||||
utils.set_summary("desktop urpmi deploy", ret)
|
||||
else:
|
||||
if "tar" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop tar deploy", False)
|
||||
if "deb" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop deb deploy", False)
|
||||
if "deb-astra" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop deb-astra deploy", False)
|
||||
if "rpm" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop rpm deploy", False)
|
||||
if "suse-rpm" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop suse-rpm deploy", False)
|
||||
if "apt-rpm" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop apt-rpm deploy", False)
|
||||
if "urpmi" in branding.desktop_make_targets:
|
||||
utils.set_summary("desktop urpmi deploy", False)
|
||||
|
||||
utils.set_cwd(common.workspace_dir)
|
||||
return
|
||||
45
scripts/package_mobile.py
Normal file
45
scripts/package_mobile.py
Normal file
@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make():
    """Entry point for the mobile (Android) package build; Linux hosts only."""
    utils.log_h1("MOBILE")
    if utils.is_linux():
        make_mobile()
    else:
        utils.log("Unsupported host OS")
    return
|
||||
|
||||
def make_mobile():
    # Zip the Android build output in build_tools/out and, when deploying,
    # push the archive to the branding S3 bucket and record it as deploy data.
    utils.set_cwd("build_tools/out")

    if common.clean:
        # Drop archives from previous runs before repacking.
        utils.log_h2("mobile clean")
        utils.sh("rm -rfv *.zip", verbose=True)

    zip_file = "build-" + common.version + "-" + common.build + ".zip"
    s3_key = "mobile/android/%s/%s" % (common.channel, zip_file)

    utils.log_h2("mobile build")
    # Pack every android* directory plus the js output into one archive.
    ret = utils.sh("zip -r " + zip_file + " ./android* ./js", verbose=True)
    utils.set_summary("mobile build", ret)

    if common.deploy:
        utils.log_h2("mobile deploy")
        if ret:
            # Upload only if the build step succeeded; ret is reused so the
            # deploy summary reflects the upload result.
            ret = utils.sh(
                "aws s3 cp --acl public-read --no-progress " \
                + zip_file + " s3://" + branding.s3_bucket + "/" + s3_key,
                verbose=True
            )
            if ret:
                utils.add_deploy_data(
                    "mobile", "Android", zip_file, s3_key,
                    branding.s3_bucket, branding.s3_region
                )
        utils.set_summary("mobile deploy", ret)

    utils.set_cwd(common.workspace_dir)
    return
|
||||
145
scripts/package_server.py
Normal file
145
scripts/package_server.py
Normal file
@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import package_utils as utils
|
||||
import package_common as common
|
||||
import package_branding as branding
|
||||
|
||||
def make(edition):
    """Dispatch the Document Server package build for *edition* to the host OS."""
    utils.log_h1("SERVER (%s)" % edition.upper())
    if utils.is_windows():
        make_windows(edition)
        return
    if utils.is_linux():
        make_linux(edition)
        return
    utils.log("Unsupported host OS")
    return
|
||||
|
||||
def aws_s3_upload(files, key, edition, ptype=None):
    """Upload *files* under server/<key> in the branding S3 bucket.

    Returns False for an empty file list, otherwise True only if every
    upload succeeds. When *ptype* is given, each successful upload is also
    recorded via utils.add_deploy_data.
    """
    if not files:
        return False
    ret = True
    key = "server/" + key
    for path in files:
        args = ["aws"]
        if hasattr(branding, "s3_endpoint_url"):
            args.append("--endpoint-url=" + branding.s3_endpoint_url)
        args += [
            "s3", "cp", "--no-progress", "--acl", "public-read",
            path, "s3://" + branding.s3_bucket + "/" + key
        ]
        # Windows runs the CLI directly; other hosts go through a shell line.
        if common.os_family == "windows":
            upload = utils.cmd(*args, verbose=True)
        else:
            upload = utils.sh(" ".join(args), verbose=True)
        ret &= upload
        if upload and ptype is not None:
            full_key = key
            if full_key.endswith("/"):
                full_key += utils.get_basename(path)
            utils.add_deploy_data(
                "server_" + edition, ptype, path, full_key,
                branding.s3_bucket, branding.s3_region
            )
    return ret
|
||||
|
||||
def make_windows(edition):
    """Build the Windows Document Server installer and optionally deploy it."""
    product_names = {
        "enterprise": "DocumentServer-EE",
        "developer": "DocumentServer-DE",
    }
    product_name = product_names.get(edition, "DocumentServer")
    utils.set_cwd("document-server-package")

    utils.log_h2("server " + edition + " build")
    ret = utils.cmd("make", "clean", verbose=True)
    make_args = ["-e", "PRODUCT_NAME=" + product_name]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
    ret &= utils.cmd("make", "packages", *make_args, verbose=True)
    utils.set_summary("server " + edition + " build", ret)

    if common.deploy and ret:
        utils.log_h2("server " + edition + " inno deploy")
        ret = aws_s3_upload(
            utils.glob_path("exe/*.exe"),
            "win/inno/%s/" % common.channel,
            edition,
            "Installer"
        )
        utils.set_summary("server " + edition + " inno deploy", ret)

    utils.set_cwd(common.workspace_dir)
    return
|
||||
|
||||
def make_linux(edition):
    # Build the Linux Document Server packages (deb/rpm/apt-rpm/tar, as
    # selected by branding.server_make_targets) and optionally deploy each
    # package kind to its distro-specific S3 prefix.
    if edition == "enterprise":
        product_name = "documentserver-ee"
    elif edition == "developer":
        product_name = "documentserver-de"
    else:
        product_name = "documentserver"
    utils.set_cwd("document-server-package")

    utils.log_h2("server " + edition + " build")
    make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name]
    if common.platform == "linux_aarch64":
        make_args += ["-e", "UNAME_M=aarch64"]
    if not branding.onlyoffice:
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
    ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
    utils.set_summary("server " + edition + " build", ret)

    # RPM build output lands in an architecture-named subdirectory.
    rpm_arch = "x86_64"
    if common.platform == "linux_aarch64": rpm_arch = "aarch64"

    if common.deploy:
        utils.log_h2("server " + edition + " deploy")
        if ret:
            if "deb" in branding.server_make_targets:
                utils.log_h2("server " + edition + " deb deploy")
                ret = aws_s3_upload(
                    utils.glob_path("deb/*.deb"),
                    "linux/debian/%s/" % common.channel,
                    edition,
                    "Debian"
                )
                utils.set_summary("server " + edition + " deb deploy", ret)
            if "rpm" in branding.server_make_targets:
                utils.log_h2("server " + edition + " rpm deploy")
                ret = aws_s3_upload(
                    utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
                    "linux/rhel/%s/" % common.channel,
                    edition,
                    "CentOS"
                )
                utils.set_summary("server " + edition + " rpm deploy", ret)
            if "apt-rpm" in branding.server_make_targets:
                utils.log_h2("server " + edition + " apt-rpm deploy")
                ret = aws_s3_upload(
                    utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"),
                    "linux/altlinux/%s/" % common.channel,
                    edition,
                    "ALT Linux"
                )
                utils.set_summary("server " + edition + " apt-rpm deploy", ret)
            if "tar" in branding.server_make_targets:
                # NOTE(review): the tar archive is labelled/deployed as "Snap"
                # here — confirm this naming is intentional.
                utils.log_h2("server " + edition + " snap deploy")
                ret = aws_s3_upload(
                    utils.glob_path("*.tar.gz"),
                    "linux/generic/%s/" % common.channel,
                    edition,
                    "Snap"
                )
                utils.set_summary("server " + edition + " snap deploy", ret)
        else:
            # Build failed: mark every selected deploy step as failed too.
            if "deb" in branding.server_make_targets:
                utils.set_summary("server " + edition + " deb deploy", False)
            if "rpm" in branding.server_make_targets:
                utils.set_summary("server " + edition + " rpm deploy", False)
            if "apt-rpm" in branding.server_make_targets:
                utils.set_summary("server " + edition + " apt-rpm deploy", False)
            if "tar" in branding.server_make_targets:
                utils.set_summary("server " + edition + " snap deploy", False)

    utils.set_cwd(common.workspace_dir)
    return
|
||||
382
scripts/package_utils.py
Normal file
382
scripts/package_utils.py
Normal file
@ -0,0 +1,382 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import codecs
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import package_common as common
|
||||
|
||||
def host_platform():
    """Lower-cased OS name of the build host ("windows", "linux", "darwin")."""
    system_name = platform.system()
    return system_name.lower()
|
||||
|
||||
def is_windows():
    """True when the build host runs Windows."""
    return "windows" == host_platform()
|
||||
|
||||
def is_macos():
    """True when the build host runs macOS (platform reports "darwin")."""
    return "darwin" == host_platform()
|
||||
|
||||
def is_linux():
    """True when the build host runs Linux."""
    return "linux" == host_platform()
|
||||
|
||||
def log(string, end='\n'):
    """Write *string* (terminated by *end*) to stdout and flush immediately."""
    print(string, end=end, file=sys.stdout, flush=True)
    return
|
||||
|
||||
def log_h1(string):
    """Log *string* as a top-level banner framed by '#' rules."""
    rule = "#" * (len(string) + 8)
    log("\n%s\n### %s ###\n%s\n" % (rule, string, rule))
    return
|
||||
|
||||
def log_h2(string):
    """Log *string* as a second-level section heading."""
    log("\n### %s\n" % string)
    return
|
||||
|
||||
def log_h3(string):
    """Log *string* as a third-level heading."""
    log("# %s" % string)
    return
|
||||
|
||||
def log_err(string):
    """Log *string* as an error marker line ("!!! ..." prefix)."""
    log("!!! %s" % string)
    return
|
||||
|
||||
def get_timestamp():
    """Current UNIX time rounded to whole seconds, as a decimal string."""
    return format(time.time(), ".0f")
|
||||
|
||||
def get_env(key, default=None):
    """Value of environment variable *key*, or *default* when unset."""
    return os.environ.get(key, default)
|
||||
|
||||
def set_env(key, value):
    """Set environment variable *key* to *value* for this process and children."""
    os.environ.update({key: value})
    return
|
||||
|
||||
def get_cwd():
    """Absolute path of the current working directory."""
    return os.getcwd()
|
||||
|
||||
def set_cwd(path, verbose=True):
    """Change the process working directory to *path*, logging when *verbose*."""
    if verbose:
        log("- change working dir:")
        log(" path: " + path)
    os.chdir(path)
    return
|
||||
|
||||
def get_path(path):
    """Normalize *path* separators for the host OS (backslashes on Windows)."""
    return path.replace("/", "\\") if is_windows() else path
|
||||
|
||||
def get_abspath(path):
    """Absolute, host-normalized form of *path*."""
    native = get_path(path)
    return os.path.abspath(native)
|
||||
|
||||
def get_basename(path):
    """Final path component of *path* (empty for a trailing separator)."""
    return os.path.split(path)[1]
|
||||
|
||||
def get_dirname(path):
    """Directory portion of *path* (everything before the last separator)."""
    return os.path.split(path)[0]
|
||||
|
||||
def get_file_size(path):
    """Size in bytes of the file at *path*."""
    return os.stat(path).st_size
|
||||
|
||||
def get_script_dir(path):
    """Directory containing the (symlink-resolved) script at *path*."""
    resolved = os.path.realpath(path)
    return get_dirname(resolved)
|
||||
|
||||
def is_file(path):
    """True when *path* names an existing regular file."""
    return os.path.isfile(path)
|
||||
|
||||
def is_dir(path):
    """True when *path* names an existing directory."""
    return os.path.isdir(path)
|
||||
|
||||
def is_exist(path):
    """True when *path* exists (file, directory, or resolvable link).

    Simplified from a redundant ``if ...: return True / return False``
    chain to a direct boolean return.
    """
    return os.path.exists(path)
|
||||
|
||||
def glob_path(path):
    """All paths matching the glob pattern *path*, as a list."""
    return list(glob.iglob(path))
|
||||
|
||||
def glob_file(path):
    """First match of glob *path* when it is a regular file, else None.

    Fixed: the original evaluated ``glob.glob(path)`` three separate times
    (test, check, and return); the result is now computed once.
    """
    matches = glob.glob(path)
    if matches and os.path.isfile(matches[0]):
        return matches[0]
    return None
|
||||
|
||||
def get_md5(path):
    """Hex MD5 digest of the file at *path*, or None when it does not exist.

    Fixed: the original leaked the file handle (``open(...)`` with no
    close); the file is now read through a context manager.
    """
    if not os.path.exists(path):
        return None
    md5_hash = hashlib.md5()
    with open(path, "rb") as fh:
        md5_hash.update(fh.read())
    return md5_hash.hexdigest()
|
||||
|
||||
def create_dir(path, verbose=True):
    """Create directory *path* (with parents); log an error if it already exists."""
    if verbose:
        log("- create_dir:")
        log(" path: " + path)
    if is_exist(path):
        log_err("dir exist")
    else:
        os.makedirs(path)
    return
|
||||
|
||||
def write_file(path, data, encoding='utf-8', verbose=True):
    """(Re)write *data* to *path* using *encoding*; an existing file is deleted first."""
    if is_file(path):
        delete_file(path)
    if verbose:
        log("- write_file:")
        log(" path: " + path)
        log(" encoding: " + encoding)
        log(" data: |\n" + data)
    with codecs.open(path, 'w', encoding) as out:
        out.write(data)
    return
|
||||
|
||||
def replace_in_file(path, pattern, text_replace, encoding='utf-8', verbose=True):
    """Regex-substitute *pattern* with *text_replace* across the file at *path*."""
    if verbose:
        log("- replace_in_file:")
        log(" path: " + path)
        log(" pattern: " + pattern)
        log(" replace: " + text_replace)
        log(" encoding: " + encoding)
    native_path = get_path(path)
    with codecs.open(native_path, "r", encoding) as src:
        file_data = src.read()
    file_data = re.sub(pattern, text_replace, file_data)
    # Remove the old file before writing the substituted content back.
    delete_file(path)
    with codecs.open(native_path, "w", encoding) as dst:
        dst.write(file_data)
    return
|
||||
|
||||
def copy_file(src, dst, verbose=True):
    """Copy *src* over *dst* (removing an existing *dst* first), preserving metadata."""
    if verbose:
        log("- copy_file:")
        log(" src: " + src)
        log(" dst: " + dst)
    if is_file(dst):
        delete_file(dst)
    if not is_file(src):
        log_err("file not exist: " + src)
        return
    return shutil.copy2(get_path(src), get_path(dst))
|
||||
|
||||
def copy_files(src, dst, override=True, verbose=True):
    # Copy everything matching the glob *src* into directory *dst*.
    # Plain files are copied with metadata (shutil.copy2); directories are
    # mirrored recursively. When override is True an existing destination
    # file is deleted first; otherwise it is left untouched.
    if verbose:
        log("- copy_files:")
        log(" src: " + src)
        log(" dst: " + dst)
        log(" override: " + str(override))
    for file in glob.glob(src):
        file_name = os.path.basename(file)
        if is_file(file):
            # Delete a pre-existing copy only when overriding is requested;
            # the copy then happens only if no destination file remains.
            if override and is_file(dst + "/" + file_name):
                delete_file(dst + "/" + file_name)
            if not is_file(dst + "/" + file_name):
                if verbose:
                    log(file + " : " + get_path(dst))
                shutil.copy2(file, get_path(dst))
        elif is_dir(file):
            # Recurse into sub-directories, creating them on demand.
            # NOTE(review): verbose is not forwarded to the recursive call,
            # so nested levels always log — confirm this is intended.
            if not is_dir(dst + "/" + file_name):
                create_dir(dst + "/" + file_name)
            copy_files(file + "/*", dst + "/" + file_name, override)
    return
|
||||
|
||||
def copy_dir(src, dst, override=True, verbose=True):
    """Replace directory *dst* with a full recursive copy of *src*."""
    if verbose:
        log("- copy_dir:")
        log(" src: " + src)
        log(" dst: " + dst)
        log(" override: " + str(override))
    # An existing destination is always removed before copying.
    if is_dir(dst):
        delete_dir(dst)
    try:
        shutil.copytree(get_path(src), get_path(dst))
    except OSError as err:
        log_err('directory not copied. Error: %s' % err)
    return
|
||||
|
||||
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
    # Copy the contents of directory *src* into *dst*, keeping only entries
    # whose basename contains filter_include and skipping those containing
    # filter_exclude (plain substring matches, not glob patterns).
    if verbose:
        log("- copy_dir_content:")
        log(" src: " + src)
        log(" dst: " + dst)
        log(" include: " + filter_include)
        log(" exclude: " + filter_exclude)
    # Build the glob "<src>/*", tolerating a trailing slash on src.
    src_folder = src
    if ("/" != src[-1:]):
        src_folder += "/"
    src_folder += "*"
    for file in glob.glob(src_folder):
        basename = os.path.basename(file)
        if ("" != filter_include) and (-1 == basename.find(filter_include)):
            continue
        if ("" != filter_exclude) and (-1 != basename.find(filter_exclude)):
            continue
        if is_file(file):
            copy_file(file, dst, verbose=False)
        elif is_dir(file):
            # Sub-directories are copied wholesale; the name filters are not
            # applied to their contents.
            copy_dir(file, dst + "/" + basename)
    return
|
||||
|
||||
def delete_file(path, verbose=True):
    """Remove the file at *path*; logs an error when it does not exist."""
    if verbose:
        log("- delete_file:")
        log(" path: " + path)
    if is_file(path):
        return os.remove(path)
    log_err("file not exist")
    return
|
||||
|
||||
def delete_dir(path, verbose=True):
    """Recursively remove directory *path* (errors ignored); logs when missing."""
    if verbose:
        log("- delete_dir:")
        log(" path: " + path)
    if not is_dir(path):
        log_err("dir not exist")
        return
    shutil.rmtree(path, ignore_errors=True)
    return
|
||||
|
||||
def delete_files(src, verbose=True):
    """Delete every file or directory matching the glob pattern *src*."""
    if verbose:
        log("- delete_files:")
        log(" pattern: " + src)
    for target in glob.glob(src):
        if verbose:
            log(target)
        if is_file(target):
            os.remove(target)
        elif is_dir(target):
            shutil.rmtree(target, ignore_errors=True)
    return
|
||||
|
||||
def set_summary(target, status):
    """Record one build-step result ({target: bool}) in the shared summary list."""
    common.summary.append({target: status})
    return
|
||||
|
||||
def add_deploy_data(product, ptype, src, dst, bucket, region):
    """Append one deploy-artifact record and rewrite <workspace>/deploy.json.

    Fixed: deploy.json is now written through a context manager instead of a
    bare open()/close() pair, so the handle is released even when
    json.dumps or write raises.
    """
    common.deploy_data.append({
        "platform": common.platforms[common.platform]["title"],
        "product": product,
        "type": ptype,
        # "local": get_path(src),
        "size": get_file_size(get_path(src)),
        "bucket": bucket,
        "region": region,
        "key": dst
    })
    with open(get_path(common.workspace_dir + "/deploy.json"), 'w') as fh:
        fh.write(json.dumps(common.deploy_data, sort_keys=True, indent=4))
    return
|
||||
|
||||
def cmd(*args, **kwargs):
    """Run *args* as one console command; return True on exit code 0.

    Keyword options: verbose (log the invocation), chdir (run inside a
    directory, restored afterwards), creates (skip and return False when the
    target already exists).
    Fixed: oldcwd is now initialized up front, so restoring the working
    directory no longer raises NameError when chdir is passed but does not
    name an existing directory.
    """
    if kwargs.get("verbose"):
        log("- cmd:")
        log(" command: " + " ".join(args))
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
        if kwargs.get("creates"):
            log(" creates: " + kwargs["creates"])
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        log_err("creates exist")
        return False
    oldcwd = None
    if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
        oldcwd = get_cwd()
        set_cwd(kwargs["chdir"])
    # NOTE(review): a list combined with shell=True only uses the first
    # element as the shell command on POSIX; kept as-is to preserve the
    # existing behavior.
    ret = subprocess.call(
        list(args), stderr=subprocess.STDOUT, shell=True
    ) == 0
    if oldcwd:
        set_cwd(oldcwd)
    return ret
|
||||
|
||||
def cmd_output(*args, **kwargs):
    """Run *args* as a console command and return its decoded combined output.

    stderr is merged into the returned text; a non-zero exit raises
    subprocess.CalledProcessError.
    """
    if kwargs.get("verbose"):
        log("- cmd_output:")
        log(" command: " + " ".join(args))
    raw = subprocess.check_output(
        list(args), stderr=subprocess.STDOUT, shell=True
    )
    return raw.decode("utf-8")
|
||||
|
||||
def powershell(*args, **kwargs):
    """Run *args* through `powershell -Command`; True on exit code 0 (Windows hosts)."""
    if kwargs.get("verbose"):
        log("- powershell:")
        log(" command: " + " ".join(args))
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
        if kwargs.get("creates"):
            log(" creates: " + kwargs["creates"])
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        return False
    full_cmd = ["powershell", "-Command"] + list(args)
    return subprocess.call(full_cmd, stderr=subprocess.STDOUT, shell=True) == 0
|
||||
|
||||
def ps1(file, args=[], **kwargs):
    """Invoke the PowerShell script *file* with *args*; True on exit code 0.

    When kwargs["creates"] names an existing path the call is skipped and
    True is returned. (*args* defaults to a shared list but is never
    mutated here.)
    """
    if kwargs.get("verbose"):
        log_h2("powershell cmdlet: " + file + " " + " ".join(args))
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        return True
    return subprocess.call(
        ["powershell", file] + args, stderr=subprocess.STDOUT, shell=True
    ) == 0
|
||||
|
||||
def download_file(url, path, md5, verbose=False):
    """Download *url* to *path* via PowerShell and verify its MD5 checksum.

    Returns True on success, or immediately when a file with a matching
    checksum already exists; a stale file with a wrong checksum is deleted
    and re-downloaded.
    Fixed: the verbose output logged the url under the "path" label and the
    path under the "url" label; the labels now match their values.
    """
    if verbose:
        log("- download_file:")
        log(" url: " + url)
        log(" path: " + path)
        log(" md5: " + md5)
    if is_file(path):
        if get_md5(path) == md5:
            log_err("file already exist (match checksum)")
            return True
        log_err("wrong checksum (%s), delete" % md5)
        os.remove(path)
    ret = powershell(
        "(New-Object System.Net.WebClient).DownloadFile('%s','%s')" % (url, path),
        verbose=True
    )
    md5_new = get_md5(path)
    if md5 != md5_new:
        log_err("checksum didn't match (%s != %s)" % (md5, md5_new))
        return False
    return ret
|
||||
|
||||
def sh(command, **kwargs):
    """Run the string *command* through the shell; return True on exit code 0.

    Keyword options mirror cmd(): verbose, chdir, creates.
    Fixed: oldcwd is initialized up front, so the restore step cannot raise
    NameError when chdir is passed but is not an existing directory.
    """
    if kwargs.get("verbose"):
        log("- sh:")
        log(" command: " + command)
        if kwargs.get("chdir"):
            log(" chdir: " + kwargs["chdir"])
        if kwargs.get("creates"):
            log(" creates: " + kwargs["creates"])
    if kwargs.get("creates") and is_exist(kwargs["creates"]):
        log_err("creates exist")
        return False
    oldcwd = None
    if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
        oldcwd = get_cwd()
        set_cwd(kwargs["chdir"])
    ret = subprocess.call(
        command, stderr=subprocess.STDOUT, shell=True
    ) == 0
    if oldcwd:
        set_cwd(oldcwd)
    return ret
|
||||
|
||||
def sh_output(command, **kwargs):
    """Run *command* through the shell and return its decoded combined output."""
    if kwargs.get("verbose"):
        log("- sh_output:")
        log(" command: " + command)
    raw = subprocess.check_output(
        command, stderr=subprocess.STDOUT, shell=True
    )
    return raw.decode("utf-8")
|
||||
206
scripts/sdkjs_common/generate_builder_intarface.py
Normal file
206
scripts/sdkjs_common/generate_builder_intarface.py
Normal file
@ -0,0 +1,206 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import shutil
|
||||
import re
|
||||
def readFile(path):
    """Return the text content of *path*, replacing undecodable bytes."""
    with open(path, "r", errors='replace') as handle:
        return handle.read()
|
||||
|
||||
def writeFile(path, content):
    """Write *content* to *path*, replacing any existing file."""
    if os.path.isfile(path):
        os.remove(path)
    with open(path, "w") as handle:
        handle.write(content)
    return
|
||||
|
||||
class EditorApi(object):
    # Builds the "builder interface" api.js for one editor (word/slide/cell)
    # by scanning apiBuilder.js sources for JSDoc-decorated records and
    # rewriting them as stub methods on an ApiInterface object.

    def __init__(self):
        self.records = []      # accumulated "decoration + stub code" strings
        self.init = False      # True once the ApiInterface bootstrap was emitted
        self.folder = "word"   # output subfolder under deploy/api_builder
        self.type = "CDE"      # editor tag used in @typeofeditors filtering
        self.numfile = 0       # index of the source file being processed
        self.files = []        # list of apiBuilder.js paths to scan
        return

    def initFiles(self, type, files):
        # Select the editor kind ("word"/"slide"/other -> cell) and remember
        # the source file list. CDE/CPE/CSE are the @typeofeditors tags.
        self.folder = type
        if "word" == self.folder:
            self.type = "CDE"
        elif "slide" == self.folder:
            self.type = "CPE"
        else:
            self.type = "CSE"
        self.files = files
        return

    def getReturnValue(self, description):
        # Derive a JS stub body "{ return <value>; }" from the @returns tag
        # in the JSDoc *description*; "{}" when no @returns tag is present.
        paramStart = description.find("@returns {")
        if -1 == paramStart:
            return "{}"
        paramEnd = description.find("}", paramStart)
        retParam = description[paramStart + 10:paramEnd]
        isArray = False
        if -1 != retParam.find("[]"):
            isArray = True
            retParam = retParam.replace("[]", "")
        # First alternative of a union type ("A|B" or dotted name) wins.
        retType = retParam.replace("|", " ").replace(".", " ").split(" ")[0]
        retTypeLower = retType.lower()
        retValue = ""
        if -1 != retType.find("\""):
            retValue = "\"\""
        elif "boolean" == retTypeLower or "bool" == retTypeLower:
            retValue = "true"
        elif "string" == retTypeLower:
            retValue = "\"\""
        elif "number" == retTypeLower:
            retValue = "0"
        elif "undefined" == retTypeLower:
            retValue = "undefined"
        elif "null" == retTypeLower:
            retValue = "null"
        elif "array" == retTypeLower:
            retValue = "[]"
        elif "base64img" == retTypeLower:
            retValue = "base64img"
        elif "error" == retTypeLower:
            retValue = "undefined"
        else:
            # Assume a class type and fabricate an instance.
            retValue = "new " + retType + "()"
        if isArray:
            retValue = "[" + retValue + "]"
        return "{ return " + retValue + "; }"

    def check_record(self, recordData):
        # Process one "/** ... */ <code>" record: normalize whitespace,
        # rewrite Api -> ApiInterface, reduce the code to stubs, and append.
        rec = recordData
        rec = rec.replace("\t", "")
        rec = rec.replace('\n ', '\n')
        indexEndDecoration = rec.find("*/")

        # Detect Object.defineProperty(...) records and capture the quoted
        # property name (kept with its surrounding quotes).
        indexOfStartPropName = rec.find('Object.defineProperty(')
        if indexOfStartPropName != -1:
            propName = re.search(r'"([^\"]*)"', rec[indexOfStartPropName:])[0]
        else:
            propName = None

        # The JSDoc block, retargeted from Api to ApiInterface and with
        # @return/@returns and nullable-type markers normalized.
        decoration = "/**" + rec[0:indexEndDecoration + 2]
        decoration = decoration.replace("Api\n", "ApiInterface\n")
        decoration = decoration.replace("Api ", "ApiInterface ")
        decoration = decoration.replace("{Api}", "{ApiInterface}")
        decoration = decoration.replace("@return ", "@returns ")
        decoration = decoration.replace("@returns {?", "@returns {")
        decoration = decoration.replace("?}", "}")
        if -1 != decoration.find("@name ApiInterface"):
            # Bootstrap record: emit the ApiInterface/Api declarations once.
            self.append_record(decoration, "var ApiInterface = function() {};\nvar Api = new ApiInterface();\n", True)
            return
        code = rec[indexEndDecoration + 2:]
        code = code.replace("=\n", "= ").strip("\t\n\r ")
        lines = code.split("\n")
        codeCorrect = ""
        sMethodName = re.search(r'.prototype.(.*)=', code)

        is_found_function = False
        addon_for_func = "{}"
        if -1 != decoration.find("@return"):
            addon_for_func = "{ return null; }"

        for line in lines:
            # Strip braces so only the declaration heads survive as stubs.
            line = line.strip("\t\n\r ")
            line = line.replace("{", "")
            line = line.replace("}", "")
            lineWithoutSpaces = line.replace(" ", "")
            if not is_found_function and 0 == line.find("function "):
                codeCorrect += (line + addon_for_func + "\n")
                is_found_function = True
            if not is_found_function and -1 != line.find(".prototype."):
                codeCorrect += (line + self.getReturnValue(decoration) + ";\n")
                is_found_function = True
            if -1 != lineWithoutSpaces.find(".prototype="):
                codeCorrect += (line + "\n")
            if -1 != line.find(".prototype.constructor"):
                codeCorrect += (line + "\n")
        codeCorrect = codeCorrect.replace("Api.prototype", "ApiInterface.prototype")
        self.append_record(decoration, codeCorrect)
        className = codeCorrect[0:codeCorrect.find('.')]

        # property is defined right below a method (without its own doc block)
        if propName is not None and sMethodName is not None:
            prop_define = f'{className}.prototype.{propName[1:-1]} = {className}.prototype.{sMethodName.group(1)}();\n'
            self.append_record(decoration, prop_define)
        # otherwise: a standalone defineProperty record
        elif propName is not None:
            className = re.search(r'.defineProperty\((.*).prototype', code).group(1).strip()
            returnValue = 'undefined' if decoration.find('@return') == -1 else self.getReturnValue(decoration)
            if (returnValue != 'undefined'):
                returnValue = re.search(r'{ return (.*); }', returnValue).group(1).strip()
            prop_define = f'{className}.prototype.{propName[1:-1]} = {returnValue};\n'
            self.append_record(decoration, prop_define)
        return

    def append_record(self, decoration, code, init=False):
        # Store one decorated stub, filtering private members and records
        # not tagged for the current editor; later files override earlier
        # definitions of the same function/class.
        if init:
            if not self.init:
                self.init = True
                self.records.append(decoration + "\n" + code + "\n\n")
            return
        # check on private
        if -1 != code.find(".prototype.private_"):
            return
        # add records only for current editor
        index_type_editors = decoration.find("@typeofeditors")
        if -1 != index_type_editors:
            index_type_editors_end = decoration.find("]", index_type_editors)
            if -1 != index_type_editors_end:
                editors_support = decoration[index_type_editors:index_type_editors_end]
                if -1 == editors_support.find(self.type):
                    return
        # optimizations for first file
        if 0 == self.numfile:
            self.records.append(decoration + "\n" + code + "\n")
            return
        # check override js classes
        if 0 == code.find("function "):
            index_end_name = code.find("(")
            function_name = code[9:index_end_name].strip(" ")
            for rec in range(len(self.records)):
                # Blank out any earlier record for the same constructor or
                # its prototype members — the later file wins.
                if -1 != self.records[rec].find("function " + function_name + "("):
                    self.records[rec] = ""
                elif -1 != self.records[rec].find("function " + function_name + " ("):
                    self.records[rec] = ""
                elif -1 != self.records[rec].find("\n" + function_name + ".prototype."):
                    self.records[rec] = ""

        self.records.append(decoration + "\n" + code + "\n")
        return

    def generate(self):
        # Scan every source file, split on "/**" doc markers (dropping the
        # preamble and the trailing fragment), and write the combined api.js
        # into deploy/api_builder/<folder>/.
        for file in self.files:
            file_content = readFile(file)
            arrRecords = file_content.split("/**")
            arrRecords = arrRecords[1:-1]
            for record in arrRecords:
                self.check_record(record)
            self.numfile += 1
        correctContent = ''.join(self.records)
        correctContent += "\n"
        os.mkdir('deploy/api_builder/' + self.folder)
        writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
        return
|
||||
|
||||
def convert_to_interface(arrFiles, sEditorType):
    """Generate the builder-interface api.js for one editor from *arrFiles*."""
    editor = EditorApi()
    editor.initFiles(sEditorType, arrFiles)
    editor.generate()
    return
|
||||
|
||||
# Script body: regenerate deploy/api_builder inside the sibling sdkjs
# checkout for all three editors, then restore the working directory.
old_cur = os.getcwd()
os.chdir("../../../sdkjs")
# Start from a clean output tree on every run.
if True == os.path.isdir('deploy/api_builder'):
    shutil.rmtree('deploy/api_builder', ignore_errors=True)
os.mkdir('deploy/api_builder')
# Each editor sees its own sources plus those of the editors it builds on
# (slide includes word; cell includes word and slide).
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)
|
||||
8
scripts/update_js.py
Normal file
8
scripts/update_js.py
Normal file
@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import config
|
||||
import base
|
||||
import build_js
|
||||
|
||||
# Parse the build configuration, then (re)build only the JS components.
config.parse()
build_js.make()
|
||||
26
tools/common/README.md
Normal file
26
tools/common/README.md
Normal file
@ -0,0 +1,26 @@
|
||||
## Overview
|
||||
|
||||
**change_autor.py** is a tool for changing the author and last-modified-by fields of all documents in a folder.
|
||||
|
||||
## How to use
|
||||
|
||||
1. Place the files to be changed in a folder, e.g. **input**.
|
||||
2. Create a folder in which the modified files will be stored, e.g. **output**.
|
||||
3. Call the file *change_autor.py* as shown below.
|
||||
|
||||
```bash
|
||||
change_autor.py path_to_input_folder path_to_output_folder new_author_name
|
||||
```
|
||||
|
||||
________________________
|
||||
**convert_directory.py** is a tool that converts all files in a directory to the specified format.
|
||||
|
||||
## How to use
|
||||
|
||||
1. Place the files to be changed in a folder, e.g. **input**.
|
||||
2. Create a folder in which the converted files will be stored, e.g. **output**.
|
||||
3. Call the file *convert_directory.py* as shown below.
|
||||
|
||||
```bash
|
||||
convert_directory.py path_to_builder_directory path_to_input_folder path_to_output_folder format_ext
|
||||
```
|
||||
86
tools/common/change_autor.py
Normal file
86
tools/common/change_autor.py
Normal file
@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import shutil
|
||||
|
||||
# Command-line arguments: input directory, output directory, new author name.
params = sys.argv[1:]

if (3 > len(params)):
    print("use: change_autor.py path_to_input_files_directory path_to_output_files_directory author_name")
    exit(0)

cur_path = os.getcwd()
# Make the bundled 7z/7za tools available via base.cmd_exe.
base.configure_common_apps()

directory_input = params[0].replace("\\", "/")
directory_output = params[1].replace("\\", "/")
author_name = params[2]

# Collect every entry of the input directory with forward-slash paths.
input_files = []
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
    input_files.append(file.replace("\\", "/"))

# Scratch directory used to extract and repack docProps/core.xml.
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
|
||||
|
||||
def _set_core_tag(data, open_tag, close_tag, value):
    """Return *data* with the text of the given core-properties tag set to *value*.

    Handles the three shapes found in docProps/core.xml: a populated tag
    (its content is replaced), a self-closing tag (expanded to a populated
    one), and a missing tag (inserted just before </cp:coreProperties>).
    NOTE(review): *value* is inserted verbatim — XML special characters in
    the author name are not escaped; confirm inputs are plain text.
    """
    # Self-closing form of open_tag, e.g. "<dc:creator>" -> "<dc:creator/>".
    self_closing = open_tag[:-1] + "/>"
    open_pos = data.find(open_tag)
    if open_pos == -1:
        close_to_find = self_closing
    else:
        close_to_find = close_tag
    close_pos = data.find(close_to_find)
    end_pos = data.find("</cp:coreProperties>")
    if open_pos != -1 and close_pos != -1:
        # Populated tag: replace the text between open and close tags.
        return data[:open_pos + len(open_tag)] + value + data[close_pos:]
    elif close_pos != -1:
        # Self-closing tag: expand it into a populated tag.
        return data[:close_pos] + open_tag + value + close_tag + data[close_pos + len(close_to_find):]
    # Tag absent: insert it before the document end tag.
    return data[:end_pos] + open_tag + value + close_tag + data[end_pos:]


def change_author_name(file_dist, output_file, author_name):
    """Copy *file_dist* to *output_file* with creator and lastModifiedBy set.

    Extracts docProps/core.xml with 7z, rewrites both author tags via
    _set_core_tag (the original duplicated this block of string surgery for
    each tag), and repacks the edited docProps into the copied document.
    """
    app = "7za" if ("mac" == base.host_platform()) else "7z"
    base.cmd_exe(app, ["x", "-y", file_dist, "-o" + temp_dir, "docProps\\core.xml", "-r"])

    core_path = temp_dir + "/docProps/core.xml"
    with open(core_path, 'r') as file:
        data = file.read()

    data = _set_core_tag(data, "<dc:creator>", "</dc:creator>", author_name)
    data = _set_core_tag(data, "<cp:lastModifiedBy>", "</cp:lastModifiedBy>", author_name)

    with open(core_path, 'w') as file:
        file.write(data)

    shutil.copyfile(file_dist, output_file)
    base.cmd_exe(app, ["a", "-r", output_file, temp_dir + "\\docProps"])
|
||||
|
||||
# Process every collected input file, rebuilding the scratch directory for
# each one so leftover docProps never leak between documents.
output_len = len(input_files)
output_cur = 1
for input_file in input_files:
    if base.is_dir(temp_dir):
        base.delete_dir(temp_dir)
    base.create_dir(temp_dir)
    print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
    # Output file keeps the input's base name and extension, placed in the
    # output directory.
    output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + input_file.split(".")[-1]
    change_author_name(input_file, output_file, author_name)
    base.delete_dir(temp_dir)
    output_cur += 1
||||
29
tools/common/convert.py
Normal file
29
tools/common/convert.py
Normal file
@ -0,0 +1,29 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import convert_common
|
||||
|
||||
params = sys.argv[1:]
|
||||
|
||||
if (3 > len(params)):
|
||||
print("use: convert.py path_to_builder_directory path_to_input_file path_to_output_file [params]")
|
||||
exit(0)
|
||||
|
||||
base.configure_common_apps()
|
||||
|
||||
directory_x2t = params[0].replace("\\", "/")
|
||||
file_input = params[1].replace("\\", "/")
|
||||
file_output = params[2].replace("\\", "/")
|
||||
convert_params = ""
|
||||
if 4 == len(params):
|
||||
convert_params = params[3]
|
||||
|
||||
directory_fonts = directory_x2t + "/sdkjs/common"
|
||||
if not base.is_file(directory_fonts + "/AllFonts.js"):
|
||||
base.cmd_in_dir(directory_x2t, "docbuilder", [], True)
|
||||
|
||||
convert_common.convertFile(directory_x2t, file_input, file_output, convert_params)
|
||||
153
tools/common/convert_common.py
Normal file
153
tools/common/convert_common.py
Normal file
@ -0,0 +1,153 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
from xml.sax.saxutils import escape
|
||||
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT = 0x0040
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCX = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0001
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOC = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0002
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_ODT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0003
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_RTF = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0004
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_TXT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0005
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_HTML = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0006
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_MHT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0007
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_EPUB = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0008
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_FB2 = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0009
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_MOBI = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x000a
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCM = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x000b
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOTX = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x000c
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOTM = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x000d
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_ODT_FLAT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x000e
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_OTT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x000f
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOC_FLAT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0010
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCX_FLAT = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0011
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_HTML_IN_CONTAINER = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0012
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCX_PACKAGE = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0014
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_OFORM = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0015
|
||||
AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCXF = AVS_OFFICESTUDIO_FILE_DOCUMENT + 0x0016
|
||||
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION = 0x0080
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_PPTX = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0001
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_PPT = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0002
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_ODP = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0003
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_PPSX = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0004
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_PPTM = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0005
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_PPSM = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0006
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_POTX = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0007
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_POTM = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0008
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_ODP_FLAT = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x0009
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_OTP = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x000a
|
||||
AVS_OFFICESTUDIO_FILE_PRESENTATION_PPTX_PACKAGE = AVS_OFFICESTUDIO_FILE_PRESENTATION + 0x000b
|
||||
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET = 0x0100
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSX = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0001
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLS = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0002
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_ODS = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0003
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_CSV = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0004
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSM = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0005
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLTX = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0006
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLTM = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0007
|
||||
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSB = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0008
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_ODS_FLAT = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x0009
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_OTS = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x000a
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSX_FLAT = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x000b
|
||||
AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSX_PACKAGE = AVS_OFFICESTUDIO_FILE_SPREADSHEET + 0x000c
|
||||
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM = 0x0200
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_PDF = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0001
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_SWF = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0002
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_DJVU = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0003
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_XPS = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0004
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_SVG = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0005
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_HTMLR = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0006
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_HTMLRMenu = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0007
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_HTMLRCanvas = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0008
|
||||
AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_PDFA = AVS_OFFICESTUDIO_FILE_CROSSPLATFORM + 0x0009
|
||||
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE = 0x0400
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_JPG = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0001
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_TIFF = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0002
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_TGA = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0003
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_GIF = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0004
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_PNG = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0005
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_EMF = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0006
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_WMF = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0007
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_BMP = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0008
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_CR2 = AVS_OFFICESTUDIO_FILE_IMAGE + 0x0009
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_PCX = AVS_OFFICESTUDIO_FILE_IMAGE + 0x000a
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_RAS = AVS_OFFICESTUDIO_FILE_IMAGE + 0x000b
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_PSD = AVS_OFFICESTUDIO_FILE_IMAGE + 0x000c
|
||||
AVS_OFFICESTUDIO_FILE_IMAGE_ICO = AVS_OFFICESTUDIO_FILE_IMAGE + 0x000d
|
||||
|
||||
EXT_TO_FORMAT = {
|
||||
"docx" : AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCX,
|
||||
"docxf" : AVS_OFFICESTUDIO_FILE_DOCUMENT_DOCXF,
|
||||
"oform" : AVS_OFFICESTUDIO_FILE_DOCUMENT_OFORM,
|
||||
"dotx" : AVS_OFFICESTUDIO_FILE_DOCUMENT_DOTX,
|
||||
"odt" : AVS_OFFICESTUDIO_FILE_DOCUMENT_ODT,
|
||||
"ott" : AVS_OFFICESTUDIO_FILE_DOCUMENT_OTT,
|
||||
"rtf" : AVS_OFFICESTUDIO_FILE_DOCUMENT_RTF,
|
||||
"txt" : AVS_OFFICESTUDIO_FILE_DOCUMENT_TXT,
|
||||
"html" : AVS_OFFICESTUDIO_FILE_DOCUMENT_HTML,
|
||||
"xlsx" : AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLSX,
|
||||
"xltx" : AVS_OFFICESTUDIO_FILE_SPREADSHEET_XLTX,
|
||||
"ods" : AVS_OFFICESTUDIO_FILE_SPREADSHEET_ODS,
|
||||
"ots" : AVS_OFFICESTUDIO_FILE_SPREADSHEET_OTS,
|
||||
"csv" : AVS_OFFICESTUDIO_FILE_SPREADSHEET_CSV,
|
||||
"pptx" : AVS_OFFICESTUDIO_FILE_PRESENTATION_PPTX,
|
||||
"potx" : AVS_OFFICESTUDIO_FILE_PRESENTATION_POTX,
|
||||
"odp" : AVS_OFFICESTUDIO_FILE_PRESENTATION_ODP,
|
||||
"otp" : AVS_OFFICESTUDIO_FILE_PRESENTATION_OTP,
|
||||
"pdf" : AVS_OFFICESTUDIO_FILE_CROSSPLATFORM_PDF,
|
||||
"fb2" : AVS_OFFICESTUDIO_FILE_DOCUMENT_FB2,
|
||||
"epub" : AVS_OFFICESTUDIO_FILE_DOCUMENT_EPUB,
|
||||
"png" : AVS_OFFICESTUDIO_FILE_IMAGE_PNG,
|
||||
"jpg" : AVS_OFFICESTUDIO_FILE_IMAGE_JPG
|
||||
}
|
||||
|
||||
def getFormatByExt(ext):
|
||||
format = 0
|
||||
try:
|
||||
format = EXT_TO_FORMAT[ext]
|
||||
except KeyError as e:
|
||||
raise ValueError('Undefined format: {}'.format(e.args[0]))
|
||||
return format
|
||||
|
||||
def getFormatByFile(file_path):
|
||||
ext = file_path.split(".")[-1]
|
||||
return getFormatByExt(ext)
|
||||
|
||||
def convertFile(directory_x2t, file_input, file_output, convert_params):
|
||||
cur_path = os.getcwd()
|
||||
|
||||
directory_fonts = directory_x2t + "/sdkjs/common"
|
||||
if not base.is_file(directory_fonts + "/AllFonts.js"):
|
||||
base.cmd_in_dir(directory_x2t, "docbuilder", [], True)
|
||||
|
||||
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
|
||||
if base.is_dir(temp_dir):
|
||||
base.delete_dir(temp_dir)
|
||||
base.create_dir(temp_dir)
|
||||
|
||||
xml_convert = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
|
||||
xml_convert += u"<TaskQueueDataConvert>"
|
||||
xml_convert += (u"<m_sFileFrom>" + escape(file_input) + u"</m_sFileFrom>")
|
||||
xml_convert += (u"<m_sFileTo>" + escape(file_output) + u"</m_sFileTo>")
|
||||
xml_convert += u"<m_nFormatTo>" + str(getFormatByFile(file_output)) + u"</m_nFormatTo>"
|
||||
xml_convert += (u"<m_sAllFontsPath>" + directory_fonts + u"/AllFonts.js</m_sAllFontsPath>")
|
||||
xml_convert += (u"<m_sFontDir>" + directory_fonts + "</m_sFontDir>")
|
||||
xml_convert += u"<m_sJsonParams>{"spreadsheetLayout":{"fitToWidth":1,"fitToHeight":1}}</m_sJsonParams>"
|
||||
xml_convert += u"<m_nDoctParams>1</m_nDoctParams>"
|
||||
xml_convert += convert_params
|
||||
xml_convert += (u"<m_sTempDir>" + temp_dir + u"</m_sTempDir>")
|
||||
xml_convert += u"</TaskQueueDataConvert>"
|
||||
base.save_as_script(temp_dir + "/to.xml", [xml_convert])
|
||||
base.cmd_in_dir(directory_x2t, "x2t", [temp_dir + "/to.xml"], True)
|
||||
base.delete_dir(temp_dir)
|
||||
|
||||
os.chdir(cur_path)
|
||||
41
tools/common/convert_directory.py
Normal file
41
tools/common/convert_directory.py
Normal file
@ -0,0 +1,41 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
import convert_common
|
||||
|
||||
params = sys.argv[1:]
|
||||
|
||||
if (4 > len(params)):
|
||||
print("use: convert_directory.py path_to_builder_directory path_to_input_files_directory path_to_output_files_directory format_ext [convert_params]")
|
||||
exit(0)
|
||||
|
||||
cur_path = os.getcwd()
|
||||
base.configure_common_apps()
|
||||
|
||||
directory_x2t = params[0].replace("\\", "/")
|
||||
directory_input = params[1].replace("\\", "/")
|
||||
directory_output = params[2].replace("\\", "/")
|
||||
format_ext = params[3]
|
||||
convert_params = ""
|
||||
if (5 == len(params)):
|
||||
convert_params = params[4]
|
||||
|
||||
input_files = []
|
||||
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
|
||||
input_files.append(file.replace("\\", "/"))
|
||||
|
||||
directory_fonts = directory_x2t + "/sdkjs/common"
|
||||
if not base.is_file(directory_fonts + "/AllFonts.js"):
|
||||
base.cmd_in_dir(directory_x2t, "docbuilder", [], True)
|
||||
|
||||
output_len = len(input_files)
|
||||
output_cur = 1
|
||||
for input_file in input_files:
|
||||
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
|
||||
output_file = os.path.join(directory_output, os.path.splitext(os.path.basename(input_file))[0]) + u"." + format_ext
|
||||
convert_common.convertFile(directory_x2t, input_file, output_file, convert_params)
|
||||
output_cur += 1
|
||||
80
tools/common/thumbnails.py
Normal file
80
tools/common/thumbnails.py
Normal file
@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import glob
|
||||
|
||||
params = sys.argv[1:]
|
||||
|
||||
if (5 != len(params)):
|
||||
print("use: thumbnails.py path_to_builder_directory path_to_input_files_directory path_to_output_files_directory width height")
|
||||
exit(0)
|
||||
|
||||
cur_path = os.getcwd()
|
||||
base.configure_common_apps()
|
||||
|
||||
directory_x2t = params[0].replace("\\", "/")
|
||||
directory_input = params[1].replace("\\", "/")
|
||||
directory_output = params[2].replace("\\", "/")
|
||||
th_width = params[3]
|
||||
th_height = params[4]
|
||||
|
||||
output_dir = directory_output + "/[" + str(th_width) + "x" + str(th_height) + "]"
|
||||
if base.is_dir(output_dir):
|
||||
base.delete_dir(output_dir)
|
||||
base.create_dir(output_dir)
|
||||
|
||||
input_files = []
|
||||
for file in glob.glob(os.path.join(u"" + directory_input, u'*')):
|
||||
input_files.append(file.replace("\\", "/"))
|
||||
|
||||
#print(input_files)
|
||||
temp_dir = os.getcwd().replace("\\", "/") + "/temp"
|
||||
if base.is_dir(temp_dir):
|
||||
base.delete_dir(temp_dir)
|
||||
base.create_dir(temp_dir)
|
||||
|
||||
directory_fonts = directory_x2t + "/sdkjs/common"
|
||||
if not base.is_file(directory_fonts + "/AllFonts.js"):
|
||||
base.cmd_in_dir(directory_x2t, "docbuilder", [], True)
|
||||
|
||||
json_params = "{'spreadsheetLayout':{'fitToWidth':1,'fitToHeight':1},"
|
||||
json_params += "'documentLayout':{'drawPlaceHolders':true,'drawFormHighlight':true,'isPrint':true}}"
|
||||
json_params = json_params.replace("'", """)
|
||||
|
||||
output_len = len(input_files)
|
||||
output_cur = 1
|
||||
for input_file in input_files:
|
||||
print("process [" + str(output_cur) + " of " + str(output_len) + "]: " + str(input_file.encode("utf-8")))
|
||||
output_file = os.path.join(output_dir, os.path.splitext(os.path.basename(input_file))[0])
|
||||
xml_convert = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
|
||||
xml_convert += u"<TaskQueueDataConvert>"
|
||||
xml_convert += (u"<m_sFileFrom>" + input_file + u"</m_sFileFrom>")
|
||||
xml_convert += (u"<m_sFileTo>" + output_file + u".zip</m_sFileTo>")
|
||||
xml_convert += u"<m_nFormatTo>1029</m_nFormatTo>"
|
||||
xml_convert += (u"<m_sAllFontsPath>" + directory_fonts + u"/AllFonts.js</m_sAllFontsPath>")
|
||||
xml_convert += (u"<m_sFontDir>" + directory_fonts + u"</m_sFontDir>")
|
||||
xml_convert += (u"<m_sJsonParams>" + json_params + u"</m_sJsonParams>")
|
||||
xml_convert += u"<m_nDoctParams>1</m_nDoctParams>"
|
||||
xml_convert += u"<m_oThumbnail>"
|
||||
xml_convert += u"<first>false</first>"
|
||||
if ((0 != th_width) and (0 != th_height)):
|
||||
xml_convert += u"<aspect>0</aspect>"
|
||||
xml_convert += (u"<width>" + str(th_width) + u"</width>")
|
||||
xml_convert += (u"<height>" + str(th_height) + u"</height>")
|
||||
xml_convert += u"</m_oThumbnail>"
|
||||
xml_convert += u"<m_nDoctParams>1</m_nDoctParams>"
|
||||
xml_convert += (u"<m_sTempDir>" + temp_dir + u"</m_sTempDir>")
|
||||
xml_convert += u"</TaskQueueDataConvert>"
|
||||
base.save_as_script(temp_dir + "/to.xml", [xml_convert])
|
||||
base.cmd_in_dir(directory_x2t, "x2t", [temp_dir + "/to.xml"], True)
|
||||
base.delete_dir(temp_dir)
|
||||
base.create_dir(temp_dir)
|
||||
base.extract_unicode(output_file + u".zip", output_file)
|
||||
base.delete_file(output_dir + "/" + os.path.splitext(os.path.basename(input_file))[0] + ".zip")
|
||||
output_cur += 1
|
||||
|
||||
base.delete_dir(temp_dir)
|
||||
os.chdir(cur_path)
|
||||
@ -5,6 +5,7 @@ sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
import deps
|
||||
|
||||
def get_branch_name(directory):
|
||||
cur_dir = os.getcwd()
|
||||
@ -12,73 +13,14 @@ def get_branch_name(directory):
|
||||
# detect build_tools branch
|
||||
#command = "git branch --show-current"
|
||||
command = "git symbolic-ref --short -q HEAD"
|
||||
popen = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
current_branch = "master"
|
||||
try:
|
||||
stdout, stderr = popen.communicate()
|
||||
popen.wait()
|
||||
current_branch = stdout.strip().decode("utf-8")
|
||||
finally:
|
||||
popen.stdout.close()
|
||||
popen.stderr.close()
|
||||
current_branch = base.run_command(command)['stdout']
|
||||
os.chdir(cur_dir)
|
||||
return current_branch
|
||||
|
||||
def install_deps():
|
||||
# dependencies
|
||||
packages = ["apt-transport-https",
|
||||
"autoconf2.13",
|
||||
"build-essential",
|
||||
"ca-certificates",
|
||||
"cmake",
|
||||
"curl",
|
||||
"git",
|
||||
"glib-2.0-dev",
|
||||
"libglu1-mesa-dev",
|
||||
"libgtk-3-dev",
|
||||
"libpulse-dev",
|
||||
"libtool",
|
||||
"p7zip-full",
|
||||
"subversion",
|
||||
"gzip",
|
||||
"libasound2-dev",
|
||||
"libatspi2.0-dev",
|
||||
"libcups2-dev",
|
||||
"libdbus-1-dev",
|
||||
"libicu-dev",
|
||||
"libglu1-mesa-dev",
|
||||
"libgstreamer1.0-dev",
|
||||
"libgstreamer-plugins-base1.0-dev",
|
||||
"libx11-xcb-dev",
|
||||
"libxcb*",
|
||||
"libxi-dev",
|
||||
"libxrender-dev",
|
||||
"libxss1"]
|
||||
|
||||
base.cmd("sudo", ["apt-get", "install", "-y"] + packages)
|
||||
|
||||
# nodejs
|
||||
if not base.is_file("./node_js_setup_10.x"):
|
||||
base.download("https://deb.nodesource.com/setup_10.x", "./node_js_setup_10.x")
|
||||
base.cmd("sudo", ["bash", "./node_js_setup_10.x"])
|
||||
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
|
||||
base.cmd("sudo", ["npm", "install", "-g", "npm"])
|
||||
base.cmd("sudo", ["npm", "install", "-g", "grunt-cli"])
|
||||
base.cmd("sudo", ["npm", "install", "-g", "pkg"])
|
||||
|
||||
# java
|
||||
base.cmd("sudo", ["apt-get", "-y", "install", "software-properties-common"])
|
||||
base.cmd("sudo", ["add-apt-repository", "-y", "ppa:openjdk-r/ppa"])
|
||||
base.cmd("sudo", ["apt-get", "update"])
|
||||
base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-8-jdk"])
|
||||
base.cmd("sudo", ["update-alternatives", "--config", "java"])
|
||||
base.cmd("sudo", ["update-alternatives", "--config", "javac"])
|
||||
return
|
||||
|
||||
def install_qt():
|
||||
# qt
|
||||
if not base.is_file("./qt_source_5.9.9.tar.xz"):
|
||||
base.download("http://download.qt.io/official_releases/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz", "./qt_source_5.9.9.tar.xz")
|
||||
base.download("https://download.qt.io/archive/qt/5.9/5.9.9/single/qt-everywhere-opensource-src-5.9.9.tar.xz", "./qt_source_5.9.9.tar.xz")
|
||||
|
||||
if not base.is_dir("./qt-everywhere-opensource-src-5.9.9"):
|
||||
base.cmd("tar", ["-xf", "./qt_source_5.9.9.tar.xz"])
|
||||
@ -116,7 +58,7 @@ def install_qt():
|
||||
|
||||
if not base.is_file("./node_js_setup_10.x"):
|
||||
print("install dependencies...")
|
||||
install_deps()
|
||||
deps.install_deps()
|
||||
|
||||
if not base.is_dir("./qt_build"):
|
||||
print("install qt...")
|
||||
@ -126,6 +68,7 @@ branch = get_branch_name("../..")
|
||||
|
||||
array_args = sys.argv[1:]
|
||||
array_modules = []
|
||||
params = []
|
||||
|
||||
config = {}
|
||||
for arg in array_args:
|
||||
@ -133,6 +76,8 @@ for arg in array_args:
|
||||
indexEq = arg.find("=")
|
||||
if (-1 != indexEq):
|
||||
config[arg[2:indexEq]] = arg[indexEq + 1:]
|
||||
params.append(arg[:indexEq])
|
||||
params.append(arg[indexEq + 1:])
|
||||
else:
|
||||
array_modules.append(arg)
|
||||
|
||||
@ -154,7 +99,7 @@ print("---------------------------------------------")
|
||||
build_tools_params = ["--branch", branch,
|
||||
"--module", modules,
|
||||
"--update", "1",
|
||||
"--qt-dir", os.getcwd() + "/qt_build/Qt-5.9.9"]
|
||||
"--qt-dir", os.getcwd() + "/qt_build/Qt-5.9.9"] + params
|
||||
|
||||
base.cmd_in_dir("../..", "./configure.py", build_tools_params)
|
||||
base.cmd_in_dir("../..", "./make.py")
|
||||
|
||||
9
tools/linux/check_system/check.sh
Executable file
9
tools/linux/check_system/check.sh
Executable file
@ -0,0 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
SCRIPTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
DIR_X2T=$SCRIPTPATH/..
|
||||
$("$DIR_X2T/x2t" &>/dev/null)
|
||||
status=$?
|
||||
|
||||
[ $status -ne 88 ] && $(cp "$SCRIPTPATH/libstdc++.so.6" "$DIR_X2T/libstdc++.so.6")
|
||||
|
||||
BIN
tools/linux/check_system/libstdc++.so.6
Normal file
BIN
tools/linux/check_system/libstdc++.so.6
Normal file
Binary file not shown.
91
tools/linux/deps.py
Executable file
91
tools/linux/deps.py
Executable file
@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
sys.path.append('../../scripts')
|
||||
import base
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
def install_deps():
|
||||
if base.is_file("./packages_complete"):
|
||||
return
|
||||
|
||||
# dependencies
|
||||
packages = ["apt-transport-https",
|
||||
"autoconf2.13",
|
||||
"build-essential",
|
||||
"ca-certificates",
|
||||
"cmake",
|
||||
"curl",
|
||||
"git",
|
||||
"glib-2.0-dev",
|
||||
"libglu1-mesa-dev",
|
||||
"libgtk-3-dev",
|
||||
"libpulse-dev",
|
||||
"libtool",
|
||||
"p7zip-full",
|
||||
"subversion",
|
||||
"gzip",
|
||||
"libasound2-dev",
|
||||
"libatspi2.0-dev",
|
||||
"libcups2-dev",
|
||||
"libdbus-1-dev",
|
||||
"libicu-dev",
|
||||
"libglu1-mesa-dev",
|
||||
"libgstreamer1.0-dev",
|
||||
"libgstreamer-plugins-base1.0-dev",
|
||||
"libx11-xcb-dev",
|
||||
"libxcb*",
|
||||
"libxi-dev",
|
||||
"libxrender-dev",
|
||||
"libxss1",
|
||||
"libncurses5"]
|
||||
|
||||
base.cmd("sudo", ["apt-get", "install", "-y"] + packages)
|
||||
|
||||
# nodejs
|
||||
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
|
||||
nodejs_cur = 0
|
||||
try:
|
||||
nodejs_version = base.run_command('node -v')['stdout']
|
||||
nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
|
||||
nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
|
||||
nodejs_cur = nodejs_cur_version_major * 1000 + nodejs_cur_version_minor
|
||||
print("Installed Node.js version: " + str(nodejs_cur_version_major) + "." + str(nodejs_cur_version_minor))
|
||||
except:
|
||||
nodejs_cur = 1
|
||||
if (nodejs_cur < 10020):
|
||||
print("Node.js version cannot be less 10.20")
|
||||
print("Reinstall")
|
||||
if (base.is_dir("./node_js_setup_10.x")):
|
||||
base.delete_dir("./node_js_setup_10.x")
|
||||
base.cmd("sudo", ["apt-get", "remove", "--purge", "-y", "nodejs"])
|
||||
base.download("https://deb.nodesource.com/setup_10.x", "./node_js_setup_10.x")
|
||||
base.cmd('curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -')
|
||||
base.cmd("sudo", ["bash", "./node_js_setup_10.x"])
|
||||
base.cmd("sudo", ["apt-get", "install", "-y", "nodejs"])
|
||||
base.cmd("sudo", ["npm", "install", "-g", "npm@6"])
|
||||
else:
|
||||
print("OK")
|
||||
base.cmd("sudo", ["apt-get", "-y", "install", "npm", "yarn"], True)
|
||||
base.cmd("sudo", ["npm", "install", "-g", "grunt-cli"])
|
||||
base.cmd("sudo", ["npm", "install", "-g", "pkg"])
|
||||
|
||||
# java
|
||||
java_error = base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-11-jdk"], True)
|
||||
if (0 != java_error):
|
||||
java_error = base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-8-jdk"], True)
|
||||
if (0 != java_error):
|
||||
base.cmd("sudo", ["apt-get", "-y", "install", "software-properties-common"])
|
||||
base.cmd("sudo", ["add-apt-repository", "-y", "ppa:openjdk-r/ppa"])
|
||||
base.cmd("sudo", ["apt-get", "update"])
|
||||
base.cmd("sudo", ["apt-get", "-y", "install", "openjdk-8-jdk"])
|
||||
base.cmd("sudo", ["update-alternatives", "--config", "java"])
|
||||
base.cmd("sudo", ["update-alternatives", "--config", "javac"])
|
||||
|
||||
base.writeFile("./packages_complete", "complete")
|
||||
return
|
||||
|
||||
if __name__ == "__main__":
|
||||
install_deps()
|
||||
|
||||
BIN
tools/win/vswhere/vswhere.exe
Normal file
BIN
tools/win/vswhere/vswhere.exe
Normal file
Binary file not shown.
Reference in New Issue
Block a user