Compare commits

..

5 Commits

163 changed files with 2205 additions and 12549 deletions

View File

@ -3,7 +3,7 @@ name: Bug Report
about: Report an issue with build_tools you've discovered.
---
# Describe your problem:
**Describe your problem**:
*Be clear in your description of the problem.
Open an issue with a descriptive title and a summary in complete sentences.*

View File

@ -1,24 +1,15 @@
name: Markdown Lint
on:
workflow_dispatch:
push:
branches:
- '**'
paths:
- '*.md'
- 'develop/*.md'
- 'scripts/**.md'
- '.markdownlint.jsonc'
name: check
on: [push]
jobs:
markdownlint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: DavidAnson/markdownlint-cli2-action@v16
- uses: actions/checkout@v2
- name: Use Node.js 12
uses: actions/setup-node@v1
with:
globs: |
*.md
develop/*.md
scripts/**.md
node-version: 12
- name: Check *.md files by `markdownlint`
run: |
npm install -g markdownlint-cli
markdownlint *.md develop/*.md

View File

@ -1,88 +0,0 @@
name: Git Operations
on:
workflow_dispatch:
inputs:
operation:
description: 'Operation to perform'
required: true
type: choice
options:
- create
- remove
default: 'create'
branch_name:
description: 'Branch name to create or remove'
required: true
type: string
base_branch:
description: 'Base branch to work from (for create operation)'
required: false
type: string
default: 'develop'
branding:
description: 'Branding name'
required: false
type: string
default: 'onlyoffice'
branding_url:
description: 'Branding repository URL (relative to git host)'
required: false
type: string
default: 'ONLYOFFICE/onlyoffice.git'
jobs:
git-operations:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
path: ONLYOFFICE/build_tools
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
# Install any Python dependencies if requirements.txt exists
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Configure Git
run: |
git config --global user.name "GitHub Actions Bot"
git config --global user.email "actions@github.com"
- name: Run Git Operations
run: |
cd ONLYOFFICE/build_tools/scripts/develop
python git_operations.py ${{ inputs.operation }} "${{ inputs.branch_name }}" \
--base-branch="${{ inputs.base_branch }}" \
--branding="${{ inputs.branding }}" \
--branding-url="${{ inputs.branding_url }}" \
--modules="${{ inputs.modules }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Operation Summary
run: |
echo "## Git Operations Summary" >> $GITHUB_STEP_SUMMARY
echo "- **Operation**: ${{ inputs.operation }}" >> $GITHUB_STEP_SUMMARY
echo "- **Branch Name**: ${{ inputs.branch_name }}" >> $GITHUB_STEP_SUMMARY
echo "- **Base Branch**: ${{ inputs.base_branch }}" >> $GITHUB_STEP_SUMMARY
echo "- **Branding**: ${{ inputs.branding }}" >> $GITHUB_STEP_SUMMARY
echo "- **Branding URL**: ${{ inputs.branding_url }}" >> $GITHUB_STEP_SUMMARY
echo "- **Modules**: ${{ inputs.modules }}" >> $GITHUB_STEP_SUMMARY
if [ "${{ inputs.operation }}" = "remove" ] && [ "${{ inputs.force_remove }}" = "true" ]; then
echo "- **Force Remove**: Yes" >> $GITHUB_STEP_SUMMARY
fi

View File

@ -1,30 +1,24 @@
name: Update hard-coded version
on: workflow_dispatch
on: create
jobs:
update-version:
if: >-
${{ contains(github.ref, 'refs/heads/hotfix/v') ||
contains(github.ref, 'refs/heads/release/v') }}
${{ startsWith(github.event.ref, 'hotfix/') ||
startsWith(github.event.ref, 'release/') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
token: ${{ secrets.PUSH_TOKEN }}
- name: Checkout
uses: actions/checkout@v2.4.0
- name: Set version
run: >-
echo "${{ github.ref_name }}" |
echo "${{ github.event.ref }}" |
awk '{gsub(/.+\/v/,"version=");print;}' >> $GITHUB_ENV
- name: Save version
run: echo "${{ env.version }}" > version
- name: Commit & push changes
uses: EndBug/add-and-commit@v9
- name: Commit changes
uses: EndBug/add-and-commit@v7
with:
author_name: github-actions[bot]
author_email: github-actions[bot]@users.noreply.github.com
message: Update hard-coded version to ${{ env.version }}
message: Update hard-coded version to v${{ env.version }}
add: version

8
.gitignore vendored
View File

@ -7,11 +7,3 @@ config
*.*~
**~
*.DS_Store
.idea
scripts/license_checker/reports
tests/puppeteer/node_modules
tests/puppeteer/work_directory
tests/puppeteer/package.json
tests/puppeteer/package-lock.json
scripts/sdkjs_common/jsdoc/node_modules
scripts/sdkjs_common/jsdoc/package-lock.json

View File

@ -1,5 +0,0 @@
{
"line-length": {
"code_block_line_length": 300
}
}

View File

@ -1,33 +1,15 @@
FROM ubuntu:20.04
FROM ubuntu:14.04
ENV TZ=Etc/UTC
ENV DEBIAN_FRONTEND=noninteractive
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
RUN echo 'keyboard-configuration keyboard-configuration/layoutcode string us' | debconf-set-selections && \
echo 'keyboard-configuration keyboard-configuration/modelcode string pc105' | debconf-set-selections
RUN apt-get -y update && \
apt-get -y install tar \
sudo \
wget
RUN wget https://github.com/Kitware/CMake/releases/download/v3.30.0/cmake-3.30.0-linux-x86_64.tar.gz && \
tar -xzf cmake-3.30.0-linux-x86_64.tar.gz -C /opt && \
ln -s /opt/cmake-3.30.0-linux-x86_64/bin/cmake /usr/local/bin/cmake && \
ln -s /opt/cmake-3.30.0-linux-x86_64/bin/ctest /usr/local/bin/ctest && \
rm cmake-3.30.0-linux-x86_64.tar.gz
apt-get -y install python \
python3 \
sudo
RUN rm /usr/bin/python && ln -s /usr/bin/python2 /usr/bin/python
ADD . /build_tools
WORKDIR /build_tools
RUN mkdir -p /opt/python3 && \
wget -P /opt/python3/ https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/python/python3.tar.gz && \
tar -xzf /opt/python3/python3.tar.gz -C /opt/python3 --strip-components=1
ENV PATH="/opt/python3/bin:${PATH}"
RUN ln -s /opt/python3/bin/python3.10 /usr/bin/python
CMD ["sh", "-c", "cd tools/linux && python3 ./automate.py"]
CMD cd tools/linux && \
python3 ./automate.py

View File

@ -8,12 +8,12 @@ necessary for the compilation process, all the dependencies required for the
correct work, as well as to get the latest version of
**ONLYOFFICE products** source code and build all their components.
**Important!** We can only guarantee the correct work of the products built
from the `master` branch.
**Important!** We can only guarantee the correct work of the products built from
the `master` branch.
## How to use - Linux
**Note**: The solution has been tested on **Ubuntu 16.04**.
**Note**: The solution has been tested on **Ubuntu 14.04**.
### Installing dependencies
@ -196,8 +196,9 @@ LD_LIBRARY_PATH=./ ./DesktopEditors
**Note**: The created database must have **onlyoffice** both for user and password.
```bash
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH PASSWORD 'onlyoffice';"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice OWNER onlyoffice;"
sudo -i -u postgres psql -c "CREATE DATABASE onlyoffice;"
sudo -i -u postgres psql -c "CREATE USER onlyoffice WITH password 'onlyoffice';"
sudo -i -u postgres psql -c "GRANT ALL privileges ON DATABASE onlyoffice TO onlyoffice;"
```
3. Configure the database:

118
build.pro Normal file
View File

@ -0,0 +1,118 @@
TEMPLATE = subdirs
ROOT_DIR=$$PWD/..
DEPLOY_DIR=$$PWD/deploy
CORE_ROOT_DIR=$$ROOT_DIR/core
include($$PWD/common.pri)
CONFIG += ordered
core_windows {
desktop:CONFIG += core_and_multimedia
}
core_linux {
desktop:CONFIG += core_and_multimedia
}
core_mac {
CONFIG += no_desktop_apps
}
core_ios {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
core_android {
CONFIG += no_use_common_binary
CONFIG += no_desktop_apps
CONFIG += no_tests
}
addSubProject(cryptopp, $$CORE_ROOT_DIR/Common/3dParty/cryptopp/project/cryptopp.pro)
addSubProject(kernel, $$CORE_ROOT_DIR/Common/kernel.pro,\
cryptopp)
addSubProject(unicodeconverter, $$CORE_ROOT_DIR/UnicodeConverter/UnicodeConverter.pro,\
kernel)
addSubProject(network, $$CORE_ROOT_DIR/Common/Network/network.pro,\
kernel)
addSubProject(graphics, $$CORE_ROOT_DIR/DesktopEditor/graphics/pro/graphics.pro,\
kernel unicodeconverter)
addSubProject(pdfwriter, $$CORE_ROOT_DIR/PdfWriter/PdfWriter.pro,\
kernel unicodeconverter graphics)
addSubProject(djvufile, $$CORE_ROOT_DIR/DjVuFile/DjVuFile.pro,\
kernel unicodeconverter graphics pdfwriter)
addSubProject(xpsfile, $$CORE_ROOT_DIR/XpsFile/XpsFile.pro,\
kernel unicodeconverter graphics pdfwriter)
addSubProject(htmlrenderer, $$CORE_ROOT_DIR/HtmlRenderer/htmlrenderer.pro,\
kernel unicodeconverter graphics pdfwriter)
addSubProject(pdfreader, $$CORE_ROOT_DIR/PdfReader/PdfReader.pro,\
kernel unicodeconverter graphics pdfwriter htmlrenderer)
addSubProject(docxrenderer, $$CORE_ROOT_DIR/DocxRenderer/DocxRenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(htmlfile2, $$CORE_ROOT_DIR/HtmlFile2/HtmlFile2.pro,\
kernel unicodeconverter graphics network)
addSubProject(doctrenderer, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/doctrenderer.pro,\
kernel unicodeconverter graphics)
addSubProject(fb2file, $$CORE_ROOT_DIR/Fb2File/Fb2File.pro,\
kernel unicodeconverter graphics)
addSubProject(epubfile, $$CORE_ROOT_DIR/EpubFile/CEpubFile.pro,\
kernel unicodeconverter graphics htmlfile2)
!no_x2t {
addSubProject(docxformat, $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/DocxFormatLib.pro)
addSubProject(pptxformat, $$CORE_ROOT_DIR/ASCOfficePPTXFile/PPTXLib/Linux/PPTXFormatLib/PPTXFormatLib.pro)
addSubProject(docxfile, $$CORE_ROOT_DIR/ASCOfficeDocxFile2/Linux/ASCOfficeDocxFile2Lib.pro)
addSubProject(txtxmlformat, $$CORE_ROOT_DIR/ASCOfficeTxtFile/TxtXmlFormatLib/Linux/TxtXmlFormatLib.pro)
addSubProject(rtfformat, $$CORE_ROOT_DIR/ASCOfficeRtfFile/RtfFormatLib/Linux/RtfFormatLib.pro)
addSubProject(pptformat, $$CORE_ROOT_DIR/ASCOfficePPTFile/PPTFormatLib/Linux/PPTFormatLib.pro)
addSubProject(docformat, $$CORE_ROOT_DIR/ASCOfficeDocFile/DocFormatLib/Linux/DocFormatLib.pro)
addSubProject(odffilereader, $$CORE_ROOT_DIR/ASCOfficeOdfFile/linux/OdfFileReaderLib.pro)
addSubProject(odffilewriter, $$CORE_ROOT_DIR/ASCOfficeOdfFileW/linux/OdfFileWriterLib.pro)
addSubProject(xlsformat, $$CORE_ROOT_DIR/ASCOfficeXlsFile2/source/linux/XlsFormatLib.pro)
addSubProject(xlsbformat, $$CORE_ROOT_DIR/Common/DocxFormat/DocxFormatLib/XlsbFormatLib.pro)
addSubProject(x2t, $$CORE_ROOT_DIR/X2tConverter/build/Qt/X2tConverter.pro,\
docxformat pptxformat docxfile txtxmlformat rtfformat pptformat docformat odffilereader odffilewriter xlsformat xlsbformat fb2file epubfile docxrenderer)
}
!no_use_common_binary {
addSubProject(allfontsgen, $$CORE_ROOT_DIR/DesktopEditor/AllFontsGen/AllFontsGen.pro,\
kernel unicodeconverter graphics)
addSubProject(allthemesgen, $$CORE_ROOT_DIR/DesktopEditor/allthemesgen/allthemesgen.pro,\
kernel unicodeconverter graphics)
addSubProject(docbuilder, $$CORE_ROOT_DIR/DesktopEditor/doctrenderer/app_builder/docbuilder.pro,\
kernel unicodeconverter graphics doctrenderer)
}
!no_tests {
addSubProject(standardtester, $$CORE_ROOT_DIR/Test/Applications/StandardTester/standardtester.pro)
}
core_and_multimedia {
addSubProject(videoplayer, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro,\
kernel unicodeconverter graphics)
}
desktop {
message(desktop)
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
addSubProject(ooxmlsignature, $$CORE_ROOT_DIR/DesktopEditor/xmlsec/src/ooxmlsignature.pro,\
kernel unicodeconverter graphics)
addSubProject(documentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore.pro,\
kernel unicodeconverter graphics hunspell ooxmlsignature htmlrenderer pdfwriter pdfreader djvufile xpsfile)
addSubProject(documentscore_helper, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/ascdocumentscore_helper.pro,\
documentscore)
!core_mac {
addSubProject(qtdocumentscore, $$ROOT_DIR/desktop-sdk/ChromiumBasedEditors/lib/qt_wrapper/qtascdocumentscore.pro,\
documentscore)
}
!no_desktop_apps {
core_windows:addSubProject(projicons, $$ROOT_DIR/desktop-apps/win-linux/extras/projicons/ProjIcons.pro,\
documentscore videoplayer)
addSubProject(desktopapp, $$ROOT_DIR/desktop-apps/win-linux/ASCDocumentEditor.pro,\
documentscore videoplayer)
}
}
mobile {
message(mobile)
!desktop {
addSubProject(hunspell, $$CORE_ROOT_DIR/Common/3dParty/hunspell/qt/hunspell.pro)
}
}

View File

@ -14,20 +14,19 @@ parser.add_option("--clean", action="store", type="string", dest="clean", defaul
parser.add_option("--module", action="store", type="string", dest="module", default="builder", help="defines what modules to build. You can specify several of them, e.g. --module 'core desktop builder server mobile'")
parser.add_option("--develop", action="store", type="string", dest="develop", default="0", help="defines develop mode")
parser.add_option("--beta", action="store", type="string", dest="beta", default="0", help="defines beta mode")
parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'win_arm64', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]")
parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines the destination platform for your build ['win_64', 'win_32', 'win_64_xp', 'win_32_xp', 'linux_64', 'linux_32', 'mac_64', 'ios', 'android_arm64_v8a', 'android_armv7', 'android_x86', 'android_x86_64'; combinations: 'native': your current system (windows/linux/mac only); 'all': all available systems; 'windows': win_64 win_32 win_64_xp win_32_xp; 'linux': linux_64 linux_32; 'mac': mac_64; 'android': android_arm64_v8a android_armv7 android_x86 android_x86_64]")
parser.add_option("--config", action="store", type="string", dest="config", default="", help="provides ability to specify additional parameters for qmake")
parser.add_option("--qt-dir", action="store", type="string", dest="qt-dir", default="", help="defines qmake directory path. qmake can be found in qt-dir/compiler/bin directory")
parser.add_option("--qt-dir-xp", action="store", type="string", dest="qt-dir-xp", default="", help="defines qmake directory path for Windows XP. qmake can be found in 'qt-dir/compiler/bin directory")
parser.add_option("--external-folder", action="store", type="string", dest="external-folder", default="", help="defines a directory with external folder")
parser.add_option("--sql-type", action="store", type="string", dest="sql-type", default="postgres", help="defines the sql type wich will be used")
parser.add_option("--db-port", action="store", type="string", dest="db-port", default="5432", help="defines the sql db-port wich will be used")
parser.add_option("--db-name", action="store", type="string", dest="db-name", default="onlyoffice", help="defines the sql db-name wich will be used")
parser.add_option("--db-user", action="store", type="string", dest="db-user", default="onlyoffice", help="defines the sql db-user wich will be used")
parser.add_option("--db-pass", action="store", type="string", dest="db-pass", default="onlyoffice", help="defines the sql db-pass wich will be used")
parser.add_option("--compiler", action="store", type="string", dest="compiler", default="", help="defines compiler name. It is not recommended to use it as it's defined automatically (msvc2015, msvc2015_64, gcc, gcc_64, clang, clang_64, etc)")
parser.add_option("--no-apps", action="store", type="string", dest="no-apps", default="0", help="disables building desktop apps that use qt")
parser.add_option("--themesparams", action="store", type="string", dest="themesparams", default="", help="provides settings for generating presentation themes thumbnails")
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="auto", help="can be used only if update is set to true - 'https', 'ssh'")
parser.add_option("--git-protocol", action="store", type="string", dest="git-protocol", default="https", help="can be used only if update is set to true - 'https', 'ssh'")
parser.add_option("--branding", action="store", type="string", dest="branding", default="", help="provides branding path")
parser.add_option("--branding-name", action="store", type="string", dest="branding-name", default="", help="provides branding name")
parser.add_option("--branding-url", action="store", type="string", dest="branding-url", default="", help="provides branding url")
@ -41,9 +40,6 @@ parser.add_option("--features", action="store", type="string", dest="features",
parser.add_option("--vs-version", action="store", type="string", dest="vs-version", default="2015", help="version of visual studio")
parser.add_option("--vs-path", action="store", type="string", dest="vs-path", default="", help="path to vcvarsall")
parser.add_option("--siteUrl", action="store", type="string", dest="siteUrl", default="127.0.0.1", help="site url")
parser.add_option("--multiprocess", action="store", type="string", dest="multiprocess", default="1", help="provides ability to specify single process for make")
parser.add_option("--sysroot", action="store", type="string", dest="sysroot", default="0", help="provides ability to use sysroot (ubuntu 16.04) to build c++ code. If value is \"1\", then the sysroot from tools/linux/sysroot will be used, and if it is not there, it will download it and unpack it. You can also set value as the path to the your own sysroot (rarely used). Only for linux")
parser.add_option("--qemu-win-arm64-dir", action="store", type="string", dest="qemu-win-arm64-dir", default="", help="dir to qemu virtual machine for win_arm64 cross build. It should contains start.bat. More info in tools/win/qemu.")
(options, args) = parser.parse_args(arguments)
configOptions = vars(options)

View File

@ -1,3 +1,3 @@
sdkjs-plugin="ai, photoeditor, ocr, translator, thesaurus, youtube, highlightcode"
sdkjs-plugin-server="speech, zotero, mendeley, speechrecognition"
sdkjs-plugin="photoeditor, macros, ocr, translator, thesaurus, youtube, highlightcode"
sdkjs-plugin-server="speech, zotero, mendeley"
sdkjs-addons="sdkjs-forms"

View File

@ -1,19 +1,11 @@
FROM onlyoffice/documentserver:latest
RUN apt-get update -y && \
apt-get install git -y \
python3 -y \
python -y \
openjdk-11-jdk -y \
bzip2 -y \
npm -y && \
npm install -g grunt grunt-cli -y && \
ln -s /usr/bin/python3 /usr/bin/python && \
ln -s /usr/bin/pip3 /usr/bin/pip && \
npm install -g grunt-cli -y && \
git clone --depth 1 https://github.com/ONLYOFFICE/build_tools.git var/www/onlyoffice/documentserver/build_tools && \
sed -i '/documentserver-static-gzip.sh ${ONLYOFFICE_DATA_CONTAINER}/d' /app/ds/run-document-server.sh && \
#Set Up Debug Logging
sed -i 's/WARN/ALL/g' /etc/onlyoffice/documentserver/log4js/production.json && \
#Start test example
if [ -s /etc/supervisor/conf.d/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /etc/supervisor/conf.d/ds-example.conf; fi && \
if [ -s /app/ds/setup/config/supervisor/ds/ds-example.conf ] ; then sed -i 's,autostart=false,autostart=true,' /app/ds/setup/config/supervisor/ds/ds-example.conf; fi && \
rm -rf /var/lib/apt/lists/*
ENTRYPOINT python3 /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver $@ && /bin/sh -c /app/ds/run-document-server.sh
ENTRYPOINT python /var/www/onlyoffice/documentserver/build_tools/develop/run_build_js.py /var/www/onlyoffice/documentserver && /bin/sh -c /app/ds/run-document-server.sh

View File

@ -1,223 +1,46 @@
# Docker
This directory contains instructions for developers,
who want to change something in sdkjs or web-apps or server module,
who want to change something in sdkjs or web-apps module,
but don't want to compile the pretty complicated core product to make those changes.
## System requirements
## Installing ONLYOFFICE Docs
**Note**: ARM-based architectures are currently **NOT** supported;
attempting to run the images on ARM devices may result in startup failures
or other runtime issues.
## How to use - Linux
### Windows
**Note**: You need the latest Docker version installed.
You need the latest
[Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/)
installed.
**Note**: Docker Desktop does not start automatically after installation.
You should manually start the **Docker Desktop** application.
**Note**: If you have problems running Docker Desktop with the
"Use WSL 2 instead of Hyper-V" installation option,
try reinstalling it without this option.
### Linux or macOS
You need the latest
[Docker](https://docs.docker.com/engine/install/)
version installed.
## Create develop Docker Images
To create a image with the ability to include external non-minified sdkjs code,
use the following commands:
### Clone development environment to work dir
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
```
### Modify Docker Images
You might need to pull **onlyoffice/documentserver** image:
**Note**: Do not prefix docker command with sudo.
[This](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user)
instruction shows how to use Docker without sudo.
```bash
cd build_tools/develop
docker pull onlyoffice/documentserver
docker build --no-cache -t documentserver-develop .
```
### Create develop image
To create a image with the ability to include external non-minified sdkjs code,
use the following command from the dockerfile directory:
```bash
docker build -t documentserver-develop .
```
**Note**: The dot at the end is required.
**Note**: Sometimes script may fail due to network errors. Just restart it.
### Connecting external folders
## Clone development modules
Clone development modules to the work dir
* `sdkjs` repo is located [here](https://github.com/ONLYOFFICE/sdkjs/)
* `web-apps` repo is located [here](https://github.com/ONLYOFFICE/web-apps/)
* `server` repo is located [here](https://github.com/ONLYOFFICE/server/)
```bash
git clone https://github.com/ONLYOFFICE/sdkjs.git
git clone https://github.com/ONLYOFFICE/web-apps.git
git clone https://github.com/ONLYOFFICE/server.git
```
## Start server with external folders
To mount external folders to the container,
To connect external folders to the container,
you need to pass the "-v" parameter
along with the relative paths to the required folders.
The folders `sdkjs` and `web-apps` are required for proper development workflow.
The folder `server` is optional.
along with the relative paths to the required folders.
**Note**: Run command with the current working directory
containing `sdkjs`, `web-apps`...
**Note**: ONLYOFFICE server uses port 80.
Look for another application using port 80 and stop it
**Note**: Server startup with `sdkjs` and `web-apps` takes 15 minutes,
and 20 minutes with `server`.
### docker run on Windows (PowerShell)
**Note**: Run PowerShell as administrator to fix EACCES error when installing
node_modules
run with `sdkjs` and `web-apps`
**For example, let's connect the external folders "sdkjs" and "web-apps" to the container:**
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```
or run with `sdkjs`, `web-apps` and `server`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $pwd/server:/var/www/onlyoffice/documentserver/server documentserver-develop
```
### docker run on Linux or macOS
run with `sdkjs` and `web-apps`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```
or run with `sdkjs`, `web-apps` and `server`
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true -v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps -v $(pwd)/server:/var/www/onlyoffice/documentserver/server documentserver-develop
```
## Open editor
After the server starts successfully, you will see Docker log messages like this
```bash
[Date] [WARN] [localhost] [docId] [userId] nodeJS
```
To try the document editor, open a browser tab and type
[http://localhost/example](http://localhost/example) into the URL bar.
**Note**: Disable **ad blockers** for localhost page.
It may block some scripts (like Analytics.js)
## Modify sources
### To change something in `sdkjs` do the following steps
1)Edit source file. Let's insert an image url into each open document.
Following command inserts (in case of problems, you can replace URL)
`this.AddImageUrl(['http://localhost/example/images/logo.png']);`
after event
`this.sendEvent('asc_onDocumentContentReady');`
in file
`sdkjs/common/apiBase.js`
### change sdkjs on Windows (PowerShell)
```bash
(Get-Content sdkjs/common/apiBase.js) -replace "this\.sendEvent\('asc_onDocumentContentReady'\);", "this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);" | Set-Content sdkjs/common/apiBase.js
```
### change sdkjs on Linux or macOS
```bash
sed -i "s,this.sendEvent('asc_onDocumentContentReady');,this.sendEvent('asc_onDocumentContentReady');this.AddImageUrl(['http://localhost/example/images/logo.png']);," sdkjs/common/apiBase.js
```
2)Delete browser cache or hard reload the page `Ctrl + Shift + R`
3)Open new file in browser
### To change something in `server` do the following steps
1)Edit source file. Let's send `"Hello World!"`
chart message every time a document is opened.
Following command inserts
`yield* onMessage(ctx, conn, {"message": "Hello World!"});`
in function
`sendAuthInfo`
in file
`server/DocService/sources/DocsCoServer.js`
### change server on Windows (PowerShell)
```bash
(Get-Content server/DocService/sources/DocsCoServer.js) -replace 'opt_hasForgotten, opt_openedAt\) \{', 'opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});' | Set-Content server/DocService/sources/DocsCoServer.js
```
### change server on Linux or macOS
```bash
sed -i 's#opt_hasForgotten, opt_openedAt) {#opt_hasForgotten, opt_openedAt) {yield* onMessage(ctx, conn, {"message": "Hello World!"});#' server/DocService/sources/DocsCoServer.js
```
2)Restart document server process
**Note**: Look for ``CONTAINER_ID`` in the result of ``docker ps``.
```bash
docker exec -it CONTAINER_ID supervisorctl restart all
```
3)Open new file in browser
## Start server with additional functionality(addons)
To get additional functionality and branding you need to connect a branding folder,
additional addon folders, and pass command-line arguments.
For example run with `onlyoffice` branding and
addons:`sdkjs-forms`, `sdkjs-ooxml`, `web-apps-mobile`
### docker run on Windows (PowerShell) with branding
**Note**: Run PowerShell as administrator to fix EACCES error when installing
node_modules
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true `
-v $pwd/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $pwd/web-apps:/var/www/onlyoffice/documentserver/web-apps `
-v $pwd/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $pwd/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $pwd/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $pwd/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile `
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
```
### docker run on Linux or macOS with branding
```bash
docker run -i -t -p 80:80 --restart=always -e ALLOW_PRIVATE_IP_ADDRESS=true \
-v $(pwd)/sdkjs:/var/www/onlyoffice/documentserver/sdkjs -v $(pwd)/web-apps:/var/www/onlyoffice/documentserver/web-apps \
-v $(pwd)/onlyoffice:/var/www/onlyoffice/documentserver/onlyoffice -v $(pwd)/sdkjs-ooxml:/var/www/onlyoffice/documentserver/sdkjs-ooxml -v $(pwd)/sdkjs-forms:/var/www/onlyoffice/documentserver/sdkjs-forms -v $(pwd)/web-apps-mobile:/var/www/onlyoffice/documentserver/web-apps-mobile \
documentserver-develop args --branding onlyoffice --branding-url 'https://github.com/ONLYOFFICE/onlyoffice.git' --siteUrl localhost
docker run -i -t -d -p 80:80 --restart=always \
-v /host-dir/sdkjs:/var/www/onlyoffice/documentserver/sdkjs \
-v /host-dir/web-apps:/var/www/onlyoffice/documentserver/web-apps documentserver-develop
```

View File

@ -1,57 +1,12 @@
#!/usr/bin/env python3
#!/usr/bin/env python
import sys
sys.path.append(sys.argv[1] + '/build_tools/scripts')
sys.path.append(sys.argv[1] + '/build_tools/scripts/develop')
import build_js
import run_server
import config
import base
git_dir = sys.argv[1]
base.print_info('argv :'+' '.join(sys.argv))
base.cmd_in_dir(git_dir + '/build_tools/', 'python3', ['configure.py', '--develop', '1'] + sys.argv[2:])
base.cmd_in_dir(sys.argv[1] + '/build_tools/', 'python', ['configure.py'])
config.parse()
config.parse_defaults()
if base.is_exist(git_dir + "/server/FileConverter/bin/fonts.log"):
base.print_info('remove font cache to regenerate fonts in external sdkjs volume')
base.delete_file(git_dir + "/server/FileConverter/bin/fonts.log")
# external server volume
if base.is_exist(sys.argv[1] + '/server/DocService/package.json'):
base.print_info('replace supervisor cfg to run docservice and converter from source')
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=.*", "command=node " + git_dir + "/server/DocService/sources/server.js")
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=.*", "command=node " + git_dir + "/server/FileConverter/sources/convertermaster.js")
base.print_info('run_server.run_docker_server')
run_server.run_docker_server()
else:
#Fix theme generation for external sdkjs volume
if base.is_exist(git_dir + "/server/FileConverter/bin/DoctRenderer.config"):
base.print_info('replace DoctRenderer.config for external sdkjs volume')
base.generate_doctrenderer_config(git_dir + "/server/FileConverter/bin/DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
addons = {}
addons.update(base.get_sdkjs_addons())
addons.update(base.get_web_apps_addons())
staticContent = ""
for addon in addons:
if (addon):
staticContent += '"/' + addon + '": {"path": "/var/www/onlyoffice/documentserver/' + addon + '","options": {"maxAge": "7d"}},'
if staticContent:
base.print_info('replace production-linux.json for addons'+staticContent)
base.replaceInFileRE("/etc/onlyoffice/documentserver/production-linux.json", '"static_content": {.*', '"static_content": {' + staticContent)
base.print_info('replace supervisor cfg to run docservice and converter from pkg')
base.replaceInFileRE("/etc/supervisor/conf.d/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-docservice.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/DocService/docservice")
base.replaceInFileRE("/etc/supervisor/conf.d/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
base.replaceInFileRE("/app/ds/setup/config/supervisor/ds/ds-converter.conf", "command=node .*", "command=/var/www/onlyoffice/documentserver/server/FileConverter/converter")
base.print_info('run_server.run_docker_sdk_web_apps: ' + git_dir)
run_server.run_docker_sdk_web_apps(git_dir)
build_js.build_js_develop(sys.argv[1])

View File

@ -1,66 +0,0 @@
sudo apt-get install git curl wget p7zip-full
sudo apt-get install git-lfs
# for old system (ubuntu 16)
#curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo bash
#sudo apt-get install git-lfs
# save login
git config --global credential.helper store
# clone build_tools
git clone https://git.onlyoffice.com/ONLYOFFICE/build_tools.git
# deps =========================================
cd ./build_tools/tools/linux
# python 3.10
./python.sh
# qt
#./python3/bin/python3 ./qt_binary_fetch.py amd64
#./python3/bin/python3 ./qt_binary_fetch.py arm64
./python3/bin/python3 ./qt_binary_fetch.py all
# deps
./python3/bin/python3 ./deps.py
# cmake 3.30
sudo ./cmake.sh
cd ../../
# ==============================================
# sysroots (IF NEEDED) =========================
cd ./build_tools/tools/linux/sysroot
#./python3/bin/python3 ./fetch.py amd64
#./python3/bin/python3 ./fetch.py arm64
./../python3/bin/python3 ./fetch.py all
cd ../../../
# ==============================================
# configure ====================================
./tools/linux/python3/bin/python3 ./configure.py --clean "0" --update-light "1" --update "1" --branch "hotfix/v9.2.1" --module "desktop" --qt-dir "$(pwd)/tools/linux/qt_build/Qt-5.9.9"
# with sysroot: sysroot "1"
# ==============================================
# cross build linux_arm64
sudo apt install qemu-user qemu-user-static binfmt-support
sudo update-binfmts --enable qemu-aarch64
# 1) without sysroot
#sudo apt install gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
#sudo dpkg --add-architecture arm64
#sudo apt update
#... install all dev packages ...
# 2) official supported: with sysroot
./tools/linux/python3/bin/python3 ./configure.py sysroot "1" #...

51
make.py
View File

@ -1,34 +1,22 @@
#!/usr/bin/env python
import os
import sys
__dir__name__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__name__ + '/scripts')
sys.path.append(__dir__name__ + '/scripts/develop')
sys.path.append(__dir__name__ + '/scripts/develop/vendor')
sys.path.append(__dir__name__ + '/scripts/core_common')
sys.path.append(__dir__name__ + '/scripts/core_common/modules')
sys.path.append(__dir__name__ + '/scripts/core_common/modules/android')
sys.path.append('scripts')
sys.path.append('scripts/develop')
sys.path.append('scripts/develop/vendor')
sys.path.append('scripts/core_common')
sys.path.append('scripts/core_common/modules')
import config
import base
import build_sln
import build
import build_js
import build_server
import deploy
import make_common
import develop
import argparse
parser = argparse.ArgumentParser(description="options")
parser.add_argument("--build-only-branding", action="store_true")
args = parser.parse_args()
if (args.build_only_branding):
base.set_env("OO_BUILD_ONLY_BRANDING", "1")
# parse configuration
config.parse()
base.check_python()
base_dir = base.get_script_dir(__file__)
@ -45,7 +33,7 @@ if ("1" != base.get_env("OO_RUNNING_BRANDING")) and ("" != config.option("brandi
base.cmd("git", ["clone", config.option("branding-url"), branding_dir])
base.cmd_in_dir(branding_dir, "git", ["fetch"], True)
if not is_exist or ("1" != config.option("update-light")):
base.cmd_in_dir(branding_dir, "git", ["checkout", "-f", config.option("branch")], True)
@ -71,29 +59,34 @@ if ("1" == config.option("update")):
base.configure_common_apps()
# developing...
develop.make()
develop.make();
# check only js builds
if ("1" == base.get_env("OO_ONLY_BUILD_JS")):
build_js.make()
exit(0)
#base.check_tools()
# core 3rdParty
make_common.make()
# build updmodule for desktop (only for windows version)
if config.check_option("module", "desktop"):
config.extend_option("qmake_addon", "URL_WEBAPPS_HELP=https://download.onlyoffice.com/install/desktop/editors/help/v" + base.get_env('PRODUCT_VERSION') + "/apps")
if ("windows" == base.host_platform()) and (config.check_option("module", "desktop")):
config.extend_option("config", "updmodule")
config.extend_option("qmake_addon", "LINK=https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.xml")
if "windows" == base.host_platform():
config.extend_option("config", "updmodule")
base.set_env("DESKTOP_URL_UPDATES_MAIN_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcast.json")
base.set_env("DESKTOP_URL_UPDATES_DEV_CHANNEL", "https://download.onlyoffice.com/install/desktop/editors/windows/onlyoffice/appcastdev.json")
if not base.is_file(base_dir + "/tools/WinSparkle-0.7.0.zip"):
base.cmd("curl.exe", ["https://d2ettrnqo7v976.cloudfront.net/winsparkle/WinSparkle-0.7.0.zip", "--output", base_dir + "/tools/WinSparkle-0.7.0.zip"])
if not base.is_dir(base_dir + "/tools/WinSparkle-0.7.0"):
base.cmd("7z.exe", ["x", base_dir + "/tools/WinSparkle-0.7.0.zip", "-otools"])
base.create_dir(base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle")
#base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/include", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/include")
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_32")
base.copy_dir(base_dir + "/tools/WinSparkle-0.7.0/x64/Release", base_dir + "/../desktop-apps/win-linux/3dparty/WinSparkle/win_64")
# build
build_sln.make()
build.make()
# js
build_js.make()

View File

@ -2,108 +2,17 @@
# -*- coding: utf-8 -*-
import sys
sys.path.append("scripts")
import argparse
import package_common as common
sys.path.append('scripts')
import package_utils as utils
# parse
parser = argparse.ArgumentParser(description="Build packages.")
parser.add_argument("-P", "--platform", dest="platform", type=str,
action="store", help="Defines platform", required=True)
parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+",
action="store", help="Defines targets", required=True)
parser.add_argument("-V", "--version", dest="version", type=str,
action="store", help="Defines version")
parser.add_argument("-B", "--build", dest="build", type=str,
action="store", help="Defines build")
parser.add_argument("-H", "--branch", dest="branch", type=str,
action="store", help="Defines branch")
parser.add_argument("-R", "--branding", dest="branding", type=str,
action="store", help="Provides branding path")
args = parser.parse_args()
# vars
common.os_family = utils.host_platform()
common.platform = args.platform
common.prefix = common.platformPrefixes[common.platform] if common.platform in common.platformPrefixes else ""
common.targets = args.targets
common.clean = "clean" in args.targets
common.sign = "sign" in args.targets
common.deploy = "deploy" in args.targets
if args.version: common.version = args.version
else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0")
utils.set_env("PRODUCT_VERSION", common.version)
utils.set_env("BUILD_VERSION", common.version)
if args.build: common.build = args.build
else: common.build = utils.get_env("BUILD_NUMBER", "0")
utils.set_env("BUILD_NUMBER", common.build)
if args.branch: common.branch = args.branch
else: common.branch = utils.get_env("BRANCH_NAME", "null")
utils.set_env("BRANCH_NAME", common.branch)
common.branding = args.branding
common.timestamp = utils.get_timestamp()
common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..")
common.branding_dir = utils.get_abspath(common.workspace_dir + "/" + args.branding) if args.branding else common.workspace_dir
common.summary = []
utils.log("os_family: " + common.os_family)
utils.log("platform: " + str(common.platform))
utils.log("prefix: " + str(common.prefix))
utils.log("targets: " + str(common.targets))
utils.log("clean: " + str(common.clean))
utils.log("sign: " + str(common.sign))
utils.log("deploy: " + str(common.deploy))
utils.log("version: " + common.version)
utils.log("build: " + common.build)
utils.log("branding: " + str(common.branding))
utils.log("timestamp: " + common.timestamp)
utils.log("workspace_dir: " + common.workspace_dir)
utils.log("branding_dir: " + common.branding_dir)
# config
utils.parse()
# branding
if common.branding is not None:
sys.path.insert(-1, \
utils.get_path("../" + common.branding + "/build_tools/scripts"))
import package_core
import package_desktop
import package_server
import package_builder
import package_mobile
if utils.branding is not None:
branding_path = utils.get_path('..', utils.branding)
sys.path.insert(-1, utils.get_path(branding_path, 'build_tools/scripts'))
# build
utils.set_cwd(common.workspace_dir, verbose=True)
if "core" in common.targets:
package_core.make()
if "closuremaps_sdkjs_opensource" in common.targets:
package_core.deploy_closuremaps_sdkjs("opensource")
if "closuremaps_sdkjs_commercial" in common.targets:
package_core.deploy_closuremaps_sdkjs("commercial")
if "closuremaps_webapps" in common.targets:
package_core.deploy_closuremaps_webapps("opensource")
if "desktop" in common.targets:
package_desktop.make()
if "builder" in common.targets:
package_builder.make()
if "server_community" in common.targets:
package_server.make("community")
if "server_enterprise" in common.targets:
package_server.make("enterprise")
if "server_developer" in common.targets:
package_server.make("developer")
if "server_prerequisites" in common.targets:
package_server.make("prerequisites")
if "mobile" in common.targets:
package_mobile.make()
# summary
utils.log_h1("Build summary")
exitcode = 0
for i in common.summary:
if list(i.values())[0]:
utils.log("[ OK ] " + list(i.keys())[0])
else:
utils.log("[FAILED] " + list(i.keys())[0])
exitcode = 1
exit(exitcode)
import package
package.make(utils.product)

File diff suppressed because it is too large Load Diff

121
scripts/build.py Normal file
View File

@ -0,0 +1,121 @@
#!/usr/bin/env python
import config
import base
import os
import multiprocessing
def make_pro_file(makefiles_dir, pro_file):
platforms = config.option("platform").split()
for platform in platforms:
if not platform in config.platforms:
continue
print("------------------------------------------")
print("BUILD_PLATFORM: " + platform)
print("------------------------------------------")
old_env = dict(os.environ)
# if you need change output libraries path - set the env variable
# base.set_env("DESTDIR_BUILD_OVERRIDE", os.getcwd() + "/out/android/" + config.branding() + "/mobile")
isAndroid = False if (-1 == platform.find("android")) else True
if isAndroid:
toolchain_platform = "linux-x86_64"
if ("mac" == base.host_platform()):
toolchain_platform = "darwin-x86_64"
base.set_env("ANDROID_NDK_HOST", toolchain_platform)
old_path = base.get_env("PATH")
new_path = base.qt_setup(platform) + "/bin:"
new_path += (base.get_env("ANDROID_NDK_ROOT") + "/toolchains/llvm/prebuilt/" + toolchain_platform + "/bin:")
new_path += old_path
base.set_env("PATH", new_path)
base.set_env("ANDROID_NDK_PLATFORM", "android-21")
if (-1 != platform.find("ios")):
base.hack_xcode_ios()
# makefile suffix
file_suff = platform
if (config.check_option("config", "debug")):
file_suff += "_debug_"
file_suff += config.option("branding")
# setup qt
qt_dir = base.qt_setup(platform)
base.set_env("OS_DEPLOY", platform)
# qmake CONFIG+=...
config_param = base.qt_config(platform)
# qmake ADDON
qmake_addon = []
if ("" != config.option("qmake_addon")):
qmake_addon.append(config.option("qmake_addon"))
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe"):
print("THIS PLATFORM IS NOT SUPPORTED")
continue
# non windows platform
if not base.is_windows():
if base.is_file(makefiles_dir + "/build.makefile_" + file_suff):
base.delete_file(makefiles_dir + "/build.makefile_" + file_suff)
print("make file: " + makefiles_dir + "/build.makefile_" + file_suff)
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if ("1" == config.option("clean")):
base.cmd_and_return_cwd(base.app_make(), ["clean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
base.cmd_and_return_cwd(base.app_make(), ["distclean", "-f", makefiles_dir + "/build.makefile_" + file_suff], True)
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if not base.is_file(pro_file):
base.cmd(qt_dir + "/bin/qmake", ["-nocache", pro_file, "CONFIG+=" + config_param] + qmake_addon)
if ("0" != config.option("multiprocess")):
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff, "-j" + str(multiprocessing.cpu_count())])
else:
base.cmd_and_return_cwd(base.app_make(), ["-f", makefiles_dir + "/build.makefile_" + file_suff])
else:
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
qmake_bat.append("if exist ./" + makefiles_dir + "/build.makefile_" + file_suff + " del /F ./" + makefiles_dir + "/build.makefile_" + file_suff)
qmake_addon_string = ""
if ("" != config.option("qmake_addon")):
qmake_addon_string = " \"" + config.option("qmake_addon") + "\""
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
if ("1" == config.option("clean")):
qmake_bat.append("call nmake clean -f " + makefiles_dir + "/build.makefile_" + file_suff)
qmake_bat.append("call nmake distclean -f " + makefiles_dir + "/build.makefile_" + file_suff)
qmake_bat.append("call \"" + qt_dir + "/bin/qmake\" -nocache " + pro_file + " \"CONFIG+=" + config_param + "\"" + qmake_addon_string)
if ("0" != config.option("multiprocess")):
qmake_bat.append("set CL=/MP")
qmake_bat.append("call nmake -f " + makefiles_dir + "/build.makefile_" + file_suff)
base.run_as_bat(qmake_bat)
os.environ.clear()
os.environ.update(old_env)
base.delete_file(".qmake.stash")
# make build.pro
def make():
is_no_brandind_build = base.is_file("config")
make_pro_file("makefiles", "build.pro")
if config.check_option("module", "builder") and base.is_windows() and is_no_brandind_build:
# check replace
replace_path_lib = ""
replace_path_lib_file = os.getcwd() + "/../core/DesktopEditor/doctrenderer/docbuilder.com/docbuilder.h"
option_branding = config.option("branding")
if (option_branding != ""):
replace_path_lib = "../../../build/" + option_branding + "/lib/"
# replace
if (replace_path_lib != ""):
base.replaceInFile(replace_path_lib_file, "../../../build/lib/", replace_path_lib)
if ("2019" == config.option("vs-version")):
base.make_sln("../core/DesktopEditor/doctrenderer/docbuilder.com", ["docbuilder.com_2019.sln", "/Rebuild", "\"Release|x64\""], True)
base.make_sln("../core/DesktopEditor/doctrenderer/docbuilder.com", ["docbuilder.com_2019.sln", "/Rebuild", "\"Release|Win32\""], True)
else:
base.make_sln("../core/DesktopEditor/doctrenderer/docbuilder.com", ["docbuilder.com.sln", "/Rebuild", "\"Release|x64\""], True)
base.make_sln("../core/DesktopEditor/doctrenderer/docbuilder.com", ["docbuilder.com.sln", "/Rebuild", "\"Release|Win32\""], True)
# restore
if (replace_path_lib != ""):
base.replaceInFile(replace_path_lib_file, replace_path_lib, "../../../build/lib/")
return

View File

@ -4,114 +4,94 @@ import config
import base
import os
def correct_sdkjs_licence(directory):
branding = config.option("branding")
if "" == branding or "onlyoffice" == branding:
return
license = base.readFileLicence(directory + "/word/sdk-all-min.js")
base.replaceFileLicence(directory + "/common/Charts/ChartStyles.js", license)
base.replaceFileLicence(directory + "/common/hash/hash/engine.js", license)
base.replaceFileLicence(directory + "/common/hash/hash/engine_ie.js", license)
base.replaceFileLicence(directory + "/common/Native/native.js", license)
base.replaceFileLicence(directory + "/common/Native/native_graphics.js", license)
base.replaceFileLicence(directory + "/common/spell/spell/spell.js", license)
base.replaceFileLicence(directory + "/common/spell/spell/spell_ie.js", license)
base.replaceFileLicence(directory + "/pdf/src/engine/drawingfile.js", license)
base.replaceFileLicence(directory + "/pdf/src/engine/drawingfile_ie.js", license)
base.replaceInFile(directory + "/word/sdk-all-min.js", "onlyoffice-spellchecker", "r7-spellchecker")
base.replaceInFile(directory + "/slide/sdk-all-min.js", "onlyoffice-spellchecker", "r7-spellchecker")
base.replaceInFile(directory + "/cell/sdk-all-min.js", "onlyoffice-spellchecker", "r7-spellchecker")
return
# make build.pro
def make():
if ("1" == base.get_env("OO_NO_BUILD_JS")):
return
if not base.is_need_build_js():
return
base.set_env('NODE_ENV', 'production')
base_dir = base.get_script_dir() + "/.."
out_dir = base_dir + "/out/js/"
out_dir = base_dir + "/out/js/";
branding = config.option("branding-name")
if ("" == branding):
branding = "onlyoffice"
out_dir += branding
base.create_dir(out_dir)
isOnlyMobile = False
if (config.option("module") == "mobile"):
isOnlyMobile = True
# builder
if not isOnlyMobile:
base.cmd_in_dir(base_dir + "/../web-apps/translation", "python", ["merge_and_check.py"])
build_interface(base_dir + "/../web-apps/build")
build_sdk_builder(base_dir + "/../sdkjs/build")
base.create_dir(out_dir + "/builder")
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/builder/web-apps")
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/builder/sdkjs")
correct_sdkjs_licence(out_dir + "/builder/sdkjs")
build_interface(base_dir + "/../web-apps/build")
build_sdk_builder(base_dir + "/../sdkjs/build")
base.create_dir(out_dir + "/builder")
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/builder/web-apps")
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/builder/sdkjs")
# desktop
if config.check_option("module", "desktop") and not isOnlyMobile:
if config.check_option("module", "desktop"):
build_sdk_desktop(base_dir + "/../sdkjs/build")
base.create_dir(out_dir + "/desktop")
base.copy_dir(base_dir + "/../sdkjs/deploy/sdkjs", out_dir + "/desktop/sdkjs")
correct_sdkjs_licence(out_dir + "/desktop/sdkjs")
base.copy_dir(base_dir + "/../web-apps/deploy/web-apps", out_dir + "/desktop/web-apps")
deldirs = ['ie', 'mobile', 'embed']
[base.delete_dir(root + "/" + d) for root, dirs, f in os.walk(out_dir + "/desktop/web-apps/apps") for d in dirs if d in deldirs]
if not base.is_file(out_dir + "/desktop/sdkjs/common/AllFonts.js"):
base.copy_file(base_dir + "/../sdkjs/common/HtmlFileInternal/AllFonts.js", out_dir + "/desktop/sdkjs/common/AllFonts.js")
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/documenteditor/mobile")
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/presentationeditor/mobile")
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/embed")
base.delete_dir(out_dir + "/desktop/web-apps/apps/spreadsheeteditor/mobile")
base.copy_file(base_dir + "/../web-apps/apps/api/documents/index.html.desktop", out_dir + "/desktop/web-apps/apps/api/documents/index.html")
build_interface(base_dir + "/../desktop-apps/common/loginpage/build")
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/index.html", out_dir + "/desktop/index.html")
base.copy_file(base_dir + "/../desktop-apps/common/loginpage/deploy/noconnect.html", out_dir + "/desktop/noconnect.html")
# mobile
if config.check_option("module", "mobile"):
build_sdk_native(base_dir + "/../sdkjs/build")
build_sdk_native(base_dir + "/../sdkjs/build", False)
base.create_dir(out_dir + "/mobile")
base.create_dir(out_dir + "/mobile/sdkjs")
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/externs/jszip-utils.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_word.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/externs/jszip-utils.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/cell/native/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_cell.js")
base.join_scripts(prefix_js, out_dir + "/mobile/sdkjs/banners.js")
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/externs/jszip-utils.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/mobile/sdkjs/banners_slide.js")
base.create_dir(out_dir + "/mobile/sdkjs/word")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/word/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/mobile/sdkjs/word/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/cell")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/cell/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/mobile/sdkjs/cell/script.bin")
base.create_dir(out_dir + "/mobile/sdkjs/slide")
base.join_scripts([out_dir + "/mobile/sdkjs/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/mobile/sdkjs/slide/script.bin")
base.join_scripts([out_dir + "/mobile/sdkjs/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/mobile/sdkjs/slide/script.bin")
base.delete_file(out_dir + "/mobile/sdkjs/banners.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_word.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_cell.js")
base.delete_file(out_dir + "/mobile/sdkjs/banners_slide.js")
return
# JS build
def _run_npm(directory):
retValue = base.cmd_in_dir(directory, "npm", ["install"], True)
if (0 != retValue):
retValue = base.cmd_in_dir(directory, "npm", ["install", "--verbose"])
return retValue
def _run_npm_ci(directory):
return base.cmd_in_dir(directory, "npm", ["ci"])
return base.cmd_in_dir(directory, "npm", ["install"])
def _run_npm_cli(directory):
return base.cmd_in_dir(directory, "npm", ["install", "-g", "grunt-cli"])
@ -121,7 +101,7 @@ def _run_grunt(directory, params=[]):
def build_interface(directory):
_run_npm(directory)
_run_grunt(directory, ["--force", "--verbose"] + base.web_apps_addons_param())
_run_grunt(directory, ["--force"] + base.web_apps_addons_param())
return
def get_build_param(minimize=True):
@ -141,47 +121,33 @@ def build_sdk_desktop(directory):
def build_sdk_builder(directory):
#_run_npm_cli(directory)
_run_npm(directory)
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param() + ["--map"])
_run_grunt(directory, get_build_param() + base.sdkjs_addons_param())
return
def build_sdk_native(directory, minimize=True):
#_run_npm_cli(directory)
_run_npm(directory)
addons = base.sdkjs_addons_param()
if not config.check_option("sdkjs-addons", "sdkjs-native"):
addons.append("--addon=sdkjs-native")
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons)
_run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param())
return
def build_sdkjs_develop(root_dir):
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
_run_npm_ci(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
def build_js_develop(root_dir):
#_run_npm_cli(root_dir + "/sdkjs/build")
external_folder = config.option("--external-folder")
if (external_folder != ""):
external_folder = "/" + external_folder
build_sdkjs_develop(root_dir)
_run_npm(root_dir + external_folder + "/sdkjs/build")
_run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param())
_run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param())
_run_npm(root_dir + external_folder + "/web-apps/build")
_run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites")
_run_npm(root_dir + external_folder + "/web-apps/build/sprites")
_run_grunt(root_dir + external_folder + "/web-apps/build/sprites", [])
base.cmd_in_dir(root_dir + external_folder + "/web-apps/translation", "python", ["merge_and_check.py"])
old_cur = os.getcwd()
old_product_version = base.get_env("PRODUCT_VERSION")
base.set_env("PRODUCT_VERSION", old_product_version + "d")
os.chdir(root_dir + external_folder + "/web-apps/vendor/framework7-react")
base.cmd("npm", ["ci"])
base.cmd("npm", ["install"])
base.cmd("npm", ["run", "deploy-word"])
base.cmd("npm", ["run", "deploy-cell"])
base.cmd("npm", ["run", "deploy-slide"])

View File

@ -1,45 +1,13 @@
#!/usr/bin/env python
import base
import build_js
import config
import optparse
import sys
arguments = sys.argv[1:]
parser = optparse.OptionParser()
parser.add_option("--output",
action="store", type="string", dest="output",
help="Directory for output the build result")
parser.add_option("--write-version",
action="store_true", dest="write_version", default=False,
help="Create version file of build")
parser.add_option("--minimize",
action="store", type="string", dest="minimize", default="0",
help="Is minimized version")
(options, args) = parser.parse_args(arguments)
def write_version_files(output_dir):
if (base.is_dir(output_dir)):
last_version_tag = base.run_command('git describe --abbrev=0 --tags')['stdout']
version_numbers=last_version_tag.replace('v', '').split('.')
major=(version_numbers[0:1] or ('0',))[0]
minor=(version_numbers[1:2] or ('0',))[0]
maintenance=(version_numbers[2:3] or ('0',))[0]
build=(version_numbers[3:4] or ('0',))[0]
full_version='%s.%s.%s.%s' % (major, minor, maintenance, build)
for name in ['word', 'cell', 'slide']:
base.writeFile(output_dir + '/%s/sdk.version' % name, full_version)
import base
import os
import build_js
# parse configuration
config.parse()
config.parse_defaults()
isMinimize = False
if ("1" == options.minimize or "true" == options.minimize):
isMinimize = True
config.set_option("jsminimize", "disable")
config.extend_option("jsminimize", "0")
branding = config.option("branding-name")
if ("" == branding):
@ -47,39 +15,44 @@ if ("" == branding):
base_dir = base.get_script_dir() + "/.."
out_dir = base_dir + "/../native-sdk/examples/win-linux-mac/build/sdkjs"
if (options.output):
out_dir = options.output
base.create_dir(out_dir)
build_js.build_sdk_native(base_dir + "/../sdkjs/build", isMinimize)
build_js.build_sdk_native(base_dir + "/../sdkjs/build")
vendor_dir_src = base_dir + "/../web-apps/vendor/"
sdk_dir_src = base_dir + "/../sdkjs/deploy/sdkjs/"
prefix_js = [
vendor_dir_src + "xregexp/xregexp-all-min.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs-native/common/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/externs/jszip-utils.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_word.js")
postfix_js = [
base_dir + "/../sdkjs/common/libfont/engine/fonts_native.js",
base_dir + "/../sdkjs/common/Charts/ChartStyles.js"
]
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/externs/jszip-utils.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/cell/native/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_cell.js")
base.join_scripts(prefix_js, out_dir + "/banners.js")
base.join_scripts([vendor_dir_src + "xregexp/xregexp-all-min.js",
vendor_dir_src + "underscore/underscore-min.js",
base_dir + "/../sdkjs/common/externs/jszip-utils.js",
base_dir + "/../sdkjs/common/Native/native.js",
base_dir + "/../sdkjs/common/Native/Wrappers/common.js",
base_dir + "/../sdkjs/common/Native/jquery_native.js"],
out_dir + "/banners_slide.js")
base.create_dir(out_dir + "/word")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"] + postfix_js, out_dir + "/word/script.bin")
base.join_scripts([out_dir + "/banners_word.js", sdk_dir_src + "word/sdk-all-min.js", sdk_dir_src + "word/sdk-all.js"], out_dir + "/word/script.bin")
base.create_dir(out_dir + "/cell")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"] + postfix_js, out_dir + "/cell/script.bin")
base.join_scripts([out_dir + "/banners_cell.js", sdk_dir_src + "cell/sdk-all-min.js", sdk_dir_src + "cell/sdk-all.js"], out_dir + "/cell/script.bin")
base.create_dir(out_dir + "/slide")
base.join_scripts([out_dir + "/banners.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"] + postfix_js, out_dir + "/slide/script.bin")
base.join_scripts([out_dir + "/banners_slide.js", sdk_dir_src + "slide/sdk-all-min.js", sdk_dir_src + "slide/sdk-all.js"], out_dir + "/slide/script.bin")
base.delete_file(out_dir + "/banners.js")
# Write sdk version mark file if needed
if (options.write_version):
write_version_files(out_dir)
base.delete_file(out_dir + "/banners_word.js")
base.delete_file(out_dir + "/banners_cell.js")
base.delete_file(out_dir + "/banners_slide.js")

View File

@ -16,7 +16,8 @@ def make():
if("" != config.option("branding")):
branding_dir = git_dir + '/' + config.option("branding") + '/server'
build_server_with_addons()
base.cmd_in_dir(server_dir, "npm", ["install"])
base.cmd_in_dir(server_dir, "grunt", ["--no-color", "-v"] + base.server_addons_param())
#env variables
product_version = base.get_env('PRODUCT_VERSION')
@ -29,46 +30,35 @@ def make():
cur_date = datetime.date.today().strftime("%m/%d/%Y")
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
base.replaceInFileRE(server_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
base.replaceInFileRE(server_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
server_build_dir = server_dir + "/build/server"
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildNumber = [0-9]*", "const buildNumber = " + build_number)
base.replaceInFileRE(server_build_dir + "/Common/sources/license.js", "const buildDate = '[0-9-/]*'", "const buildDate = '" + cur_date + "'")
base.replaceInFileRE(server_build_dir + "/Common/sources/commondefines.js", "const buildVersion = '[0-9.]*'", "const buildVersion = '" + product_version + "'")
custom_public_key = branding_dir + '/debug.js'
if(base.is_exist(custom_public_key)):
base.copy_file(custom_public_key, server_dir + '/Common/sources')
base.copy_file(custom_public_key, server_build_dir + '/Common/sources')
#node22 packaging has issue https://github.com/yao-pkg/pkg/issues/87
pkg_target = "node20"
pkg_target = "node14"
if ("linux" == base.host_platform()):
pkg_target += "-linux"
if (-1 != config.option("platform").find("linux_arm64")):
pkg_target += "-arm64"
if ("windows" == base.host_platform()):
pkg_target += "-win"
base.cmd_in_dir(server_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
base.cmd_in_dir(server_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
base.cmd_in_dir(server_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
base.cmd_in_dir(server_dir + "/AdminPanel/server", "pkg", [".", "-t", pkg_target, "-o", "adminpanel"])
base.cmd_in_dir(server_build_dir + "/DocService", "pkg", [".", "-t", pkg_target, "--options", "max_old_space_size=4096", "-o", "docservice"])
base.cmd_in_dir(server_build_dir + "/FileConverter", "pkg", [".", "-t", pkg_target, "-o", "converter"])
base.cmd_in_dir(server_build_dir + "/Metrics", "pkg", [".", "-t", pkg_target, "-o", "metrics"])
example_dir = base.get_script_dir() + "/../../document-server-integration/web/documentserver-example/nodejs"
base.delete_dir(example_dir + "/node_modules")
base.cmd_in_dir(example_dir, "npm", ["ci"])
base.cmd_in_dir(example_dir, "npm", ["install"])
base.cmd_in_dir(example_dir, "pkg", [".", "-t", pkg_target, "-o", "example"])
def build_server_with_addons():
addons = {}
addons["server"] = [True, False]
addons.update(base.get_server_addons())
for addon in addons:
if (addon):
addon_dir = base.get_script_dir() + "/../../" + addon
if (base.is_exist(addon_dir)):
base.cmd_in_dir(addon_dir, "npm", ["ci"])
base.cmd_in_dir(addon_dir, "npm", ["run", "build"])
def build_server_develop():
build_server_with_addons()
server_dir = base.get_script_dir() + "/../../server"
base.cmd_in_dir(server_dir, "npm", ["install"])
base.cmd_in_dir(server_dir, "grunt", ["develop", "-v"] + base.server_addons_param())

View File

@ -1,75 +0,0 @@
#!/usr/bin/env python
import config
import base
import os
import sys
sys.path.append(os.path.dirname(__file__) + "/..")
import sln
import qmake
# make solution
# NOTE(review): leading indentation was lost in this capture; the comments
# below describe the apparent structure -- verify against the original file.
def make(solution=""):
# Build every configured platform from a qmake solution description (sln.json).
platforms = config.option("platform").split()
for platform in platforms:
if not platform in config.platforms:
continue
print("------------------------------------------")
print("BUILD_PLATFORM: " + platform)
print("------------------------------------------")
# Default solution descriptor lives next to this script.
if ("" == solution):
solution = "./sln.json"
projects = sln.get_projects(solution, platform)
for pro in projects:
qmake_main_addon = ""
# x2t on android: strip the debug library unless explicitly disabled.
if (0 == platform.find("android")) and (-1 != pro.find("X2tConverter.pro")):
if config.check_option("config", "debug") and not config.check_option("config", "disable_x2t_debug_strip"):
print("[WARNING:] temporary enable strip for x2t library in debug")
qmake_main_addon += "build_strip_debug"
qmake.make(platform, pro, qmake_main_addon)
# iOS xcframework bundles also need a simulator build of each project.
if config.check_option("platform", "ios") and config.check_option("config", "bundle_xcframeworks"):
qmake.make(platform, pro, "xcframework_platform_ios_simulator")
# Windows-only docbuilder COM/.NET wrappers (onlyoffice branding only).
if config.check_option("module", "builder") and base.is_windows() and "onlyoffice" == config.branding():
# check branding libs
if (config.option("branding-name") == "onlyoffice"):
for platform in platforms:
if not platform in config.platforms:
continue
# Copy branded doctrenderer binaries into the unbranded lib dir.
core_lib_unbranding_dir = os.getcwd() + "/../core/build/lib/" + platform + base.qt_dst_postfix()
if not base.is_dir(core_lib_unbranding_dir):
base.create_dir(core_lib_unbranding_dir)
core_lib_branding_dir = os.getcwd() + "/../core/build/onlyoffice/lib/" + platform + base.qt_dst_postfix()
base.copy_file(core_lib_branding_dir + "/doctrenderer.dll", core_lib_unbranding_dir + "/doctrenderer.dll")
base.copy_file(core_lib_branding_dir + "/doctrenderer.lib", core_lib_unbranding_dir + "/doctrenderer.lib")
# check replace
directory_builder_branding = os.getcwd() + "/../core/DesktopEditor/doctrenderer"
if base.is_dir(directory_builder_branding):
# correctPathForBuilder/restorePathForBuilder bracket the .sln builds --
# presumably a temporary header/source path rewrite; confirm semantics.
new_replace_path = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.com/src/docbuilder.h")
if ("2019" == config.option("vs-version")):
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com_2019.sln")
# NOTE(review): `if (True)` makes the following branch unconditional
# and the `else` below unreachable on the 2019 path -- confirm intent.
if (True):
new_path_net = base.correctPathForBuilder(directory_builder_branding + "/docbuilder.net/src/docbuilder.net.cpp")
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.net/src", "docbuilder.net.sln")
base.restorePathForBuilder(new_path_net)
else:
base.make_sln_project("../core/DesktopEditor/doctrenderer/docbuilder.com/src", "docbuilder.com.sln")
base.restorePathForBuilder(new_replace_path)
# build Java docbuilder wrapper
if config.check_option("module", "builder") and "onlyoffice" == config.branding():
for platform in platforms:
if not platform in config.platforms:
continue
# build JNI library
qmake.make(platform, base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java/src/jni/docbuilder_jni.pro", "", True)
# build Java code to JAR
base.cmd_in_dir(base.get_script_dir() + "/../../core/DesktopEditor/doctrenderer/docbuilder.java", "python", ["make.py"])
return

View File

@ -24,7 +24,7 @@ def parse():
# all platforms
global platforms
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "win_arm64",
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp",
"linux_64", "linux_32", "linux_arm64",
"mac_64", "mac_arm64",
"ios",
@ -57,11 +57,11 @@ def parse():
if not check_option("platform", "mac_64"):
options["platform"] = "mac_64 " + options["platform"]
if (False):
# use qemu on deploy for emulation
if ("windows" == host_platform) and check_option("platform", "win_arm64") and not base.is_os_arm():
if not check_option("platform", "win_64"):
options["platform"] = "win_64 " + options["platform"]
if ("linux" == host_platform) and check_option("platform", "linux_arm64") and not base.is_os_arm():
if not check_option("platform", "linux_64"):
# linux_64 binaries need only for desktop
if check_option("module", "desktop"):
options["platform"] = "linux_64 " + options["platform"]
if check_option("platform", "xp") and ("windows" == host_platform):
options["platform"] += " win_64_xp win_32_xp"
@ -70,39 +70,8 @@ def parse():
options["platform"] += " android_arm64_v8a android_armv7 android_x86 android_x86_64"
# check vs-version
if ("windows" == host_platform) and ("" == option("vs-version")):
options["vs-version"] = "2019"
if check_option("platform", "win_64_xp") or check_option("platform", "win_32_xp"):
options["vs-version"] = "2015"
if ("windows" == host_platform) and ("2019" == option("vs-version")):
extend_option("config", "vs2019")
# sysroot setup
if "linux" != host_platform and "sysroot" in options:
options["sysroot"] = ""
if "linux" == host_platform and "sysroot" in options:
if options["sysroot"] == "0":
options["sysroot"] = ""
elif options["sysroot"] == "1":
dst_dir = os.path.abspath(base.get_script_dir(__file__) + '/../tools/linux/sysroot')
dst_dir_amd64 = dst_dir + "/ubuntu16-amd64-sysroot"
dst_dir_arm64 = dst_dir + "/ubuntu16-arm64-sysroot"
if not base.is_dir(dst_dir_amd64) or not base.is_dir(dst_dir_arm64):
base.cmd_in_dir(dst_dir, "python3", ["./fetch.py", "all"])
options["sysroot_linux_64"] = dst_dir_amd64
options["sysroot_linux_arm64"] = dst_dir_arm64
else:
# specific sysroot => one platform for build!
options["sysroot"] = "1"
options["sysroot_linux_64"] = options["sysroot"]
options["sysroot_linux_arm64"] = options["sysroot"]
if is_cef_107():
extend_option("config", "cef_version_107")
if is_v8_60():
extend_option("config", "v8_version_60")
if ("" == option("vs-version")):
options["vs-version"] = "2015"
# check vs-path
if ("windows" == host_platform) and ("" == option("vs-path")):
@ -125,26 +94,8 @@ def parse():
if not "sdkjs-plugin-server" in options:
options["sdkjs-plugin-server"] = "default"
if check_option("platform", "ios"):
if not check_option("config", "no_bundle_xcframeworks"):
if not check_option("config", "bundle_xcframeworks"):
extend_option("config", "bundle_xcframeworks")
if check_option("config", "bundle_xcframeworks"):
if not check_option("config", "bundle_dylibs"):
extend_option("config", "bundle_dylibs")
if ("mac" == host_platform) and check_option("module", "desktop"):
if not check_option("config", "bundle_dylibs"):
extend_option("config", "bundle_dylibs")
if check_option("use-system-qt", "1"):
base.cmd_in_dir(base.get_script_dir() + "/../tools/linux", "python", ["use_system_qt.py"])
options["qt-dir"] = base.get_script_dir() + "/../tools/linux/system_qt"
# disable all warnings (enable if needed with core_enable_all_warnings options)
if not check_option("config", "core_enable_all_warnings"):
extend_option("config", "core_disable_all_warnings")
if not "arm64-toolchain-bin" in options:
options["arm64-toolchain-bin"] = "/usr/bin"
return
@ -161,9 +112,6 @@ def check_compiler(platform):
if (0 == platform.find("win")):
compiler["compiler"] = "msvc" + options["vs-version"]
compiler["compiler_64"] = "msvc" + options["vs-version"] + "_64"
if (0 == platform.find("win_arm")):
compiler["compiler"] = "msvc" + options["vs-version"] + "_arm"
compiler["compiler_64"] = "msvc" + options["vs-version"] + "_arm64"
elif (0 == platform.find("linux")):
compiler["compiler"] = "gcc"
compiler["compiler_64"] = "gcc_64"
@ -207,9 +155,6 @@ def extend_option(name, value):
else:
options[name] = value
def set_option(name, value):
options[name] = value
def branding():
branding = option("branding-name")
if ("" == branding):
@ -224,26 +169,6 @@ def is_mobile_platform():
return True
return False
def get_custom_sysroot_bin(platform):
use_platform = platform
if "linux_arm64" == platform and not base.is_os_arm():
# use cross compiler
use_platform = "linux_64"
return option("sysroot_" + use_platform) + "/usr/bin"
def get_custom_sysroot_lib(platform, isNatural=False):
use_platform = platform
if "linux_arm64" == platform and not base.is_os_arm() and not isNatural:
# use cross compiler
use_platform = "linux_64"
if ("linux_64" == use_platform):
return option("sysroot_linux_64") + "/usr/lib/x86_64-linux-gnu"
if ("linux_arm64" == use_platform):
return option("sysroot_linux_arm64") + "/usr/lib/aarch64-linux-gnu"
return ""
def parse_defaults():
defaults_path = base.get_script_dir() + "/../defaults"
if ("" != option("branding")):
@ -267,28 +192,4 @@ def parse_defaults():
options[name] = options[name].replace("default", defaults_options[name])
else:
options[name] = defaults_options[name]
if ("config_addon" in defaults_options):
extend_option("config", defaults_options["config_addon"])
return
def is_cef_107():
if ("linux" == base.host_platform()) and (5004 > base.get_gcc_version()) and not check_option("platform", "android"):
return True
return False
def is_v8_60():
if check_option("platform", "linux_arm64"):
return False
if ("linux" == base.host_platform()) and (5004 > base.get_gcc_version()) and not check_option("platform", "android"):
return True
if ("windows" == base.host_platform()) and ("2015" == option("vs-version")):
return True
#if check_option("config", "use_v8"):
# return True
return False

View File

@ -13,19 +13,11 @@ import cef
import icu
import openssl
import curl
import websocket_all
import websocket
import v8
import html2
import iwork
import md
import hunspell
import glew
import harfbuzz
import oo_brotli
import hyphen
import googletest
import libvlc
import heif
def check_android_ndk_macos_arm(dir):
if base.is_dir(dir + "/darwin-x86_64") and not base.is_dir(dir + "/darwin-arm64"):
@ -46,21 +38,9 @@ def make():
openssl.make()
v8.make()
html2.make()
iwork.make(False)
md.make()
hunspell.make(False)
harfbuzz.make()
glew.make()
hyphen.make()
googletest.make()
oo_brotli.make()
heif.make()
if config.check_option("build-libvlc", "1"):
libvlc.make()
if config.check_option("module", "mobile"):
if (config.check_option("platform", "android")):
curl.make()
websocket_all.make()
curl.make()
websocket.make()
return

View File

@ -1,163 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import re
def get_android_ndk_version():
    """Return the NDK version: the last path component of ANDROID_NDK_ROOT."""
    root = base.get_env("ANDROID_NDK_ROOT")
    # Fall back to the pinned default version when the env var is unset.
    if (root == ""):
        root = "21.1.6352462"
    return root.strip("/").split("/")[-1]

def get_android_ndk_version_major():
    """Return the numeric major NDK version (non-digits stripped, e.g. "r21" -> 21)."""
    leading = get_android_ndk_version().split(".")[0]
    return int(re.sub("[^0-9]", "", leading))

def get_sdk_api():
    """Android API level to target: "21" for NDKs older than r23, else "23"."""
    return "21" if get_android_ndk_version_major() < 23 else "23"
# Module-level tables for the Android NDK toolchain helpers.
# NOTE(review): `global` at module scope is a no-op statement; kept as-is.
global archs
# ABIs to build, in build order.
archs = ["arm64", "arm", "x86_64", "x86"]
global platforms
# Per-ABI settings: "target" = clang triple prefix, "dst" = output dir name,
# "api" = Android API level, "old" = pre-r23 per-target library dir name.
# NOTE(review): arm entries carry an "abi" key while x86 entries carry "arch";
# only "target"/"dst"/"api"/"old" appear to be read in this module -- verify.
platforms = {
"arm64" : {
"abi" : "arm64-v8a",
"target" : "aarch64-linux-android",
"dst" : "arm64_v8a",
"api" : get_sdk_api(),
"old" : "aarch64-linux-android"
},
"arm" : {
"abi" : "armeabi-v7a",
"target" : "armv7a-linux-androideabi",
"dst" : "armv7",
"api" : get_sdk_api(),
"old" : "arm-linux-android"
},
"x86_64" : {
"arch" : "x86_64",
"target" : "x86_64-linux-android",
"dst" : "x86_64",
"api" : get_sdk_api(),
"old" : "x86_64-linux-android"
},
"x86" : {
"arch" : "x86",
"target" : "i686-linux-android",
"dst" : "x86",
"api" : get_sdk_api(),
"old" : "i686-linux-android"
}
}
# todo: check arm host!
global host
# Host tag used to locate the prebuilt LLVM toolchain directory.
if ("linux" == base.host_platform()):
host = {
"name" : "linux",
"arch" : "linux-x86_64"
}
else:
host = {
"name" : "darwin",
"arch" : "darwin-x86_64"
}
def get_options_dict_as_array(opts):
    """Flatten {key: value} into a list of "key=value" strings, in insertion order."""
    return [key + "=" + val for key, val in opts.items()]

def get_options_array_as_string(opts):
    """Join a list of option strings into a single space-separated string."""
    return " ".join(opts)
def ndk_dir():
    """Path of the NDK root, taken from the ANDROID_NDK_ROOT environment variable."""
    return base.get_env("ANDROID_NDK_ROOT")

def sdk_dir():
    """Derive the SDK root from the NDK location.

    "<root>/ndk/<version>" installs sit two levels below the SDK root;
    otherwise assume the NDK directory is one level below it.
    """
    path = ndk_dir()
    up = "/../.." if "/ndk/" in path else "/.."
    return path + up

def toolchain_dir():
    """Path of the prebuilt LLVM toolchain for the current host platform."""
    return ndk_dir() + "/toolchains/llvm/prebuilt/" + host["arch"]
def prepare_platform(arch, cpp_standard=11):
    """Point the process environment at the NDK clang toolchain for `arch`.

    arch         -- key into `platforms` ("arm64", "arm", "x86_64", "x86")
    cpp_standard -- minimum C++ standard; any value >= 11 appends -std=c++11
                    to CXXFLAGS (NOTE(review): higher values still pin c++11
                    -- confirm that is intended)
    Mutates environment variables only; returns None.
    """
    target = platforms[arch]["target"]
    api = platforms[arch]["api"]
    toolchain = toolchain_dir()
    base.set_env("TARGET", target)
    base.set_env("TOOLCHAIN", toolchain)
    base.set_env("NDK_STANDARD_ROOT", toolchain)
    base.set_env("ANDROIDVER", api)
    base.set_env("ANDROID_API", api)
    # Binutils replacements shipped inside the LLVM toolchain.
    base.set_env("AR", toolchain + "/bin/llvm-ar")
    base.set_env("AS", toolchain + "/bin/llvm-as")
    base.set_env("LD", toolchain + "/bin/ld")
    base.set_env("RANLIB", toolchain + "/bin/llvm-ranlib")
    base.set_env("STRIP", toolchain + "/bin/llvm-strip")
    # Target-prefixed clang wrappers encode both the triple and the API level.
    base.set_env("CC", target + api + "-clang")
    base.set_env("CXX", target + api + "-clang++")
    ld_flags = "-Wl,--gc-sections,-rpath-link=" + toolchain + "/sysroot/usr/lib/"
    if (23 > get_android_ndk_version_major()):
        # Pre-r23 NDKs keep per-target libraries in extra directories.
        ld_flags += (" -L" + toolchain + "/" + platforms[arch]["old"] + "/lib")
        ld_flags += (" -L" + toolchain + "/sysroot/usr/lib/" + platforms[arch]["old"] + "/" + api)
    base.set_env("LDFLAGS", ld_flags)
    base.set_env("PATH", toolchain + "/bin" + os.pathsep + base.get_env("PATH"))
    cflags = [
        "-Os",
        "-ffunction-sections",
        "-fdata-sections",
        "-fvisibility=hidden",
        "-Wno-unused-function",
        "-fPIC",
        "-I" + toolchain + "/sysroot/usr/include",
        "-D__ANDROID_API__=" + api,
        "-DANDROID"
    ]
    cflags_string = " ".join(cflags)
    cppflags_string = cflags_string
    if (cpp_standard >= 11):
        cppflags_string += " -std=c++11"
    base.set_env("CFLAGS", cflags_string)
    base.set_env("CXXFLAGS", cppflags_string)
    # Fixed: was mistakenly "CPPPLAGS"; extend_cflags() reads and writes
    # CPPFLAGS (the standard preprocessor-flags variable used by configure).
    base.set_env("CPPFLAGS", cflags_string)
    return
def extend_cflags(params):
    """Append `params` to CFLAGS and mirror the updated value into CPPFLAGS."""
    base.set_env("CFLAGS", base.get_env("CFLAGS") + " " + params)
    # Keep preprocessor flags in lockstep with the compiler flags.
    base.set_env("CPPFLAGS", base.get_env("CFLAGS"))

def extend_cxxflags(params):
    """Append `params` to CXXFLAGS."""
    base.set_env("CXXFLAGS", base.get_env("CXXFLAGS") + " " + params)

def extend_ldflags(params):
    """Append `params` to LDFLAGS."""
    base.set_env("LDFLAGS", base.get_env("LDFLAGS") + " " + params)

View File

@ -1,94 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Working directory of the curl third-party build, normalized to an absolute
# path with a trailing slash (later code concatenates file names onto it).
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/curl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
current_dir += "/"
# Release tag form ("7_68_0") used in the download URL, versus the unpacked
# directory / tarball name form ("7.68.0").
lib_version = "curl-7_68_0"
lib_name = "curl-7.68.0"
def fetch():
    """Download and unpack the curl source tarball unless it is already present."""
    src_dir = current_dir + lib_name
    if base.is_dir(src_dir):
        return
    tarball = src_dir + ".tar.gz"
    url = "https://github.com/curl/curl/releases/download/" + lib_version + "/" + lib_name + ".tar.gz"
    base.cmd("curl", ["-L", "-s", "-o", tarball, url])
    base.cmd("tar", ["xfz", tarball, "-C", current_dir])

def build_host():
    """No host-side build step is needed for curl."""
    return
def build_arch(arch):
    """Cross-compile static libcurl for one Android ABI via configure/make."""
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return  # already built for this ABI
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()  # NOTE(review): unused here
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    saved_cwd = os.getcwd()
    os.chdir(current_dir + lib_name)
    # configure --host triple per ABI.
    host_triples = {
        "arm64": "aarch64-linux-android",
        "arm": "arm-linux-androideabi",
        "x86_64": "x86_64-linux-android",
        "x86": "i686-linux-android",
    }
    params = []
    if arch in host_triples:
        params.append("--host=" + host_triples[arch])
    openssl_dir = os.path.abspath(current_dir + "../openssl/build/android/" + android_ndk.platforms[arch]["dst"])
    params += ["--enable-ipv6",
               "--enable-static",
               "--disable-shared",
               "--prefix=" + arch_build_dir,
               "--with-ssl=" + openssl_dir]
    base.cmd("./configure", params)
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(saved_cwd)
    base.create_dir(dst_dir)
    base.copy_file(arch_build_dir + "/lib/libcurl.a", dst_dir)
    base.copy_dir(arch_build_dir + "/include", current_dir + "build/android/include")
    base.delete_dir(arch_build_dir)
def make():
    """Fetch curl and build it for every Android ABI.

    Fix: the os.environ snapshot is now restored in a `finally` block, so a
    failing sub-build no longer leaks the toolchain variables set by
    android_ndk.prepare_platform() into the process environment.
    """
    saved_env = dict(os.environ)
    try:
        fetch()
        build_host()
        for arch in android_ndk.archs:
            build_arch(arch)
    finally:
        os.environ.clear()
        os.environ.update(saved_env)
    return
if __name__ == "__main__":
    make()

View File

@ -1,142 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Build root for ICU's Android cross-build, normalized to an absolute path
# with a trailing slash.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
current_dir += "/"
# ICU release to fetch (git tag "release-74-2").
icu_major = "74"
icu_minor = "2"
# configure options shared by the host and per-arch builds: static library
# only, renaming on, no extras/samples/tests, data packaged as an archive.
options = {
"--enable-strict" : "no",
"--enable-extras" : "no",
"--enable-draft" : "yes",
"--enable-samples" : "no",
"--enable-tests" : "no",
"--enable-renaming" : "yes",
"--enable-icuio" : "no",
"--enable-layoutex" : "no",
"--with-library-bits" : "nochange",
"--with-library-suffix" : "",
"--enable-static" : "yes",
"--enable-shared" : "no",
"--with-data-packaging" : "archive"
}
# Size-oriented compile flags applied to every build.
cpp_flags_base = [
"-Os",
"-ffunction-sections",
"-fdata-sections",
"-fvisibility=hidden",
"-fPIC"
]
# ICU-specific flags/defines (stable wchar/enum ABI, static implementation).
cpp_flags = [
"-fno-short-wchar",
"-fno-short-enums",
"-DU_USING_ICU_NAMESPACE=0",
"-DU_HAVE_NL_LANGINFO_CODESET=0",
"-DU_TIMEZONE=0",
"-DU_DISABLE_RENAMING=0",
"-DUCONFIG_NO_COLLATION=0",
"-DUCONFIG_NO_FORMATTING=0",
"-DUCONFIG_NO_REGULAR_EXPRESSIONS=0",
"-DUCONFIG_NO_TRANSLITERATION=0",
"-DU_STATIC_IMPLEMENTATION"
]
def fetch_icu(major, minor):
    """Shallow-clone the ICU release tag and keep only its icu4c subtree."""
    icu_dir = current_dir + "icu"
    if base.is_dir(icu_dir):
        return
    clone_dir = current_dir + "icu2"
    tag = "release-" + major + "-" + minor
    base.cmd("git", ["clone", "--depth", "1", "--branch", tag,
                     "https://github.com/unicode-org/icu.git", clone_dir])
    base.copy_dir(clone_dir + "/icu4c", icu_dir)
    base.delete_dir_with_access_error(clone_dir)
def build_host():
    """Build host-side ICU (cross_build), required to cross-compile for Android."""
    cross_dir = os.path.abspath(current_dir + "icu/cross_build")
    if not base.is_dir(cross_dir):
        base.create_dir(cross_dir)
    os.chdir(cross_dir)
    # gcc on OSX does not support --gc-sections
    strip_flag = " -Wl,--gc-sections" if "linux" == base.host_platform() else " -Wl,-dead_strip"
    base.set_env("LDFLAGS", "-pthread" + strip_flag)
    base.set_env("CPPFLAGS", android_ndk.get_options_array_as_string(cpp_flags_base + cpp_flags))
    host_type = "MacOSX/GCC" if "mac" == base.host_platform() else "Linux"
    base.cmd("../source/runConfigureICU",
             [host_type, "--prefix=" + cross_dir] + android_ndk.get_options_dict_as_array(options))
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"], True)
    # Publish the host headers for the per-arch builds.
    base.create_dir(current_dir + "build")
    base.copy_dir(cross_dir + "/include", current_dir + "build/include")
    os.chdir(current_dir)
def build_arch(arch):
    """Cross-compile static ICU for one Android ABI against the host cross_build."""
    dst_dir = current_dir + "build/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return  # already built for this ABI
    android_ndk.prepare_platform(arch)
    android_ndk.extend_cflags(" ".join(cpp_flags))
    ndk_dir = android_ndk.ndk_dir()  # NOTE(review): unused here
    toolchain = android_ndk.toolchain_dir()  # NOTE(review): unused here
    cross_dir = os.path.abspath(current_dir + "icu/cross_build")
    build_tmp = os.path.abspath(current_dir + "build/tmp")
    base.create_dir(build_tmp)
    os.chdir(build_tmp)
    configure_args = (["--with-cross-build=" + cross_dir]
                      + android_ndk.get_options_dict_as_array(options)
                      + ["--host=" + android_ndk.platforms[arch]["target"],
                         "--prefix=" + build_tmp])
    base.cmd("./../../icu/source/configure", configure_args)
    base.cmd("make", ["-j4"])
    os.chdir(current_dir)
    base.create_dir(dst_dir)
    for produced in ["/lib/libicuuc.a",
                     "/stubdata/libicudata.a",
                     "/data/out/icudt" + icu_major + "l.dat"]:
        base.copy_file(build_tmp + produced, dst_dir)
    base.delete_dir(build_tmp)
def make():
    """Fetch ICU and build it for every Android ABI.

    Fix: the os.environ snapshot is now restored in a `finally` block, so a
    failing sub-build no longer leaks toolchain variables set by
    android_ndk.prepare_platform() into the process environment.
    """
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)
    saved_env = dict(os.environ)
    try:
        fetch_icu(icu_major, icu_minor)
        build_host()
        for arch in android_ndk.archs:
            build_arch(arch)
    finally:
        os.environ.clear()
        os.environ.update(saved_env)
    return
if __name__ == "__main__":
    make()

View File

@ -1,94 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../../../scripts')
import base
import os
import android_ndk
# Build root for the OpenSSL Android cross-build, normalized to an absolute
# path with a trailing slash.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
current_dir = os.path.abspath(current_dir)
if not current_dir.endswith("/"):
current_dir += "/"
# Pinned OpenSSL release (tarball/directory name).
# NOTE(review): the 1.1.1 series is end-of-life -- consider updating the pin.
lib_name="openssl-1.1.1t"
# Configure options: static-only build, legacy SSLv3/MD2 enabled, no asm.
options = [
"no-shared",
"no-tests",
"enable-ssl3",
"enable-ssl3-method",
"enable-md2",
"no-asm"
]
def fetch():
    """Download and unpack the OpenSSL source tarball unless already present."""
    src_dir = current_dir + lib_name
    if base.is_dir(src_dir):
        return
    tarball = src_dir + ".tar.gz"
    base.cmd("curl", ["-L", "-s", "-o", tarball,
                      "https://www.openssl.org/source/" + lib_name + ".tar.gz"])
    base.cmd("tar", ["xfz", tarball, "-C", current_dir])

def build_host():
    # not needed, just create directories
    for sub in ["/build", "/build/android"]:
        if not base.is_dir(current_dir + sub):
            base.create_dir(current_dir + sub)
    return
def build_arch(arch):
    """Cross-compile static OpenSSL (libcrypto/libssl) for one Android ABI."""
    dst_dir = current_dir + "build/android/" + android_ndk.platforms[arch]["dst"]
    if base.is_dir(dst_dir):
        return  # already built for this ABI
    android_ndk.prepare_platform(arch)
    ndk_dir = android_ndk.ndk_dir()
    toolchain = android_ndk.toolchain_dir()  # NOTE(review): unused here
    base.set_env("ANDROID_NDK_HOME", ndk_dir)
    base.set_env("ANDROID_NDK", ndk_dir)
    arch_build_dir = os.path.abspath(current_dir + "build/android/tmp")
    base.create_dir(arch_build_dir)
    saved_cwd = os.getcwd()
    os.chdir(current_dir + lib_name)
    base.cmd("./Configure", ["android-" + arch,
                             "--prefix=" + arch_build_dir,
                             "-D__ANDROID_API__=" + android_ndk.platforms[arch]["api"]] + options)
    # Hide symbols not exported explicitly; Configure offers no direct knob,
    # so patch the generated Makefile.
    base.replaceInFile("./Makefile", "LIB_CFLAGS=", "LIB_CFLAGS=-fvisibility=hidden ")
    base.replaceInFile("./Makefile", "LIB_CXXFLAGS=", "LIB_CXXFLAGS=-fvisibility=hidden ")
    base.cmd("make", ["clean"])
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    os.chdir(saved_cwd)
    base.create_dir(dst_dir)
    base.create_dir(dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libcrypto.a", dst_dir + "/lib")
    base.copy_file(arch_build_dir + "/lib/libssl.a", dst_dir + "/lib")
    base.copy_dir(arch_build_dir + "/include", dst_dir + "/include")
    base.delete_dir(arch_build_dir)
def make():
    """Fetch OpenSSL and build it for every Android ABI.

    Fix: the os.environ snapshot is now restored in a `finally` block, so a
    failing sub-build no longer leaks toolchain variables set by
    android_ndk.prepare_platform() into the process environment.
    """
    saved_env = dict(os.environ)
    try:
        fetch()
        build_host()
        for arch in android_ndk.archs:
            build_arch(arch)
    finally:
        os.environ.clear()
        os.environ.update(saved_env)
    return
if __name__ == "__main__":
    make()

View File

@ -22,10 +22,10 @@ def move_debug_libs_windows(dir):
def clean():
if base.is_dir("boost_1_58_0"):
base.delete_dir_with_access_error("boost_1_58_0")
base.delete_dir_with_access_error("boost_1_58_0");
base.delete_dir("boost_1_58_0")
if base.is_dir("boost_1_72_0"):
base.delete_dir_with_access_error("boost_1_72_0")
base.delete_dir_with_access_error("boost_1_72_0");
base.delete_dir("boost_1_72_0")
if base.is_dir("build"):
base.delete_dir("build")
@ -68,53 +68,27 @@ def make():
if ("windows" == base.host_platform()):
win_toolset = "msvc-14.0"
win_boot_arg = "vc14"
win_vs_version = "vc140"
if (config.option("vs-version") == "2019"):
win_toolset = "msvc-14.2"
win_boot_arg = "vc142"
win_vs_version = "vc142"
# add "define=_ITERATOR_DEBUG_LEVEL=0" to b2 args before install for disable _ITERATOR_DEBUG_LEVEL
if (-1 != config.option("platform").find("win_64")) and not base.is_file("../build/win_64/lib/libboost_system-" + win_vs_version + "-mt-x64-1_72.lib"):
if (-1 != config.option("platform").find("win_64")) and not base.is_dir("../build/win_64"):
base.cmd("bootstrap.bat", [win_boot_arg])
base.cmd("b2.exe", ["headers"])
base.cmd("b2.exe", ["--clean"])
base.cmd("b2.exe", ["--prefix=./../build/win_64", "link=static", "--with-filesystem", "--with-system", "--with-date_time", "--with-regex", "--toolset=" + win_toolset, "address-model=64", "install"])
if (-1 != config.option("platform").find("win_32")) and not base.is_file("../build/win_32/lib/libboost_system-" + win_vs_version + "-mt-x32-1_72.lib"):
if (-1 != config.option("platform").find("win_32")) and not base.is_dir("../build/win_32"):
base.cmd("bootstrap.bat", [win_boot_arg])
base.cmd("b2.exe", ["headers"])
base.cmd("b2.exe", ["--clean"])
base.cmd("b2.exe", ["--prefix=./../build/win_32", "link=static", "--with-filesystem", "--with-system", "--with-date_time", "--with-regex", "--toolset=" + win_toolset, "address-model=32", "install"])
if (-1 != config.option("platform").find("win_arm64") and not base.is_file("../build/win_arm64/lib/libboost_system-" + win_vs_version + "-mt-a64-1_72.lib")):
boost_bat = []
boost_bat.append("call bootstrap.bat " + win_boot_arg) # first build b2 for win64, so vcvarsall_call with arm64 later
vcvarsall_call = ("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + "x64_arm64")
boost_bat.append(vcvarsall_call)
boost_bat.append("call b2.exe headers")
boost_bat.append("call b2.exe --clean")
boost_bat.append("call b2.exe --prefix=./../build/win_arm64 architecture=arm link=static --with-filesystem --with-system --with-date_time --with-regex --toolset=" + win_toolset + " address-model=64 install")
base.run_as_bat(boost_bat)
correct_install_includes_win(base_dir, "win_64")
correct_install_includes_win(base_dir, "win_32")
correct_install_includes_win(base_dir, "win_arm64")
correct_install_includes_win(base_dir, "win_32")
if config.check_option("platform", "linux_64") and not base.is_dir("../build/linux_64"):
if config.option("sysroot") == "":
addon_config = []
addon_compile = []
if "1" == config.option("use-clang"):
addon_config = ["--with-toolset=clang"]
addon_compile = ["cxxflags=-stdlib=libc++", "linkflags=-stdlib=libc++", "define=_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION"]
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex"] + addon_config)
base.cmd("./b2", ["headers"])
base.cmd("./b2", ["--clean"])
base.cmd("./b2", ["--prefix=./../build/linux_64", "link=static", "cxxflags=-fPIC"] + addon_compile + ["install"])
else: # build via qmake when custom sysroot is needed
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "linux_64")
directory_build = base_dir + "/build/linux_64/lib"
base.delete_file(directory_build + "/libboost_system.a")
base.delete_file(directory_build + "/libboost_system.dylib")
base.copy_files(directory_build + "/linux_64/*.a", directory_build)
base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex"])
base.cmd("./b2", ["headers"])
base.cmd("./b2", ["--clean"])
base.cmd("./b2", ["--prefix=./../build/linux_64", "link=static", "cxxflags=-fPIC", "install"])
# TODO: support x86
if config.check_option("platform", "linux_arm64") and not base.is_dir("../build/linux_arm64"):
@ -125,27 +99,21 @@ def make():
base.copy_files(directory_build + "/linux_arm64/*.a", directory_build)
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios"):
old_cur2 = os.getcwd()
clang_correct()
os.chdir("../")
base.bash("./boost_ios")
os.chdir(old_cur2)
if (-1 != config.option("platform").find("ios")) and not base.is_dir("../build/ios_xcframework"):
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios_simulator", "xcframework_platform_ios_simulator")
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "ios_xcframework/ios")
if (-1 != config.option("platform").find("android")) and not base.is_dir("../build/android"):
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"])
if config.check_option("platform", "mac_64") and not base.is_dir("../build/mac_64"):
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "mac_64")
directory_build = base_dir + "/build/mac_64/lib"
base.delete_file(directory_build + "/libboost_system.a")
base.delete_file(directory_build + "/libboost_system.dylib")
base.copy_files(directory_build + "/mac_64/*.a", directory_build)
if config.check_option("platform", "mac_arm64") and not base.is_dir("../build/mac_arm64"):
if (-1 != config.option("platform").find("mac_arm64")) and not base.is_dir("../build/mac_arm64"):
boost_qt.make(os.getcwd(), ["filesystem", "system", "date_time", "regex"], "mac_arm64")
directory_build = base_dir + "/build/mac_arm64/lib"
base.delete_file(directory_build + "/libboost_system.a")

View File

@ -0,0 +1,108 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
# Per-ABI parameters for Boost.Build's clang toolsets: toolset name used in
# "toolset=clang-<toolset>", clang/binutils target triples, and the b2
# abi/arch/address-model settings plus extra compiler/linker flags.
platforms = {
"arm64_v8a" : {
"name" : "arm64-v8a",
"toolset" : "arm64v8a",
"clang_triple" : "aarch64-linux-android21",
"tool_triple" : "aarch64-linux-android",
"abi" : "aapcs",
"arch" : "arm",
"address_model" : "64",
"compiler_flags" : "",
"linker_flags" : ""
},
"armv7" : {
"name" : "armeabi-v7a",
"toolset" : "armeabiv7a",
"clang_triple" : "armv7a-linux-androideabi16",
"tool_triple" : "arm-linux-androideabi",
"abi" : "aapcs",
"arch" : "arm",
"address_model" : "32",
"compiler_flags" : "-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp",
"linker_flags" : "-Wl,--fix-cortex-a8"
},
"x86" : {
"name" : "x86",
"toolset" : "x86",
"clang_triple" : "i686-linux-android16",
"tool_triple" : "i686-linux-android",
"abi" : "sysv",
"arch" : "x86",
"address_model" : "32",
"compiler_flags" : "",
"linker_flags" : ""
},
"x86_64" : {
"name" : "x86_64",
"toolset" : "x8664",
"clang_triple" : "x86_64-linux-android21",
"tool_triple" : "x86_64-linux-android",
"abi" : "sysv",
"arch" : "x86",
"address_model" : "64",
"compiler_flags" : "",
"linker_flags" : ""
}
}
# Directory of this script; root for the temporary boost build tree below.
base_dir = base.get_script_dir()
def make(platform):
    """Build Boost (filesystem, system, date_time, regex) for one Android ABI.

    platform -- a key of the module-level `platforms` dict.
    The b2 "install" step places results under ../build/android_<platform>.
    Temporarily mutates PATH and several env vars; PATH is restored at the end.
    """
    # work in a throwaway copy of the bundled android boost build scripts
    tmp_build_dir = base_dir + "/core_common/modules/boost"
    if (base.is_dir(tmp_build_dir)):
        base.delete_dir(tmp_build_dir)
    base.copy_dir(base_dir + "/../tools/android/boost", tmp_build_dir)
    current_platform = platforms[platform]
    if (base.host_platform() == "mac"):
        # the bundled wrappers reference the linux NDK prebuilts; retarget to darwin
        source = "prebuilt/linux-x86_64"
        dest = "prebuilt/darwin-x86_64"
        base.replaceInFile(tmp_build_dir + "/user-config.jam", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/hide/as", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/hide/strip", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/ar", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/clang++", source, dest)
        base.replaceInFile(tmp_build_dir + "/bin/ranlib", source, dest)
    build_dir_tmp = tmp_build_dir + "/tmp"  # NOTE(review): assigned but never used below — confirm intent
    base.cmd("./bootstrap.sh", ["--with-libraries=filesystem,system,date_time,regex", "--prefix=../build/android_" + platform])
    base.cmd("./b2", ["headers"])
    base.cmd("./b2", ["--clean"])
    # prepend the wrapper tool dir to PATH and expose NDK location for b2
    old_path = base.get_env("PATH")
    base.set_env("PATH", tmp_build_dir + "/bin:" + old_path)
    base.set_env("NDK_DIR", base.get_env("ANDROID_NDK_ROOT"))
    # BFA_* variables — presumably read by user-config.jam / the tool wrappers; TODO confirm
    base.set_env("BFA_CLANG_TRIPLE_FOR_ABI", current_platform["clang_triple"])
    base.set_env("BFA_TOOL_TRIPLE_FOR_ABI", current_platform["tool_triple"])
    base.set_env("BFA_COMPILER_FLAGS_FOR_ABI", current_platform["compiler_flags"])
    base.set_env("BFA_LINKER_FLAGS_FOR_ABI", current_platform["linker_flags"])
    print(current_platform)
    base.cmd("./b2", ["-q", "-j4",
        "toolset=clang-" + current_platform["toolset"],
        "binary-format=elf",
        "address-model=" + current_platform["address_model"],
        "architecture=" + current_platform["arch"],
        "abi=" + current_platform["abi"],
        "link=static",
        "threading=multi",
        "target-os=android",
        "--user-config=" + tmp_build_dir + "/user-config.jam",
        "--ignore-site-config",
        "--layout=system",
        "install"], True)
    # restore environment and remove the temporary build tree
    base.set_env("PATH", old_path)
    base.delete_dir(tmp_build_dir)
    return

View File

@ -5,18 +5,15 @@ sys.path.append('../..')
import config
import base
import os
import qmake
import build
def make(src_dir, modules, build_platform="android", qmake_addon=""):
def make(src_dir, modules, build_platform="android"):
old_cur = os.getcwd()
old_env = dict(os.environ)
b2_addon = ""
print("boost-headers...")
base.cmd("./bootstrap.sh", ["--with-libraries=system"])
base.cmd("./bootstrap.sh", ["--with-libraries=system"])
base.cmd("./b2", ["--prefix=./../build/" + build_platform, "headers", "install"])
for module in modules:
print("boost-module: " + module + " ...")
module_dir = src_dir + "/libs/" + module
@ -26,13 +23,17 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
pro_file_content.append("TARGET = boost_" + module)
pro_file_content.append("TEMPLATE = lib")
pro_file_content.append("CONFIG += staticlib")
if (build_platform == "android"):
pro_file_content.append("DEFINES += \"_HAS_AUTO_PTR_ETC=0\"")
pro_file_content.append("")
pro_file_content.append("CORE_ROOT_DIR = $$PWD/../../../../../..")
pro_file_content.append("PWD_ROOT_DIR = $$PWD")
pro_file_content.append("include($$PWD/../../../../../base.pri)")
pro_file_content.append("")
pro_file_content.append("MAKEFILE=$$PWD/build.makefile_$$CORE_BUILDS_PLATFORM_PREFIX")
pro_file_content.append("core_debug:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_debug_\")")
pro_file_content.append("build_xp:MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"_xp\")")
pro_file_content.append("OO_BRANDING_SUFFIX = $$(OO_BRANDING)")
pro_file_content.append("!isEmpty(OO_BRANDING_SUFFIX):MAKEFILE=$$join(MAKEFILE, MAKEFILE, \"\", \"$$OO_BRANDING_SUFFIX\")")
pro_file_content.append("")
pro_file_content.append("BOOST_SOURCES=$$PWD/../..")
pro_file_content.append("INCLUDEPATH += $$BOOST_SOURCES")
pro_file_content.append("INCLUDEPATH += $$PWD/include")
@ -42,9 +43,7 @@ def make(src_dir, modules, build_platform="android", qmake_addon=""):
pro_file_content.append("DESTDIR = $$BOOST_SOURCES/../build/" + build_platform + "/lib/$$CORE_BUILDS_PLATFORM_PREFIX")
base.save_as_script(module_dir + "/" + module + ".pro", pro_file_content)
os.chdir(module_dir)
qmake.make_all_platforms(module_dir + "/" + module + ".pro", qmake_addon)
os.environ.clear()
os.environ.update(old_env)
build.make_pro_file("./", module + ".pro")
os.chdir(old_cur)
return

View File

@ -5,13 +5,6 @@ sys.path.append('../..')
import config
import base
import os
import glob
def clear_module():
for child in glob.glob("./*"):
if base.is_dir(child):
base.delete_dir(child)
return
def make():
print("[fetch & build]: cef")
@ -20,77 +13,56 @@ def make():
old_cur = os.getcwd()
os.chdir(base_dir)
base.check_module_version("2", clear_module)
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64", "win_arm64", "linux_arm64"]
platforms = ["win_64", "win_32", "win_64_xp", "win_32_xp", "linux_64", "linux_32", "mac_64", "mac_arm64"]
url = "http://d2ettrnqo7v976.cloudfront.net/cef/4280/"
for platform in platforms:
if not config.check_option("platform", platform):
continue
url = "https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/cef/"
archive_name = "./cef_binary.7z"
if (-1 != platform.find("_xp")):
url += "4280/"
archive_name = "./cef_binary_xp.7z"
elif (config.check_option("config", "cef_version_107")):
url += "5304/"
archive_name = "./cef_binary_107.7z"
elif ("mac_64" == platform) and (config.check_option("config", "use_v8")):
url += "5060/"
archive_name = "./cef_binary_103.7z"
else:
url += "5414/"
url_platform = (url + platform + "/cef_binary.7z")
archive_name_data = archive_name + ".data"
if not base.is_dir(platform):
base.create_dir(platform)
os.chdir(platform)
data_url = base.get_file_last_modified_url(url_platform)
old_data_url = base.readFile(archive_name_data)
build_dir_name = "build"
if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")):
build_dir_name = "build_107"
if ("mac_64" == platform) and (config.check_option("config", "use_v8")):
build_dir_name = "build_103"
data_url = base.get_file_last_modified_url(url_platform)
old_data_url = base.readFile("./cef_binary.7z.data")
if (data_url != old_data_url):
if base.is_file(archive_name):
base.delete_file(archive_name)
if base.is_dir(build_dir_name):
base.delete_dir(build_dir_name)
if base.is_file("./cef_binary.7z"):
base.delete_file("./cef_binary.7z")
if base.is_dir("build"):
base.delete_dir("build")
if base.is_dir(build_dir_name):
if base.is_dir("build"):
os.chdir(base_dir)
continue
# download
if not base.is_file(archive_name):
base.download(url_platform, archive_name)
if not base.is_file("./cef_binary.7z"):
base.download(url_platform, "./cef_binary.7z")
# extract
base.extract(archive_name, "./")
base.extract("./cef_binary.7z", "./")
base.delete_file(archive_name_data)
base.writeFile(archive_name_data, data_url)
base.delete_file("./cef_binary.7z.data")
base.writeFile("./cef_binary.7z.data", data_url)
base.create_dir("./" + build_dir_name)
base.create_dir("./build")
# deploy
if (0 != platform.find("mac")):
base.copy_files("cef_binary/Release/*", "build/")
base.copy_files("cef_binary/Resources/*", "build/")
if (0 == platform.find("linux")):
base.cmd("chmod", ["a+xr", "build/locales"])
if (0 == platform.find("mac")):
base.cmd("mv", ["Chromium Embedded Framework.framework", build_dir_name + "/Chromium Embedded Framework.framework"])
base.delete_dir("./Chromium Embedded Framework.framework")
else:
base.copy_files("cef_binary/Release/*", build_dir_name + "/")
base.copy_files("cef_binary/Resources/*", build_dir_name + "/")
if (0 == platform.find("linux")):
base.cmd("chmod", ["a+xr", build_dir_name + "/locales"])
base.delete_dir("./cef_binary")
base.cmd("mv", ["Chromium Embedded Framework.framework", "build/Chromium Embedded Framework.framework"])
os.chdir(base_dir)

View File

@ -2,19 +2,21 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import subprocess
import os
import base
import curl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/curl"
old_cur = os.getcwd()
os.chdir(path)
if (-1 != config.option("platform").find("android")):
curl_android.make()
if base.is_dir(path + "/build/android"):
os.chdir(old_cur)
return
subprocess.call(["./build-android-curl.sh"])
elif (-1 != config.option("platform").find("ios")):
if base.is_dir(path + "/build/ios"):
os.chdir(old_cur)

View File

@ -8,7 +8,7 @@ import os
def clean():
if base.is_dir("glew-2.1.0"):
base.delete_dir("glew-2.1.0")
base.delete_dir("glew-2.1.0");
return
def make():
@ -16,7 +16,7 @@ def make():
return
if not config.check_option("module", "mobile"):
return
return;
print("[fetch & build]: glew")
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/glew"
@ -26,7 +26,7 @@ def make():
base.common_check_version("glew", "1", clean)
if not base.is_dir("glew-2.1.0"):
base.download("https://github.com/ONLYOFFICE-data/build_tools_data/raw/refs/heads/master/glew/glew-2.1.0-win32.zip", "./archive.zip")
base.download("https://deac-ams.dl.sourceforge.net/project/glew/glew/2.1.0/glew-2.1.0-win32.zip", "./archive.zip")
base.extract("./archive.zip", "./")
base.delete_file("./archive.zip")

View File

@ -1,19 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
def make():
    """Fetch googletest v1.13.0 into core/Common/3dParty/googletest (clone once, skip if present)."""
    print("[fetch]: googletest")
    module_dir = base.get_script_dir() + "/../../core/Common/3dParty/googletest"
    saved_cwd = os.getcwd()
    os.chdir(module_dir)
    if not base.is_dir("googletest"):
        clone_args = ["clone", "https://github.com/google/googletest.git", "-b", "v1.13.0"]
        base.cmd("git", clone_args)
    os.chdir(saved_cwd)
    return

View File

@ -1,15 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
def make():
    # Delegate to the harfbuzz module's own make.py, executed from its directory.
    print("[fetch & build]: harfbuzz")
    base.cmd_in_dir(base.get_script_dir() + "/../../core/Common/3dParty/harfbuzz", "./make.py")
    return

if __name__ == '__main__':
    # manual compile
    make()

View File

@ -1,455 +0,0 @@
import sys
sys.path.append('../..')
import base
import os
import config
# NOTE:
# - requires CMake >= 3.21, < 4.0.0

# libs versions
X265_VERSION = "4.1"       # x265 HEVC encoder
DE265_VERSION = "1.0.16"   # libde265 HEVC decoder
AOM_VERSION = "3.12.0"     # AOM AV1 codec (AVIF support)
# 1.18.2 - the latest version of libheif supporting C++11 builds (as for now)
HEIF_VERSION = "1.18.2"

# ios cmake toolchain (fetched on demand in make())
IOS_CMAKE_VERSION = "4.5.0"
IOS_CMAKE_TOOLCHAIN_FILE = base.get_script_dir() + "/../../core/Common/3dParty/heif/ios-cmake/ios.toolchain.cmake"

# android cmake toolchain — NOTE: reads ANDROID_NDK_ROOT at import time
ANDROID_CMAKE_TOOLCHAIN_FILE = base.get_env("ANDROID_NDK_ROOT") + "/build/cmake/android.toolchain.cmake"

# linux arm64 cmake toolchains (system cross-compiler vs. custom sysroot)
LINUX_SYSTEM_AARCH64_TOOLCHAIN_FILE = base.get_script_dir() + "/../tools/linux/sysroot/system-aarch64.toolchain.cmake"
LINUX_CUSTOM_SYSROOT_TOOLCHAIN_FILE = base.get_script_dir() + "/../tools/linux/sysroot/custom-sysroot.toolchain.cmake"

# NOTE(review): declared but not referenced in this file's visible code — confirm before removing
OLD_ENV = dict()
def get_vs_version():
    """Map the configured "vs-version" option to a CMake Visual Studio generator suffix."""
    if config.option("vs-version") == "2019":
        return "16 2019"
    return "14 2015"
def get_xcode_sdk(platform):
    """Return the Xcode SDK name for an ios platform id ("iphonesimulator" for simulator targets)."""
    return "iphonesimulator" if "simulator" in platform else "iphoneos"
def fetch_repo(repo_url, branch_or_tag):
    # Shallow-clone a single branch/tag of repo_url into the current directory.
    base.cmd("git", ["clone", "--depth", "1", "--branch", branch_or_tag, repo_url])
    return
def get_build_dir(base_dir, repo_dir, platform, build_type):
    """Build-output directory: <base_dir>/<repo_dir>/build/<platform>/<build_type lowercased>."""
    path_parts = (base_dir, repo_dir, "build", platform, build_type.lower())
    return os.path.join(*path_parts)
# general build function that builds for ONE platform (supposing we are located in the build directory)
# general build function that builds for ONE platform (supposing we are located in the build directory)
def build_with_cmake(platform, cmake_args, build_type):
    """Configure and build the project in the current directory with CMake.

    platform   -- target platform id (e.g. "win_64", "linux_arm64", "ios", "android_x86").
    cmake_args -- base CMake arguments shared by all platforms; NOT modified here.
    build_type -- "Release"/"Debug"; passed to --config for multi-config generators.
    """
    # Platform-specific additions. FIX: the linux sysroot/arm64 branches previously
    # appended to `cmake_args` itself, mutating the caller's shared list so toolchain
    # arguments accumulated across successive per-platform builds; all extensions now
    # go into cmake_args_ext.
    cmake_args_ext = []
    # WINDOWS
    if "win" in platform:
        cmake_args_ext = [
            "-G", f"Visual Studio {get_vs_version()}"
        ]
        if platform == "win_64" or platform == "win_64_xp":
            cmake_args_ext += ["-A", "x64"]
        elif platform == "win_32" or platform == "win_32_xp":
            cmake_args_ext += ["-A", "Win32"]
        elif platform == "win_arm64":
            cmake_args_ext += ["-A", "ARM64"]
    # LINUX, MAC
    elif "linux" in platform or "mac" in platform:
        cmake_args_ext = [
            "-G", "Unix Makefiles",
            "-DCMAKE_POSITION_INDEPENDENT_CODE=ON"  # on UNIX we need to compile with fPIC
        ]
        if platform == "mac_64":
            cmake_args_ext += ["-DCMAKE_OSX_DEPLOYMENT_TARGET=10.11", "-DCMAKE_OSX_ARCHITECTURES=x86_64"]
        elif platform == "mac_arm64":
            cmake_args_ext += ["-DCMAKE_OSX_DEPLOYMENT_TARGET=11.0", "-DCMAKE_OSX_ARCHITECTURES=arm64"]
        elif config.option("sysroot") != "":
            # force use custom CXXFLAGS with Release/Debug build
            cmake_args_ext += ["-DCMAKE_TOOLCHAIN_FILE=" + LINUX_CUSTOM_SYSROOT_TOOLCHAIN_FILE]
        elif platform == "linux_arm64" and not base.is_os_arm():
            cmake_args_ext += ["-DCMAKE_TOOLCHAIN_FILE=" + LINUX_SYSTEM_AARCH64_TOOLCHAIN_FILE]
    # IOS
    elif "ios" in platform:
        cmake_args_ext = [
            "-G", "Xcode",
            "-DCMAKE_TOOLCHAIN_FILE=" + IOS_CMAKE_TOOLCHAIN_FILE,
            "-DDEPLOYMENT_TARGET=11.0"
        ]
        if platform == "ios":
            cmake_args_ext += ["-DPLATFORM=OS64"]
        elif platform == "ios_simulator":
            cmake_args_ext += ["-DPLATFORM=SIMULATOR64COMBINED"]
    # ANDROID
    elif "android" in platform:
        cmake_args_ext = [
            "-G", "Unix Makefiles",
            "-DCMAKE_TOOLCHAIN_FILE=" + ANDROID_CMAKE_TOOLCHAIN_FILE,
            "-DCMAKE_POSITION_INDEPENDENT_CODE=ON"
        ]
        def get_cmake_args_android(arch, api_level):
            # ABI + minimum API level consumed by the NDK toolchain file
            return [
                "-DANDROID_ABI=" + arch,
                "-DANDROID_NATIVE_API_LEVEL=" + api_level
            ]
        if platform == "android_arm64_v8a":
            cmake_args_ext += get_cmake_args_android("arm64-v8a", "21")
        elif platform == "android_armv7":
            cmake_args_ext += get_cmake_args_android("armeabi-v7a", "16")
        elif platform == "android_x86":
            cmake_args_ext += get_cmake_args_android("x86", "16")
        elif platform == "android_x86_64":
            cmake_args_ext += get_cmake_args_android("x86_64", "21")
    # env setup for custom sysroot
    if config.option("sysroot") != "":
        base.set_sysroot_env("linux_arm64")
    # run cmake
    base.cmd("cmake", cmake_args + cmake_args_ext)
    # build: plain make for Makefile generators, cmake --build otherwise (VS/Xcode)
    if "Unix Makefiles" in cmake_args_ext:
        base.cmd("make", ["-j4"])
    else:
        base.cmd("cmake", ["--build", ".", "--config", build_type])
    if config.option("sysroot") != "":
        base.restore_sysroot_env()
    return
# general make function that calls `build_func` callback for configured platform(s) with specified cmake arguments
# general make function that calls `build_func` callback for configured platform(s) with specified cmake arguments
def make_common(build_func, cmake_args):
    """Invoke build_func(platform, cmake_args) for each platform enabled in config.

    Desktop targets (win/linux/mac) are gated on the host OS; ios and android
    targets are driven purely by the "platform" option and are checked in
    addition to the host's own targets.
    """
    host = base.host_platform()
    # WINDOWS — the *_xp variants reuse the corresponding non-xp build
    if "windows" == host:
        if config.check_option("platform", "win_64") or config.check_option("platform", "win_64_xp"):
            build_func("win_64", cmake_args)
        if config.check_option("platform", "win_32") or config.check_option("platform", "win_32_xp"):
            build_func("win_32", cmake_args)
        if config.check_option("platform", "win_arm64"):
            build_func("win_arm64", cmake_args)
    # LINUX
    elif "linux" == host:
        for target in ("linux_64", "linux_arm64"):
            if config.check_option("platform", target):
                build_func(target, cmake_args)
    # MAC
    elif "mac" == host:
        for target in ("mac_64", "mac_arm64"):
            if config.check_option("platform", target):
                build_func(target, cmake_args)
    # IOS: device (arm64) build plus simulator (x86_64 + arm64 FAT) build
    if -1 != config.option("platform").find("ios"):
        build_func("ios", cmake_args)
        build_func("ios_simulator", cmake_args)
    # ANDROID ABIs
    if -1 != config.option("platform").find("android"):
        android_targets = ("android_arm64_v8a", "android_armv7", "android_x86", "android_x86_64")
        for target in android_targets:
            if config.check_option("platform", target):
                build_func(target, cmake_args)
    return
def make_x265(base_dir, build_type):
    """Fetch and build the x265 HEVC encoder as a static lib for all configured platforms."""
    # fetch lib repo
    if not base.is_dir("x265_git"):
        fetch_repo("https://bitbucket.org/multicoreware/x265_git.git", f"Release_{X265_VERSION}")
    # fix x265 version detection so it reads version from x265Version.txt instead of parsing it from .git
    base.replaceInFile(
        base_dir + "/x265_git/source/cmake/Version.cmake",
        "elseif(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/../x265Version.txt)",
        "endif()\n if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/../x265Version.txt)"
    )
    # prepare cmake args
    cmake_dir = base_dir + "/x265_git/source"
    cmake_args = [
        cmake_dir,
        "-DCMAKE_BUILD_TYPE=" + build_type,
        "-DENABLE_CLI=OFF",       # do not build standalone CLI app
        "-DENABLE_SHARED=OFF",    # do not build shared libs
        "-DENABLE_ASSEMBLY=OFF",  # disable assembly optimizations
        "-DENABLE_LIBNUMA=OFF",   # disable libnuma usage (affects Linux only)
    ]
    # lib build function (invoked once per enabled platform by make_common)
    def build_x265(platform, cmake_args):
        # check if target lib has already been built
        build_dir = get_build_dir(base_dir, "x265_git", platform, build_type)
        if platform.find("win") != -1:
            target_lib = os.path.join(build_dir, build_type, "x265-static.lib")
        else:
            target_lib = os.path.join(build_dir, "libx265.a")
        if base.is_file(target_lib):
            return
        # go to the build directory
        base.create_dir(build_dir)
        os.chdir(build_dir)
        # run build
        build_with_cmake(platform, cmake_args, build_type)
        # for iOS there is no target for building libx265.a, so we need to form it ourselves from libcommon.a and libencoder.a
        if platform.find("ios") != -1:
            xcode_sdk = get_xcode_sdk(platform)
            base.cmd("libtool", [
                "-static",
                "-o", "libx265.a",
                f"build/common.build/{build_type}-{xcode_sdk}/libcommon.a",
                f"build/encoder.build/{build_type}-{xcode_sdk}/libencoder.a"
            ])
        # copy header next to the built library
        base.copy_file(base_dir + "/x265_git/source/x265.h", build_dir)
        # reset directory
        os.chdir(base_dir)
        return
    make_common(build_x265, cmake_args)
    return
def make_de265(base_dir, build_type):
    """Fetch and build the libde265 HEVC decoder as a static lib for all configured platforms."""
    # fetch lib repo
    if not base.is_dir("libde265"):
        fetch_repo("https://github.com/strukturag/libde265.git", f"v{DE265_VERSION}")
    # prepare cmake args
    cmake_dir = base_dir + "/libde265"
    cmake_args = [
        cmake_dir,
        "-DCMAKE_BUILD_TYPE=" + build_type,
        "-DBUILD_SHARED_LIBS=OFF",  # do not build shared libs
        "-DENABLE_SDL=OFF",         # disable SDL
        "-DENABLE_DECODER=OFF",     # do not build decoder CLI executable
        "-DENABLE_ENCODER=OFF",     # do not build encoder CLI executable
    ]
    # lib build function (invoked once per enabled platform by make_common)
    def build_de265(platform, cmake_args):
        # check if target lib has already been built
        build_dir = get_build_dir(base_dir, "libde265", platform, build_type)
        if platform.find("win") != -1:
            target_lib = os.path.join(build_dir, "libde265", build_type, "libde265.lib")
        else:
            target_lib = os.path.join(build_dir, "libde265/libde265.a")
        if base.is_file(target_lib):
            return
        # go to the build directory
        base.create_dir(build_dir)
        os.chdir(build_dir)
        # run build
        build_with_cmake(platform, cmake_args, build_type)
        # for ios copy target library from the default (Xcode) build path
        if platform.find("ios") != -1:
            xcode_sdk = get_xcode_sdk(platform)
            base.copy_file(f"libde265/{build_type}-{xcode_sdk}/libde265.a", "libde265")
        # copy header next to the built library
        base.copy_file(base_dir + "/libde265/libde265/de265.h", "libde265")
        # reset directory
        os.chdir(base_dir)
        return
    make_common(build_de265, cmake_args)
    return
def make_aom(base_dir, build_type):
    """Fetch and build the AOM AV1 codec (for the AVIF image format) as a static lib.

    base_dir   -- the heif 3dParty root; sources are cloned into <base_dir>/aom.
    build_type -- "Release" or "Debug".
    """
    # fetch lib repo
    if not base.is_dir("aom"):
        fetch_repo("https://aomedia.googlesource.com/aom", f"v{AOM_VERSION}")
    # prepare cmake args
    cmake_dir = base_dir + "/aom"
    cmake_args = [
        cmake_dir,
        "-DCMAKE_BUILD_TYPE=" + build_type,
        "-DBUILD_SHARED_LIBS=OFF",  # do not build shared libs
        "-DENABLE_SDL=OFF",         # disable SDL
        "-DENABLE_DECODER=ON",      # enable AOM V1 decoder (for AVIF image format)
        "-DENABLE_ENCODER=ON",      # enable AOM V1 encoder (for AVIF image format)
        "-DENABLE_TESTS=OFF",       # do not build tests
        "-DENABLE_DOCS=OFF",        # do not build docs
        "-DENABLE_EXAMPLES=OFF",    # do not build examples
        "-DENABLE_TOOLS=OFF",       # do not build lib tools
    ]
    # lib build function (invoked once per enabled platform by make_common)
    def build_aom(platform, cmake_args):
        # check if target lib has already been built
        build_dir = get_build_dir(base_dir, "aom", platform, build_type)
        if platform.find("win") != -1:
            target_lib = os.path.join(build_dir, build_type, "aom.lib")
        else:
            target_lib = os.path.join(build_dir, "aom.a")
        if base.is_file(target_lib):
            return
        # go to the build directory
        base.create_dir(build_dir)
        os.chdir(build_dir)
        # run build
        build_with_cmake(platform, cmake_args, build_type)
        # for ios copy target library from the default build path
        if platform.find("ios") != -1:
            xcode_sdk = get_xcode_sdk(platform)
            # FIX: was f"/{build_type}-{xcode_sdk}/aom.a" — an absolute path that can
            # never exist. The Xcode generator places outputs under
            # <build_dir>/<config>-<sdk>/ and we are already chdir'ed into build_dir,
            # matching the de265/heif copy steps.
            base.copy_file(f"{build_type}-{xcode_sdk}/aom.a", build_dir)
        # reset directory
        os.chdir(base_dir)
        return
    make_common(build_aom, cmake_args)
    return
def make_heif(base_dir, build_type):
    """Fetch and build libheif (static) for all configured platforms, linking
    against the x265/de265/aom libraries built by the sibling make_* functions."""
    # fetch lib repo
    if not base.is_dir("libheif"):
        fetch_repo("https://github.com/strukturag/libheif.git", f"v{HEIF_VERSION}")
    # do not build heifio module
    base.replaceInFile(
        base_dir + "/libheif/CMakeLists.txt",
        "add_subdirectory(heifio)",
        "# add_subdirectory(heifio)"
    )
    # disable doxygen docs generation even when doxygen is installed
    base.replaceInFile(
        base_dir + "/libheif/CMakeLists.txt",
        "if (DOXYGEN_FOUND)",
        "if (FALSE)"
    )
    # prepare cmake args
    cmake_dir = base_dir + "/libheif"
    cmake_args = [
        cmake_dir,
        "--preset=release-noplugins",  # preset to disable plugins system
        "-DCMAKE_BUILD_TYPE=" + build_type,
        "-DBUILD_SHARED_LIBS=OFF",  # do not build shared libs
        "-DWITH_LIBSHARPYUV=OFF",   # do not build libsharpyuv (for RGB <--> YUV color space conversions)
        "-DWITH_AOM_DECODER=ON",    # enable AOM V1 decoder (for AVIF image format)
        "-DWITH_AOM_ENCODER=ON",    # enable AOM V1 encoder (for AVIF image format)
        "-DWITH_GDK_PIXBUF=OFF",    # do not build gdk-pixbuf plugin (UNIX only)
        "-DWITH_GNOME=OFF",         # do not build gnome plugin (Linux only)
        "-DWITH_EXAMPLES=OFF",      # do not build examples
        "-DWITH_EXAMPLE_HEIF_VIEW=OFF",  # do not build heif-view CLI tool
        "-DWITH_X265=ON",           # enable x265 codec
        "-DWITH_LIBDE265=ON",       # enable de265 codec
        "-DCMAKE_CXX_FLAGS=-DLIBDE265_STATIC_BUILD",  # add macro definition to properly compile with de265 static library
        "-DCMAKE_C_FLAGS=-DLIBDE265_STATIC_BUILD",    # same ^
    ]
    # lib build function (invoked once per enabled platform by make_common)
    def build_heif(platform, cmake_args):
        # check if target lib has already been built
        build_dir = get_build_dir(base_dir, "libheif", platform, build_type)
        if platform.find("win") != -1:
            target_lib = os.path.join(build_dir, "libheif", build_type, "heif.lib")
        else:
            target_lib = os.path.join(build_dir, "libheif/libheif.a")
        if base.is_file(target_lib):
            return
        # go to the build directory
        base.create_dir(build_dir)
        os.chdir(build_dir)
        # add paths to dependent libraries and includes to cmake args
        de265_build_dir = get_build_dir(base_dir, "libde265", platform, build_type)
        x265_build_dir = get_build_dir(base_dir, "x265_git", platform, build_type)
        aom_build_dir = get_build_dir(base_dir, "aom", platform, build_type)
        cmake_args_ext = [
            f"-DLIBDE265_INCLUDE_DIR={de265_build_dir}",
            f"-DX265_INCLUDE_DIR={x265_build_dir}",
            f"-DAOM_INCLUDE_DIR={base_dir}/aom"
        ]
        # library file layout differs between MSVC and Makefile builds
        if platform.find("win") != -1:
            cmake_args_ext += [
                f"-DLIBDE265_LIBRARY={de265_build_dir}/libde265/{build_type}/libde265.lib",
                f"-DX265_LIBRARY={x265_build_dir}/{build_type}/x265-static.lib",
                f"-DAOM_LIBRARY={aom_build_dir}/{build_type}/aom.lib"
            ]
        else:
            cmake_args_ext += [
                f"-DLIBDE265_LIBRARY={de265_build_dir}/libde265/libde265.a",
                f"-DX265_LIBRARY={x265_build_dir}/libx265.a",
                f"-DAOM_LIBRARY={aom_build_dir}/aom.a"
            ]
        # run build
        build_with_cmake(platform, cmake_args + cmake_args_ext, build_type)
        # for ios copy target library from the default (Xcode) build path
        if platform.find("ios") != -1:
            xcode_sdk = get_xcode_sdk(platform)
            base.copy_file(f"libheif/{build_type}-{xcode_sdk}/libheif.a", "libheif")
        # reset directory
        os.chdir(base_dir)
        return
    make_common(build_heif, cmake_args)
    return
def clear_module():
    """Remove all fetched third-party source trees so they are re-fetched from scratch."""
    for repo in ("aom", "libde265", "x265_git", "libheif"):
        if base.is_dir(repo):
            base.delete_dir_with_access_error(repo)
    return
def make():
    """Entry point: fetch and build x265, de265, aom and libheif for the configured platforms."""
    print("[fetch & build]: heif")
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/heif"
    old_dir = os.getcwd()
    os.chdir(base_dir)
    # version bump forces clear_module() so everything is re-fetched
    base.check_module_version("2", clear_module)
    build_type = "Release"
    if (-1 != config.option("config").lower().find("debug")):
        build_type = "Debug"
    # fetch custom cmake toolchain for ios
    if -1 != config.option("platform").find("ios"):
        if not base.is_file(IOS_CMAKE_TOOLCHAIN_FILE):
            fetch_repo("https://github.com/leetal/ios-cmake.git", IOS_CMAKE_VERSION)
    # build encoder library
    make_x265(base_dir, build_type)
    # build decoder library
    make_de265(base_dir, build_type)
    # build decoder/encoder library for AVIF image format
    make_aom(base_dir, build_type)
    # build libheif (links against the three libs above)
    make_heif(base_dir, build_type)
    os.chdir(old_dir)
    return

if __name__ == '__main__':
    make()

View File

@ -7,27 +7,7 @@ import base
import os
import subprocess
def clear_module():
directories = ["gumbo-parser", "katana-parser"]
for dir in directories:
if base.is_dir(dir):
base.delete_dir_with_access_error(dir)
def make():
old_cur_dir = os.getcwd()
print("[fetch]: html")
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/html"
os.chdir(base_dir)
base.check_module_version("2", clear_module)
os.chdir(old_cur_dir)
base.cmd_in_dir(base_dir, "python", ["fetch.py"])
return
if __name__ == '__main__':
# manual compile
make()

View File

@ -3,11 +3,6 @@ sys.path.append('../../../scripts')
import base
import os
def clean():
if base.is_dir("hunspell"):
base.delete_dir_with_access_error("hunspell")
return
def make(build_js = True):
old_cur_dir = os.getcwd()
@ -16,8 +11,6 @@ def make(build_js = True):
core_common_dir = base.get_script_dir() + "/../../core/Common"
os.chdir(core_common_dir + "/3dParty/hunspell")
base.common_check_version("hunspell", "1", clean)
base.cmd("python", ["./before.py"])
if (build_js):

View File

@ -1,21 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
def make():
    """Fetch the hyphen library sources into core/Common/3dParty/hyphen (clone once, skip if present)."""
    print("[fetch]: hyphen")
    module_dir = base.get_script_dir() + "/../../core/Common/3dParty/hyphen"
    previous_dir = os.getcwd()
    os.chdir(module_dir)
    if not base.is_dir("hyphen"):
        base.cmd("git", ["clone", "https://github.com/hunspell/hyphen"])
    os.chdir(previous_dir)
    return

View File

@ -2,59 +2,26 @@
import sys
sys.path.append('../..')
sys.path.append('android')
import config
import base
import os
import glob
import icu_android
def fetch_icu(major, minor, target_dir="icu"):
if (base.is_dir("./icu2")):
base.delete_dir_with_access_error("icu2")
base.cmd("git", ["clone", "--depth", "1", "--branch", "release-" + major + "-" + minor, "https://github.com/unicode-org/icu.git", "./icu2"])
base.copy_dir("./icu2/icu4c", target_dir)
base.delete_dir_with_access_error("icu2")
return
def clear_module():
if base.is_dir("icu"):
base.delete_dir_with_access_error("icu")
# remove build
for child in glob.glob("./*"):
if base.is_dir(child):
base.delete_dir(child)
return
def make():
print("[fetch & build]: icu")
if (-1 != config.option("platform").find("android")):
icu_android.make()
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu"
old_cur = os.getcwd()
os.chdir(base_dir)
base.check_module_version("8", clear_module)
if (-1 != config.option("platform").find("android")):
icu_android.make()
os.chdir(base_dir)
icu_major = "74"
icu_major = "58"
icu_minor = "2"
if not base.is_dir("icu"):
fetch_icu(icu_major, icu_minor)
# old version for win_xp
icu_major_old = "58"
icu_minor_old = "3"
if config.check_option("platform", "win_64_xp") or config.check_option("platform", "win_32_xp"):
if not base.is_dir("icu58"):
fetch_icu(icu_major_old, icu_minor_old, "icu58")
base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
if ("windows" == base.host_platform()):
platformToolset = "v140"
@ -65,109 +32,54 @@ def make():
need_platforms.append("win_64")
if (-1 != config.option("platform").find("win_32")):
need_platforms.append("win_32")
if (-1 != config.option("platform").find("win_arm64")):
need_platforms.append("win_64") # for exe files
need_platforms.append("win_arm64")
def build_icu_win(source_dir, out_dir, icu_major):
if base.is_dir(out_dir):
return
compile_bat = []
compile_bat.append("setlocal")
args = {
"win_32" : {
"msbuild_platfrom" : "Win32",
"vcvarsall_arch" : "x86",
"out_bin_dir" : source_dir + "/bin/",
"out_lib_dir" : source_dir + "/lib/"
},
"win_64" : {
"msbuild_platfrom" : "X64",
"vcvarsall_arch" : "x64",
"out_bin_dir" : source_dir + "/bin64/",
"out_lib_dir" : source_dir + "/lib64/"
},
"win_arm64" : {
"msbuild_platfrom" : "ARM64",
"vcvarsall_arch" : "x64_arm64",
"out_bin_dir" : source_dir + "/binARM64/",
"out_lib_dir" : source_dir + "/libARM64/"
}
}
platform_args = args[platform]
compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + platform_args['vcvarsall_arch'])
compile_bat.append("call MSBuild.exe " + source_dir + "/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + platform_args['msbuild_platfrom'])
compile_bat.append("endlocal")
base.run_as_bat(compile_bat)
base.create_dir(out_dir)
base.copy_file(platform_args['out_bin_dir'] + "icudt" + icu_major + ".dll", out_dir)
base.copy_file(platform_args['out_bin_dir'] + "icuuc" + icu_major + ".dll", out_dir)
base.copy_file(platform_args['out_lib_dir'] + "icudt.lib", out_dir)
base.copy_file(platform_args['out_lib_dir'] + "icuuc.lib", out_dir)
for platform in need_platforms:
if not config.check_option("platform", platform) and not config.check_option("platform", platform + "_xp"):
continue
if not (config.check_option("platform", "win_64_xp") or config.check_option("platform", "win_32_xp")):
build_icu_win("icu", platform + "/build", icu_major)
else:
# xp
build_icu_win("icu58", platform + "/build/xp", icu_major_old)
if not base.is_dir(platform + "/build"):
base.create_dir(platform)
compile_bat = []
compile_bat.append("setlocal")
compile_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + ("x86" if base.platform_is_32(platform) else "x64"))
compile_bat.append("call MSBuild.exe icu/source/allinone/allinone.sln /p:Configuration=Release /p:PlatformToolset=" + platformToolset + " /p:Platform=" + ("Win32" if base.platform_is_32(platform) else "X64"))
compile_bat.append("endlocal")
base.run_as_bat(compile_bat)
bin_dir = "icu/bin64/" if ("win_64" == platform) else "icu/bin/"
lib_dir = "icu/lib64/" if ("win_64" == platform) else "icu/lib/"
base.create_dir(platform + "/build")
base.copy_file(bin_dir + "icudt" + icu_major + ".dll", platform + "/build/")
base.copy_file(bin_dir + "icuuc" + icu_major + ".dll", platform + "/build/")
base.copy_file(lib_dir + "icudt.lib", platform + "/build/")
base.copy_file(lib_dir + "icuuc.lib", platform + "/build/")
os.chdir(old_cur)
return
if ("linux" == base.host_platform()):
if not base.is_file("./icu/source/i18n/digitlst.cpp.bak"):
base.copy_file("./icu/source/i18n/digitlst.cpp", "./icu/source/i18n/digitlst.cpp.bak")
base.replaceInFile("./icu/source/i18n/digitlst.cpp", "xlocale", "locale")
if base.is_dir(base_dir + "/linux_64"):
base.delete_dir(base_dir + "/linux_64")
if base.is_dir(base_dir + "/linux_arm64"):
base.delete_dir(base_dir + "/linux_arm64")
if not base.is_dir(base_dir + "/linux_64"):
base.create_dir(base_dir + "/icu/cross_build")
os.chdir("icu/cross_build")
command_configure = "./../source/runConfigureICU"
command_compile_addon = "-static-libstdc++ -static-libgcc"
if "1" == config.option("use-clang"):
command_configure = "CXXFLAGS=-stdlib=libc++ " + command_configure
command_compile_addon = "-stdlib=libc++"
if "" == config.option("sysroot"):
base.cmd(command_configure, ["Linux", "--prefix=" + base_dir + "/icu/cross_build_install"])
base.replaceInFile("./../source/icudefs.mk.in", "LDFLAGS = @LDFLAGS@ $(RPATHLDFLAGS)", "LDFLAGS = @LDFLAGS@ $(RPATHLDFLAGS) " + command_compile_addon)
else:
base.set_sysroot_env("linux_64")
sysroot_path = config.option("sysroot_linux_64")
sysroot_path_bin = config.get_custom_sysroot_bin("linux_64")
base.cmd_exe("./../source/configure", ["--prefix=" + base_dir + "/icu/cross_build_install",
"CC=" + sysroot_path_bin + "/gcc", "CXX=" + sysroot_path_bin + "/g++",
"AR=" + sysroot_path_bin + "/ar", "RANLIB=" + sysroot_path_bin + "/ranlib",
"CFLAGS=--sysroot=" + sysroot_path,
"CXXFLAGS=--sysroot=" + sysroot_path + " " + command_compile_addon,
"LDFLAGS=--sysroot=" + sysroot_path])
if "" == config.option("sysroot"):
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
else:
base.cmd_exe("make", ["-j4"])
base.cmd_exe("make", ["install"], True)
base.restore_sysroot_env()
base.cmd("./../source/runConfigureICU", ["Linux", "--prefix=" + base_dir + "/icu/cross_build_install"])
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
base.create_dir(base_dir + "/linux_64")
base.create_dir(base_dir + "/linux_64/build")
base.copy_file(base_dir + "/icu/cross_build_install/lib/libicudata.so." + icu_major + "." + icu_minor, base_dir + "/linux_64/build/libicudata.so." + icu_major)
base.copy_file(base_dir + "/icu/cross_build_install/lib/libicuuc.so." + icu_major + "." + icu_minor, base_dir + "/linux_64/build/libicuuc.so." + icu_major)
base.copy_dir(base_dir + "/icu/cross_build_install/include", base_dir + "/linux_64/build/include")
if config.check_option("platform", "linux_arm64") and not base.is_dir(base_dir + "/linux_arm64") and not base.is_os_arm():
base.create_dir(base_dir + "/icu/linux_arm64")
os.chdir(base_dir + "/icu/linux_arm64")
compiler_gcc_prefix = base.get_compiler_gcc_prefix("linux_arm64")
if config.option("sysroot") != "":
base.set_sysroot_env("linux_arm64")
base_arm_tool_dir = base.get_prefix_cross_compiler_arm64()
base.cmd("./../source/configure", ["--host=arm-linux", "--prefix=" + base_dir + "/icu/linux_arm64_install", "--with-cross-build=" + base_dir + "/icu/cross_build",
"CC=" + compiler_gcc_prefix + "gcc", "CXX=" + compiler_gcc_prefix + "g++", "AR=" + compiler_gcc_prefix + "ar", "RANLIB=" + compiler_gcc_prefix + "ranlib"])
"CC=" + base_arm_tool_dir + "gcc", "CXX=" + base_arm_tool_dir + "g++", "AR=" + base_arm_tool_dir + "ar", "RANLIB=" + base_arm_tool_dir + "ranlib"])
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
base.create_dir(base_dir + "/linux_arm64")
@ -175,8 +87,6 @@ def make():
base.copy_file(base_dir + "/icu/linux_arm64_install/lib/libicudata.so." + icu_major + "." + icu_minor, base_dir + "/linux_arm64/build/libicudata.so." + icu_major)
base.copy_file(base_dir + "/icu/linux_arm64_install/lib/libicuuc.so." + icu_major + "." + icu_minor, base_dir + "/linux_arm64/build/libicuuc.so." + icu_major)
base.copy_dir(base_dir + "/icu/linux_arm64_install/include", base_dir + "/linux_arm64/build/include")
if config.option("sysroot") != "":
base.restore_sysroot_env()
os.chdir("../..")
@ -193,6 +103,6 @@ def make():
if (-1 != config.option("platform").find("ios")):
if not base.is_dir("build"):
base.bash("./icu_ios")
os.chdir(old_cur)
return

View File

@ -0,0 +1,172 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
# Working directory for the Android ICU build inside the core tree.
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/icu/android"
# NOTE(review): "toolshains" looks like a typo and this variable is unused
# below — build_arch installs toolchains under current_dir + "/toolchain"
# (singular). Confirm before removing.
toolshains_dir = current_dir + "/toolchains"
# ICU release being built (58.2); also embedded in library/data file names.
icu_major = "58"
icu_minor = "2"
# False => build static libs plus an external .dat data archive; True => .so.
icu_is_shared = False
# Original PATH, restored after each per-arch toolchain build.
current_path = base.get_env("PATH")
# Per-ABI naming: "arch" is the NDK toolchain name passed to
# make-standalone-toolchain.sh; "bin" is the tool prefix (clang, ar, ...).
platforms = {
  "arm64" : {
    "arch" : "aarch64-linux-android",
    "bin" : "aarch64-linux-android"
  },
  "arm" : {
    "arch" : "arm-linux-androideabi",
    "bin" : "arm-linux-androideabi"
  },
  "x86_64" : {
    "arch" : "x86_64-linux-android",
    "bin" : "x86_64-linux-android"
  },
  "x86" : {
    "arch" : "x86-linux-android",
    "bin" : "i686-linux-android"
  }
}
def build_arch(arch, api_version):
    # Build ICU for one Android ABI using a standalone NDK toolchain.
    #   arch:        key into the module-level `platforms` dict
    #                ("arm64", "arm", "x86_64", "x86")
    #   api_version: Android API level string for make-standalone-toolchain.sh
    # Side effects: recreates current_dir + "/icu/<arch>", installs a
    # toolchain under current_dir + "/toolchain/<arch>", installs ICU into
    # current_dir + "/build_tmp/<arch>", and temporarily prepends the
    # toolchain bin dir to PATH (restored before returning).
    print("icu build: " + arch + " ----------------------------------------")
    # Always start from a clean per-arch build directory.
    if base.is_dir(current_dir + "/icu/" + arch):
        base.delete_dir(current_dir + "/icu/" + arch)
    base.create_dir(current_dir + "/icu/" + arch)
    os.chdir(current_dir + "/icu/" + arch)
    # Generate a standalone cross toolchain for this ABI.
    base.cmd(base.get_env("ANDROID_NDK_ROOT") + "/build/tools/make-standalone-toolchain.sh", [
        "--platform=android-" + api_version,
        "--install-dir=" + current_dir + "/toolchain/" + arch,
        "--toolchain=" + platforms[arch]["arch"],
        "--force"
    ])
    base.set_env("PATH", current_dir + "/toolchain/" + arch + "/bin:" + current_path)
    tool_prefix = platforms[arch]["bin"]
    # Fix: the previous implementation built one big string and .split() it,
    # which (a) broke if current_dir contained spaces and (b) appended a
    # second "CFLAGS=-fPIC" / "CXXFLAGS=-fPIC" pair for static builds, so
    # configure dropped the earlier "-Os" / "--std=c++11" values. Build an
    # explicit argument list and merge the flags instead.
    cflags = "-Os"
    cxxflags = "--std=c++11"
    configure_args = [
        "--prefix=" + current_dir + "/build_tmp/" + arch,
        "--host=" + tool_prefix,
        "--with-cross-build=" + current_dir + "/icu/cross_build",
        "CC=" + tool_prefix + "-clang",
        "CXX=" + tool_prefix + "-clang++",
        "AR=" + tool_prefix + "-ar",
        "RANLIB=" + tool_prefix + "-ranlib"
    ]
    if not icu_is_shared:
        # Static build: archive data packaging + position-independent code.
        configure_args += ["--enable-static", "--enable-shared=no", "--with-data-packaging=archive"]
        cflags += " -fPIC"
        cxxflags += " -fPIC"
    configure_args += ["CFLAGS=" + cflags, "CXXFLAGS=" + cxxflags]
    base.cmd("../source/configure", configure_args)
    base.cmd("make", ["-j4"])
    base.cmd("make", ["install"])
    # Restore the original PATH so later arch builds start clean.
    base.set_env("PATH", current_path)
    os.chdir(current_dir)
    return
def make():
    # Fetch ICU (icu_major.icu_minor) and cross-build it for the four
    # Android ABIs. Results are assembled under current_dir + "/build":
    # shared .so libraries when icu_is_shared is True, otherwise static .a
    # libraries plus the external ICU data archive.
    if not base.is_dir(current_dir):
        base.create_dir(current_dir)
    # Already built — nothing to do.
    if base.is_dir(current_dir + "/build"):
        return
    current_dir_old = os.getcwd()
    print("[fetch & build]: icu_android")
    os.chdir(current_dir)
    if not base.is_dir("icu"):
        base.cmd("svn", ["export", "https://github.com/unicode-org/icu/tags/release-" + icu_major + "-" + icu_minor + "/icu4c", "./icu", "--non-interactive", "--trust-server-cert"])
        if ("linux" == base.host_platform()):
            # ICU 58 still references xlocale.h, removed from modern glibc.
            base.replaceInFile(current_dir + "/icu/source/i18n/digitlst.cpp", "xlocale", "locale")
        #if ("mac" == base.host_platform()):
        #  base.replaceInFile(current_dir + "/icu/source/tools/pkgdata/pkgdata.cpp", "cmd, \"%s %s -o %s%s %s %s%s %s %s\",", "cmd, \"%s %s -o %s%s %s %s %s %s %s\",")
    # Host-side build first: the per-ABI cross builds reference it via
    # --with-cross-build (ICU needs host tools to generate its data).
    if not base.is_dir(current_dir + "/icu/cross_build"):
        base.create_dir(current_dir + "/icu/cross_build")
        os.chdir(current_dir + "/icu/cross_build")
        base.cmd("../source/runConfigureICU", ["Linux" if "linux" == base.host_platform() else "MacOSX",
            "--prefix=" + current_dir + "/icu/cross_build", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
        base.cmd("make", ["-j4"])
        base.cmd("make", ["install"], True)
        os.chdir(current_dir)
    build_arch("arm64", "21")
    build_arch("arm", "16")
    build_arch("x86_64", "21")
    build_arch("x86", "16")
    os.chdir(current_dir)
    base.create_dir(current_dir + "/build")
    # Headers are identical across ABIs; take the arm64 set.
    base.copy_dir(current_dir + "/build_tmp/arm64/include", current_dir + "/build/include")
    # ICU little-endian data archive name, derived from the major version.
    # Fix: this was hard-coded as "icudt58l.dat", which silently breaks the
    # static-build copies whenever icu_major is bumped.
    icu_data_file = "icudt" + icu_major + "l.dat"
    if icu_is_shared:
        base.create_dir(current_dir + "/build/arm64_v8a")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/arm64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/arm64_v8a/libicuuc.so")
        base.create_dir(current_dir + "/build/armv7")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/arm/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/armv7/libicuuc.so")
        base.create_dir(current_dir + "/build/x86_64")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/x86_64/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86_64/libicuuc.so")
        base.create_dir(current_dir + "/build/x86")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicudata.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicudata.so")
        base.copy_file(current_dir + "/build_tmp/x86/lib/libicuuc.so." + icu_major + "." + icu_minor, current_dir + "/build/x86/libicuuc.so")
        # patch elf information: build patchelf from source, then rewrite
        # sonames / DT_NEEDED entries so the Android loader resolves the
        # unversioned library names. (Was 12 copy-pasted calls; same
        # commands, same order, per ABI.)
        os.chdir(current_dir + "/build")
        base.cmd("git", ["clone", "https://github.com/NixOS/patchelf.git"])
        os.chdir("./patchelf")
        base.cmd("./bootstrap.sh")
        base.cmd("./configure", ["--prefix=" + current_dir + "/build/patchelf/usr"])
        base.cmd("make")
        base.cmd("make", ["install"])
        for abi in ["arm64_v8a", "armv7", "x86_64", "x86"]:
            base.cmd("./usr/bin/patchelf", ["--set-soname", "libicudata.so", "./../" + abi + "/libicudata.so"])
            base.cmd("./usr/bin/patchelf", ["--set-soname", "libicuuc.so", "./../" + abi + "/libicuuc.so"])
            base.cmd("./usr/bin/patchelf", ["--replace-needed", "libicudata.so." + icu_major, "libicudata.so", "./../" + abi + "/libicuuc.so"])
        base.delete_dir(current_dir + "/build/patchelf")
    if not icu_is_shared:
        # Static libs plus the data archive, per ABI (build dir, tmp dir).
        for abi, tmp in [("arm64_v8a", "arm64"), ("armv7", "arm"), ("x86_64", "x86_64"), ("x86", "x86")]:
            base.create_dir(current_dir + "/build/" + abi)
            base.copy_file(current_dir + "/build_tmp/" + tmp + "/lib/libicudata.a", current_dir + "/build/" + abi + "/libicudata.a")
            base.copy_file(current_dir + "/build_tmp/" + tmp + "/lib/libicuuc.a", current_dir + "/build/" + abi + "/libicuuc.a")
            base.copy_file(current_dir + "/icu/" + tmp + "/data/out/" + icu_data_file, current_dir + "/build/" + abi + "/" + icu_data_file)
    os.chdir(current_dir_old)
    return

View File

@ -5,28 +5,36 @@ sys.path.append('../..')
import base
import os
def change_icu_defs(arch):
old_env = dict(os.environ)
def change_icu_defs(current_dir, arch):
icudef_file = current_dir + "/icudefs.mk"
icudef_file_old = current_dir + "/icudefs.mk.back"
param = "-arch x86_64"
if arch == "arm64":
param = "-arch arm64"
param = "-arch arm64 -isysroot " + base.find_mac_sdk()
param += " -isysroot " + base.find_mac_sdk()
param += " -mmacosx-version-min=10.12"
os.environ["CFLAGS"] = param
os.environ["CXXFLAGS"] = param + " --std=c++11"
os.environ["LDFLAGS"] = param
base.copy_file(icudef_file, icudef_file_old)
return old_env
base.replaceInFile(icudef_file, "CFLAGS = ", "CFLAGS = " + param + " ")
base.replaceInFile(icudef_file, "CXXFLAGS = ", "CXXFLAGS = " + param + " ")
base.replaceInFile(icudef_file, "RPATHLDFLAGS =", "RPATHLDFLAGS2 =")
base.replaceInFile(icudef_file, "LDFLAGS = ", "LDFLAGS = " + param + " ")
base.replaceInFile(icudef_file, "RPATHLDFLAGS2 =", "RPATHLDFLAGS =")
def restore_icu_defs(old_env):
os.environ.clear()
os.environ.update(old_env)
return
icu_major = "74"
def restore_icu_defs(current_dir):
icudef_file = current_dir + "/icudefs.mk"
icudef_file_old = current_dir + "/icudefs.mk.back"
base.delete_file(icudef_file)
base.copy_file(icudef_file_old, icudef_file)
base.delete_file(icudef_file_old)
return
icu_major = "58"
icu_minor = "2"
current_dir_old = os.getcwd()
@ -38,33 +46,29 @@ if not base.is_dir(current_dir + "/mac_cross_64"):
base.create_dir(current_dir + "/mac_cross_64")
os.chdir(current_dir + "/mac_cross_64")
old_env = change_icu_defs("x86_64")
base.cmd("../icu/source/runConfigureICU", ["MacOSX",
"--prefix=" + current_dir + "/mac_64_install", "--enable-static"])
"--prefix=" + current_dir + "/mac_cross_64", "CFLAGS=-Os CXXFLAGS=--std=c++11"])
change_icu_defs(current_dir + "/mac_cross_64", "x86_64")
base.cmd("make", ["-j4"])
base.cmd("make", ["install"], True)
restore_icu_defs(old_env)
restore_icu_defs(current_dir + "/mac_cross_64")
os.chdir(current_dir)
os.chdir(current_dir + "/icu/source")
old_env = change_icu_defs("arm64")
base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm_64",
"--with-cross-build=" + current_dir + "/mac_cross_64", "VERBOSE=1"])
addon = []
if not base.is_os_arm():
addon = ["--host=aarch64-apple-darwin"]
base.cmd("./configure", ["--prefix=" + current_dir + "/mac_arm64_install",
"--with-cross-build=" + current_dir + "/mac_cross_64", "--enable-static", "VERBOSE=1"] + addon)
change_icu_defs(current_dir + "/icu/source", "arm64")
base.cmd("make", ["-j4"])
base.cmd("make", ["install"])
restore_icu_defs(old_env)
restore_icu_defs(current_dir + "/icu/source")
os.chdir(current_dir)
@ -80,26 +84,15 @@ base.create_dir(current_dir + "/mac_64/build")
base.create_dir(current_dir + "/mac_arm64")
base.create_dir(current_dir + "/mac_arm64/build")
base.copy_dir(current_dir + "/mac_64_install/include", current_dir + "/mac_64/build/include")
# copy shared libs
base.copy_file(current_dir + "/mac_64_install/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicudata." + icu_major + ".dylib")
base.copy_file(current_dir + "/mac_64_install/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicuuc." + icu_major + ".dylib")
# copy static libs
base.copy_file(current_dir + "/mac_64_install/lib/libicudata.a", current_dir + "/mac_64/build")
base.copy_file(current_dir + "/mac_64_install/lib/libicui18n.a", current_dir + "/mac_64/build")
base.copy_file(current_dir + "/mac_64_install/lib/libicuuc.a", current_dir + "/mac_64/build")
base.copy_dir(current_dir + "/mac_cross_64/include", current_dir + "/mac_64/build/include")
base.copy_file(current_dir + "/mac_cross_64/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicudata." + icu_major + ".dylib")
base.copy_file(current_dir + "/mac_cross_64/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_64/build/libicuuc." + icu_major + ".dylib")
base.copy_dir(current_dir + "/mac_arm64_install/include", current_dir + "/mac_arm64/build/include")
# copy shared libs
base.copy_file(current_dir + "/mac_arm64_install/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicudata." + icu_major + ".dylib")
base.copy_file(current_dir + "/mac_arm64_install/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicuuc." + icu_major + ".dylib")
# copy static libs
base.copy_file(current_dir + "/mac_arm64_install/lib/libicudata.a", current_dir + "/mac_arm64/build")
base.copy_file(current_dir + "/mac_arm64_install/lib/libicui18n.a", current_dir + "/mac_arm64/build")
base.copy_file(current_dir + "/mac_arm64_install/lib/libicuuc.a", current_dir + "/mac_arm64/build")
base.copy_dir(current_dir + "/mac_arm_64/include", current_dir + "/mac_arm64/build/include")
base.copy_file(current_dir + "/mac_arm_64/lib/libicudata." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicudata." + icu_major + ".dylib")
base.copy_file(current_dir + "/mac_arm_64/lib/libicuuc." + icu_major + "." + icu_minor + ".dylib", current_dir + "/mac_arm64/build/libicuuc." + icu_major + ".dylib")
base.delete_dir(current_dir + "/mac_cross_64")
base.delete_dir(current_dir + "/mac_64_install")
base.delete_dir(current_dir + "/mac_arm64_install")
base.delete_dir(current_dir + "/mac_arm_64")
os.chdir(current_dir_old)

View File

@ -1,38 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
import subprocess
def clear_module():
    # Remove previously fetched third-party module directories, if present.
    for module_dir in ("glm", "libetonyek", "libodfgen", "librevenge", "mdds"):
        if base.is_dir(module_dir):
            base.delete_dir_with_access_error(module_dir)
def make(use_gperf = True):
    # Fetch (and build) the iWork third-party dependencies via fetch.py.
    # use_gperf: when True, forward the --gperf flag to the fetch script.
    saved_cwd = os.getcwd()
    print("[fetch & build]: iwork")
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/apple"
    # check_module_version expects to run inside the module directory.
    os.chdir(base_dir)
    base.check_module_version("3", clear_module)
    os.chdir(saved_cwd)
    fetch_args = ["fetch.py"] + (["--gperf"] if use_gperf else [])
    base.cmd_in_dir(base_dir, "python", fetch_args)
    return

if __name__ == '__main__':
    # manual compile
    make(False)

View File

@ -126,14 +126,11 @@ def make():
def param_apple(platform, arch):
return ["-G","Xcode", "-DDEPLOYMENT_TARGET=10", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]
def param_apple_ios(platform, arch, params=[]):
return params + ["-G","Xcode", "-DDEPLOYMENT_TARGET=11", "-DENABLE_BITCODE=1", "-DPLATFORM=" + platform, "-DARCHS=" + arch, "-DCMAKE_TOOLCHAIN_FILE=" + CMAKE_TOOLCHAIN_FILE]
if(platform == "ios"):
#build_arch("ios", "armv7", param_apple("OS", "armv7"))
build_arch("ios", "arm64", param_apple_ios("OS64", "arm64"))
#build_arch("ios", "i386", param_apple_ios("SIMULATOR", "i386"))
build_arch("ios", "x86_64", param_apple_ios("SIMULATOR64", "x86_64", ["-DCMAKE_CXX_FLAGS=-std=c++11"]))
build_arch("ios", "armv7", param_apple("OS", "armv7"))
build_arch("ios", "arm64", param_apple("OS64", "arm64"))
build_arch("ios", "i386", param_apple("SIMULATOR", "i386"))
build_arch("ios", "x86_64", param_apple("SIMULATOR64", "x86_64"))
else:
build_arch("mac", "mac_arm64", param_apple("MAC_ARM64", "arm64"))
build_arch("mac", "mac_64", param_apple("MAC", "x86_64"))
@ -147,7 +144,7 @@ def make():
#copy include
prefix_dir = current_dir + "/IXWebSocket/build/ios/"
postfix_dir = ""
if base.is_dir(prefix_dir + "arm64/usr"):
if base.is_dir(prefix_dir + "armv7/usr"):
postfix_dir = "/usr"
if base.is_dir(prefix_dir + "armv7" + postfix_dir + "/include"):
@ -160,16 +157,10 @@ def make():
base.cmd("cp", [ "-r", prefix_dir + "x86_64" + postfix_dir + "/include", current_dir + "/IXWebSocket/build/ios/ixwebsocket-universal"])
# Create fat lib
if (True):
base.cmd("lipo", ["IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
"IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
"-create", "-output",
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
else:
base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
"IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
"-create", "-output",
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
base.cmd("lipo", ["IXWebSocket/build/ios/armv7" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/arm64" + postfix_dir + "/lib/libixwebsocket.a",
"IXWebSocket/build/ios/i386" + postfix_dir + "/lib/libixwebsocket.a", "IXWebSocket/build/ios/x86_64" + postfix_dir + "/lib/libixwebsocket.a",
"-create", "-output",
"IXWebSocket/build/ios/ixwebsocket-universal/lib/libixwebsocket.a"])
elif (-1 != config.option("platform").find("linux")):

View File

@ -1,124 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
def docker_build(image_name, dockerfile_dir, base_dir):
    # Build a throwaway docker image from dockerfile_dir, run it with the
    # vlc source tree mounted at /vlc, then remove the image again.
    base.cmd("docker", ["build", "-t", image_name, dockerfile_dir])
    mount_spec = (base_dir + "/vlc") + ":/vlc"
    base.cmd("docker", ["run", "--rm", "-v", mount_spec, image_name])
    base.cmd("docker", ["image", "rm", image_name])
    return
def form_build_win(src_dir, dest_dir):
    # Lay out a Windows libvlc SDK under dest_dir: headers in include/,
    # import libs, DLLs, plugins and the cache generator in lib/.
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/sdk/include", dest_dir + "/include")
    # form lib dir
    base.create_dir(dest_dir + "/lib")
    # import libraries are renamed: libvlc.lib -> vlc.lib, libvlccore.lib -> vlccore.lib
    base.copy_file(src_dir + "/sdk/lib/libvlc.lib", dest_dir + "/lib/vlc.lib")
    base.copy_file(src_dir + "/sdk/lib/libvlccore.lib", dest_dir + "/lib/vlccore.lib")
    base.copy_dir(src_dir + "/plugins", dest_dir + "/lib/plugins")
    base.copy_file(src_dir + "/libvlc.dll", dest_dir + "/lib")
    base.copy_file(src_dir + "/libvlccore.dll", dest_dir + "/lib")
    base.copy_file(src_dir + "/vlc-cache-gen.exe", dest_dir + "/lib")
    # generate cache file 'plugins.dat' for plugins loading
    base.cmd_exe(dest_dir + "/lib/vlc-cache-gen", [dest_dir + "/lib/plugins"])
    return
def form_build_linux(src_dir, dest_dir):
    # Lay out a Linux libvlc build under dest_dir (include/ + lib/),
    # dropping pkgconfig metadata and the internal libcompat archive.
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/include", dest_dir + "/include")
    # copy and form lib dir
    base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
    base.delete_dir(dest_dir + "/lib/pkgconfig")
    base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
def form_build_mac(src_dir, dest_dir):
    # Lay out a macOS libvlc build under dest_dir and pre-generate the
    # plugin cache so libvlc does not rescan plugins on first start.
    if not base.is_dir(dest_dir):
        base.create_dir(dest_dir)
    # copy include dir
    base.copy_dir(src_dir + "/include", dest_dir + "/include")
    # copy and form lib dir
    base.copy_dir(src_dir + "/lib", dest_dir + "/lib")
    # NOTE(review): the -name pattern carries literal escaped quotes; if
    # base.cmd execs without a shell, find matches names containing quote
    # characters and no *.la files are actually deleted — confirm base.cmd
    # semantics before changing this.
    base.cmd("find", [dest_dir + "/lib", "-name", "\"*.la\"", "-type", "f", "-delete"])
    base.delete_dir(dest_dir + "/lib/pkgconfig")
    base.delete_file(dest_dir + "/lib/vlc/libcompat.a")
    # generate cache file 'plugins.dat' for plugins loading
    base.run_command("DYLD_LIBRARY_PATH=" + dest_dir + "/lib " + dest_dir + "/lib/vlc/vlc-cache-gen " + dest_dir + "/lib/vlc/plugins")
    return
def make():
    # Fetch VLC 3.0.18 and build libvlc for the configured platforms.
    # Windows and linux_64 build inside docker images defined under tools/;
    # mac builds run natively via VLC's own build.sh. Results are laid out
    # under <libvlc>/build/<platform> by the form_build_* helpers.
    print("[fetch & build]: libvlc")
    base_dir = base.get_script_dir() + "/../../core/Common/3dParty/libvlc"
    vlc_dir = base_dir + "/vlc"
    vlc_version = "3.0.18"
    tools_dir = base.get_script_dir() + "/../tools"
    old_cur = os.getcwd()
    os.chdir(base_dir)
    if not base.is_dir(vlc_dir):
        # temporary disable auto CRLF for Windows
        # NOTE(review): if core.autocrlf was never set, autocrlf_old is
        # presumably an empty string, and the restore below sets the option
        # to "" — confirm base.run_command / git behaviour for that case.
        if "windows" == base.host_platform():
            autocrlf_old = base.run_command("git config --global core.autocrlf")['stdout']
            base.cmd("git", ["config", "--global", "core.autocrlf", "false"])
        base.cmd("git", ["clone", "https://code.videolan.org/videolan/vlc.git", "--branch", vlc_version])
        if "windows" == base.host_platform():
            base.cmd("git", ["config", "--global", "core.autocrlf", autocrlf_old])
    base.create_dir("build")
    # patch that makes VLC ignore plugin cache timestamps
    base.copy_file("tools/ignore-cache-time.patch", "vlc")
    # windows
    if "windows" == base.host_platform():
        if config.check_option("platform", "win_64"):
            base.copy_file("tools/win_64/build.patch", "vlc")
            docker_build("libvlc-win64", base_dir + "/tools/win_64", base_dir)
            form_build_win(vlc_dir + "/build/win64/vlc-" + vlc_version, base_dir + "/build/win_64")
        if config.check_option("platform", "win_32"):
            base.copy_file("tools/win_32/build.patch", "vlc")
            docker_build("libvlc-win32", base_dir + "/tools/win_32", base_dir)
            form_build_win(vlc_dir + "/build/win32/vlc-" + vlc_version, base_dir + "/build/win_32")
    # linux
    if config.check_option("platform", "linux_64"):
        base.copy_file(tools_dir + "/linux/elf/patchelf", "vlc")
        base.copy_file("tools/linux_64/change-rpaths.sh", "vlc")
        docker_build("libvlc-linux64", base_dir + "/tools/linux_64", base_dir)
        form_build_linux(vlc_dir + "/build/linux_64", base_dir + "/build/linux_64")
    # mac
    if "mac" == base.host_platform():
        os.chdir(vlc_dir)
        # re-apply the cache-time patch against a clean bank.c
        base.cmd("git", ["restore", "src/modules/bank.c"])
        base.cmd("patch", ["-p1", "src/modules/bank.c", "../tools/ignore-cache-time.patch"])
        if config.check_option("platform", "mac_64"):
            base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
            base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_64/build.patch"])
            base.create_dir("build/mac_64")
            os.chdir("build/mac_64")
            base.cmd("../../extras/package/macosx/build.sh", ["-c"])
            form_build_mac(vlc_dir + "/build/mac_64/vlc_install_dir", base_dir + "/build/mac_64")
        # NOTE(review): if mac_64 was also built, the cwd is still
        # build/mac_64 here, so these relative paths nest one level deeper —
        # confirm whether both mac targets are ever built in one run.
        if config.check_option("platform", "mac_arm64"):
            base.cmd("git", ["restore", "extras/package/macosx/build.sh"])
            base.cmd("patch", ["-p1", "extras/package/macosx/build.sh", "../tools/mac_arm64/build.patch"])
            base.create_dir("build/mac_arm64")
            os.chdir("build/mac_arm64")
            base.cmd("../../extras/package/macosx/build.sh", ["-c"])
            form_build_mac(vlc_dir + "/build/mac_arm64/vlc_install_dir", base_dir + "/build/mac_arm64")
    os.chdir(old_cur)
    return

View File

@ -1,20 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
import subprocess
def make():
    # Delegate to the md module's own fetch script.
    print("[fetch]: md")
    md_dir = base.get_script_dir() + "/../../core/Common/3dParty/md"
    base.cmd_in_dir(md_dir, "python", ["fetch.py"])
    return

if __name__ == '__main__':
    # manual compile
    make()

View File

@ -1,15 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import base
import os
def make():
    # Delegate to the brotli module's own make.py script.
    print("[fetch & build]: brotli")
    brotli_dir = base.get_script_dir() + "/../../core/Common/3dParty/brotli"
    base.cmd_in_dir(brotli_dir, "./make.py")
    return

if __name__ == '__main__':
    # manual compile
    make()

View File

@ -19,16 +19,15 @@ def make():
print("[fetch & build]: openssl")
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
openssl_mobile.make()
return
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
old_cur = os.getcwd()
os.chdir(base_dir)
base.common_check_version("openssl", "4", clean)
if (-1 != config.option("platform").find("android") or -1 != config.option("platform").find("ios")):
os.chdir(old_cur)
openssl_mobile.make()
return
base.common_check_version("openssl", "3", clean)
if not base.is_dir("openssl"):
base.cmd("git", ["clone", "--depth=1", "--branch", "OpenSSL_1_1_1f", "https://github.com/openssl/openssl.git"])
@ -42,7 +41,7 @@ def make():
base.create_dir("./../build/win_64")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64 --openssldir=" + old_cur_dir + "\\build\\win_64 no-shared no-asm enable-md2")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64 --openssldir=" + old_cur_dir + "\\build\\win_64 no-shared no-asm")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -50,15 +49,7 @@ def make():
base.create_dir("./../build/win_32")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x86")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32 --openssldir=" + old_cur_dir + "\\build\\win_32 no-shared no-asm enable-md2")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
if (-1 != config.option("platform").find("win_arm64")) and not base.is_dir("../build/win_arm64"):
base.create_dir("./../build/win_arm64")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64_arm64")
qmake_bat.append("perl Configure VC-WIN64-ARM --prefix=" + old_cur_dir + "\\build\\win_arm64 --openssldir=" + old_cur_dir + "\\build\\win_arm64 no-shared no-asm enable-md2")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32 --openssldir=" + old_cur_dir + "\\build\\win_32 no-shared no-asm")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -71,7 +62,7 @@ def make():
base.create_dir("./../build/win_64_xp")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x64")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64_xp --openssldir=" + old_cur_dir + "\\build\\win_64_xp no-shared no-asm no-async enable-md2")
qmake_bat.append("perl Configure VC-WIN64A --prefix=" + old_cur_dir + "\\build\\win_64_xp --openssldir=" + old_cur_dir + "\\build\\win_64_xp no-shared no-asm no-async")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -79,7 +70,7 @@ def make():
base.create_dir("./../build/win_32_xp")
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" x86")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32_xp --openssldir=" + old_cur_dir + "\\build\\win_32_xp no-shared no-asm no-async enable-md2")
qmake_bat.append("perl Configure VC-WIN32 --prefix=" + old_cur_dir + "\\build\\win_32_xp --openssldir=" + old_cur_dir + "\\build\\win_32_xp no-shared no-asm no-async")
qmake_bat.append("call nmake clean")
qmake_bat.append("call nmake build_libs install")
base.run_as_bat(qmake_bat, True)
@ -87,49 +78,30 @@ def make():
# -------------------------------------------------------------------------------------------------------
return
if (-1 != config.option("platform").find("linux")) and not base.is_dir("../build/linux_64"):
base.cmd("./config", ["enable-md2", "no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/linux_64", "--openssldir=" + old_cur_dir + "/build/linux_64"])
if "1" == config.option("use-clang"):
base.replaceInFile("./Makefile", "CC=$(CROSS_COMPILE)gcc", "CC=$(CROSS_COMPILE)clang")
base.replaceInFile("./Makefile", "CXX=$(CROSS_COMPILE)g++", "CXX=$(CROSS_COMPILE)clang++")
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden -stdlib=libc++")
base.replaceInFile("./Makefile", "LDFLAGS=", "LDFLAGS=-stdlib=libc++")
elif config.option("sysroot") == "":
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
else:
base.replaceInFile("./Makefile", "CROSS_COMPILE=", "CROSS_COMPILE=" + config.get_custom_sysroot_bin("linux_64") + "/")
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden --sysroot=" + config.option("sysroot_linux_64"))
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden --sysroot=" + config.option("sysroot_linux_64"))
if config.option("sysroot") == "":
base.cmd("make", [])
base.cmd("make", ["install"])
base.cmd("make", ["clean"], True)
else:
base.set_sysroot_env("linux_64")
base.cmd_exe("make", [])
base.cmd_exe("make", ["install"])
base.cmd_exe("make", ["clean"], True)
base.restore_sysroot_env()
if (-1 != config.option("platform").find("linux")) and not base.is_dir("../build/linux_64"):
base.cmd("./config", ["no-shared", "no-asm", "--prefix=" + old_cur_dir + "/build/linux_64", "--openssldir=" + old_cur_dir + "/build/linux_64"])
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
base.cmd("make")
base.cmd("make", ["install"])
# TODO: support x86
if (-1 != config.option("platform").find("linux_arm64")) and not base.is_dir("../build/linux_arm64"):
if (base.is_os_arm()):
if ("x86_64" != platform.machine()):
base.copy_dir("../build/linux_64", "../build/linux_arm64")
else:
if config.option("sysroot") != "":
base.set_sysroot_env("linux_arm64")
base.cmd("./Configure", ["linux-aarch64", "enable-md2", "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
cross_compiler_arm64 = config.option("arm64-toolchain-bin")
if ("" == cross_compiler_arm64):
cross_compiler_arm64 = "/usr/bin"
cross_compiler_arm64_prefix = cross_compiler_arm64 + "/" + base.get_prefix_cross_compiler_arm64()
base.cmd("./Configure", ["linux-aarch64", "--cross-compile-prefix=" + cross_compiler_arm64_prefix, "no-shared", "no-asm", "no-tests", "--prefix=" + old_cur_dir + "/build/linux_arm64", "--openssldir=" + old_cur_dir + "/build/linux_arm64"])
base.replaceInFile("./Makefile", "CFLAGS=-Wall -O3", "CFLAGS=-Wall -O3 -fvisibility=hidden")
base.replaceInFile("./Makefile", "CXXFLAGS=-Wall -O3", "CXXFLAGS=-Wall -O3 -fvisibility=hidden")
base.cmd("make", [], True)
base.cmd("make", ["install"], True)
if config.option("sysroot") != "":
base.restore_sysroot_env()
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_64"):
base.cmd("./Configure", ["enable-md2", "no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-x86_64-cc", "--prefix=" + old_cur_dir + "/build/mac_64", "--openssldir=" + old_cur_dir + "/build/mac_64", "-mmacosx-version-min=10.11"])
base.cmd("make", ["build_libs", "install"])
if (-1 != config.option("platform").find("mac")) and not base.is_dir("../build/mac_arm64"):
@ -147,7 +119,7 @@ def make():
},\n\
\"darwin64-x86_64-cc\" => {"
base.replaceInFile(base_dir + "/openssl2/Configurations/10-main.conf", replace1, replace2)
base.cmd("./Configure", ["enable-md2", "no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
base.cmd("./Configure", ["no-shared", "no-asm", "darwin64-arm64-cc", "--prefix=" + old_cur_dir + "/build/mac_arm64", "--openssldir=" + old_cur_dir + "/build/mac_arm64"])
base.cmd("make", ["build_libs", "install"])
os.chdir(old_cur)

View File

@ -1,19 +1,18 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
sys.path.append('android')
import base
import config
import os
import subprocess
import openssl_android
def make():
path = base.get_script_dir() + "/../../core/Common/3dParty/openssl"
old_cur = os.getcwd()
os.chdir(path)
if (-1 != config.option("platform").find("android")):
openssl_android.make()
base.set_env("ANDROID_HOME", base.get_android_sdk_home())
if (-1 != config.option("platform").find("android") and not base.is_dir("./build/android")):
subprocess.call(["./build-android-openssl.sh"])
if (-1 != config.option("platform").find("ios") and not base.is_dir("./build/ios")):
subprocess.call(["./build-ios-openssl.sh"])

View File

@ -1,74 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import os
import subprocess
import glob
def clean():
if base.is_dir("socket.io-client-cpp"):
base.delete_dir_with_access_error("socket.io-client-cpp")
return
def correct_namespace(dir):
folder = dir
if ("/" != folder[-1:]):
folder += "/"
folder += "*"
for file in glob.glob(folder):
if base.is_file(file):
base.replaceInFile(file, "namespace sio", "namespace sio_no_tls")
base.replaceInFile(file, "asio::", "asio_no_tls::")
base.replaceInFile(file, "sio::", "sio_no_tls::")
base.replaceInFile(file, "asio_no_tls::", "asio::")
elif base.is_dir(file):
correct_namespace(file)
return
def make():
base_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketio"
old_cur = os.getcwd()
os.chdir(base_dir)
base.common_check_version("socketio", "1", clean)
os.chdir(old_cur)
if not base.is_dir(base_dir + "/socket.io-client-cpp"):
base.cmd_in_dir(base_dir, "git", ["clone", "https://github.com/socketio/socket.io-client-cpp.git"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["checkout", "da779141a7379cc30c870d48295033bc16a23c66"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "init"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp", "git", ["submodule", "update"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/asio", "git", ["checkout", "230c0d2ae035c5ce1292233fcab03cea0d341264"])
base.cmd_in_dir(base_dir + "/socket.io-client-cpp/lib/websocketpp", "git", ["checkout", "56123c87598f8b1dd471be83ca841ceae07f95ba"])
# patches
base.apply_patch(base_dir + "/socket.io-client-cpp/lib/websocketpp/websocketpp/impl/connection_impl.hpp", base_dir + "/patches/websocketpp.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_fail.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_open.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_close_timeout.patch")
base.apply_patch(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", base_dir + "/patches/sio_client_impl_encode.patch")
# no tls realization (remove if socket.io fix this)
dst_dir = base_dir + "/socket.io-client-cpp/src_no_tls"
base.copy_dir(base_dir + "/socket.io-client-cpp/src", dst_dir)
correct_namespace(dst_dir)
base.replaceInFile(dst_dir + "/internal/sio_client_impl.h", "SIO_TLS", "SIO_TLS_NO")
base.replaceInFile(dst_dir + "/internal/sio_client_impl.cpp", "SIO_TLS", "SIO_TLS_NO")
base.replaceInFile(dst_dir + "/sio_socket.h", "SIO_SOCKET_H", "SIO_SOCKET_NO_TLS_H")
base.replaceInFile(dst_dir + "/sio_client.h", "SIO_CLIENT_H", "SIO_CLIENT_NO_TLS_H")
base.replaceInFile(dst_dir + "/sio_message.h", "__SIO_MESSAGE_H__", "__SIO_MESSAGE_NO_TLS_H__")
base.replaceInFile(dst_dir + "/internal/sio_packet.h", "SIO_PACKET_H", "SIO_PACKET_NO_TLS_H")
old_ping = " m_ping_timeout_timer->expires_from_now(milliseconds(m_ping_interval + m_ping_timeout), ec);"
new_ping = "#if defined(PING_TIMEOUT_INTERVAL)\n"
new_ping += " m_ping_timeout_timer->expires_from_now(milliseconds(PING_TIMEOUT_INTERVAL), ec);\n"
new_ping += "#else\n"
new_ping += old_ping
new_ping += "\n#endif"
base.replaceInFile(base_dir + "/socket.io-client-cpp/src/internal/sio_client_impl.cpp", old_ping, new_ping)
base.replaceInFile(base_dir + "/socket.io-client-cpp/src_no_tls/internal/sio_client_impl.cpp", old_ping, new_ping)
return

View File

@ -10,17 +10,13 @@ import config
current_dir = base.get_script_dir() + "/../../core/Common/3dParty/socketrocket"
def buildIOS():
# Build for iphone
base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-devices.xcarchive", "-sdk", "iphoneos", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
base.cmd("xcodebuild", ["-sdk", "iphoneos", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])
# Build for simulator
base.cmd("xcodebuild", ["archive", "-project", current_dir + "/SocketRocket.xcodeproj", "-scheme", "SocketRocket", "-archivePath", current_dir + "/build/SocketRocket-simulators.xcarchive", "-sdk", "iphonesimulator", "ENABLE_BITCODE=NO", "BUILD_LIBRARY_FOR_DISTRIBUTION=YES", "SKIP_INSTALL=NO"])
base.cmd("xcodebuild", ["-sdk", "iphonesimulator", "BITCODE_GENERATION_MODE = bitcode", "ENABLE_BITCODE = YES", "OTHER_CFLAGS = -fembed-bitcode", "-configuration", "Release"])
# Package xcframework
base.cmd("xcodebuild", ["-create-xcframework", "-library", current_dir + "/build/SocketRocket-devices.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-library", current_dir + "/build/SocketRocket-simulators.xcarchive/Products/usr/local/lib/libSocketRocket.a", "-output", current_dir + "/build/SocketRocket.xcframework"])
# Remove arm64 for simulator for SDK 14
base.cmd("lipo", ["-remove", "arm64", "-output", "build/Release-iphonesimulator/libSocketRocket.a", "build/Release-iphonesimulator/libSocketRocket.a"])

View File

@ -10,10 +10,10 @@ import v8_89
def clean():
if base.is_dir("depot_tools"):
base.delete_dir_with_access_error("depot_tools")
base.delete_dir_with_access_error("depot_tools");
base.delete_dir("depot_tools")
if base.is_dir("v8"):
base.delete_dir_with_access_error("v8")
base.delete_dir_with_access_error("v8");
base.delete_dir("v8")
if base.is_exist("./.gclient"):
base.delete_file("./.gclient")
@ -26,8 +26,6 @@ def clean():
def is_main_platform():
if (config.check_option("platform", "win_64") or config.check_option("platform", "win_32")):
return True
if (config.check_option("platform", "win_arm64")):
return True
if (config.check_option("platform", "linux_64") or config.check_option("platform", "linux_32") or config.check_option("platform", "linux_arm64")):
return True
if config.check_option("platform", "mac_64"):
@ -44,9 +42,17 @@ def is_xp_platform():
return False
def is_use_clang():
if config.option("sysroot") == "" and "1" == config.option("use-clang"):
return "true"
return "false"
gcc_version = 4
gcc_version_str = base.run_command("gcc -dumpfullversion -dumpversion")['stdout']
if (gcc_version_str != ""):
gcc_version = int(gcc_version_str.split(".")[0])
is_clang = "false"
if (gcc_version >= 6):
is_clang = "true"
print("gcc major version: " + str(gcc_version) + ", use clang:" + is_clang)
return is_clang
def make():
if not is_main_platform():
@ -65,9 +71,13 @@ def make():
if ("mac" == base.host_platform()) and (-1 == config.option("config").find("use_v8")):
return
use_v8_89 = True
if config.check_option("config", "v8_version_60"):
use_v8_89 = False
use_v8_89 = False
if (-1 != config.option("config").lower().find("v8_version_89")):
use_v8_89 = True
if ("windows" == base.host_platform()) and (config.option("vs-version") == "2019"):
use_v8_89 = True
if config.check_option("platform", "linux_arm64"):
use_v8_89 = True
if (use_v8_89):
v8_89.make()
@ -90,7 +100,6 @@ def make():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
@ -116,7 +125,7 @@ def make():
# windows hack (delete later) ----------------------
if ("windows" == base.host_platform()):
base.delete_dir_with_access_error("v8/buildtools/win")
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
base.cmd("gclient", ["sync", "--force"], True)
else:
base.cmd("gclient", ["sync"], True)
@ -177,7 +186,6 @@ def make():
base.cmd2("gn", ["gen", "out.gn/mac_64", "--args=\"is_debug=false " + base_args64 + "\""])
base.cmd("ninja", ["-C", "out.gn/mac_64"])
# add enable_iterator_debugging=false for disable _ITERATOR_DEBUG_LEVEL
if config.check_option("platform", "win_64"):
if (-1 != config.option("config").lower().find("debug")):
base.cmd2("gn", ["gen", "out.gn/win_64/debug", "--args=\"is_debug=true " + base_args64 + " is_clang=false\""])
@ -223,16 +231,13 @@ def make_xp():
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
v8_89.change_bootstrap()
if ("windows" == base.host_platform()):
# hack for 32 bit system!!!
if base.is_file("depot_tools/cipd.ps1"):
base.replaceInFile("depot_tools/cipd.ps1", "windows-386", "windows-amd64")
# old variant
path_to_python2 = "/depot_tools/bootstrap-2@3_11_8_chromium_35_bin/python/bin"
os.environ["PATH"] = os.pathsep.join([base_dir + "/depot_tools",
base_dir + path_to_python2,
base_dir + "/depot_tools/win_tools-2_7_13_chromium7_bin/python/bin",
config.option("vs-path") + "/../Common7/IDE",
os.environ["PATH"]])
@ -242,7 +247,7 @@ def make_xp():
base.cmd("./depot_tools/fetch", ["v8"], True)
base.cmd("./depot_tools/gclient", ["sync", "-r", "4.10.253"], True)
base.delete_dir_with_access_error("v8/buildtools/win")
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
base.cmd("gclient", ["sync", "--force"], True)
# save common py script
@ -265,16 +270,8 @@ def make_xp():
"for file in projects:",
" replaceInFile(file, '<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>')",
" replaceInFile(file, '<RuntimeLibrary>MultiThreaded</RuntimeLibrary>', '<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>')",
])
]);
programFilesDir = base.get_env("ProgramFiles")
if ("" != base.get_env("ProgramFiles(x86)")):
programFilesDir = base.get_env("ProgramFiles(x86)")
dev_path = programFilesDir + "\\Microsoft Visual Studio 14.0\\Common7\\IDE"
if (base.is_dir(dev_path)):
os.environ["PATH"] = dev_path + os.pathsep + os.environ["PATH"]
# add "SET CL=\"/D_ITERATOR_DEBUG_LEVEL=0\"" before devenv for disable _ITERATOR_DEBUG_LEVEL in debug
if config.check_option("platform", "win_64_xp"):
if not base.is_dir("win_64/release"):
base.run_as_bat(["call python v8/build/gyp_v8 -Dtarget_arch=x64", "call python v8/build/common_xp.py", "call devenv v8/tools/gyp/v8.sln /Rebuild Release"])

View File

@ -1,37 +0,0 @@
class StrongRootBlockAllocator {
public:
using pointer = Address*;
using const_pointer = const Address*;
using reference = Address&;
using const_reference = const Address&;
using value_type = Address;
using size_type = size_t;
using difference_type = ptrdiff_t;
template <class U>
struct rebind;
explicit StrongRootBlockAllocator(Heap* heap) : heap_(heap) {}
Address* allocate(size_t n);
void deallocate(Address* p, size_t n) noexcept;
private:
Heap* heap_;
};
// Rebinding to Address gives another StrongRootBlockAllocator.
template <>
struct StrongRootBlockAllocator::rebind<Address> {
using other = StrongRootBlockAllocator;
};
// Rebinding to something other than Address gives a std::allocator that
// is copy-constructable from StrongRootBlockAllocator.
template <class U>
struct StrongRootBlockAllocator::rebind {
class other : public std::allocator<U> {
public:
// NOLINTNEXTLINE
other(const StrongRootBlockAllocator&) {}
};
};

View File

@ -7,52 +7,6 @@ import base
import os
import subprocess
def clean():
if base.is_dir("depot_tools"):
base.delete_dir_with_access_error("depot_tools")
base.delete_dir("depot_tools")
if base.is_dir("v8"):
base.delete_dir_with_access_error("v8")
base.delete_dir("v8")
if base.is_exist("./.gclient"):
base.delete_file("./.gclient")
if base.is_exist("./.gclient_entries"):
base.delete_file("./.gclient_entries")
if base.is_exist("./.gclient_previous_sync_commits"):
base.delete_file("./.gclient_previous_sync_commits")
if base.is_exist("./.gcs_entries"):
base.delete_file("./.gcs_entries")
if base.is_exist("./.cipd"):
base.delete_dir("./.cipd")
return
def change_bootstrap():
base.move_file("./depot_tools/bootstrap/manifest.txt", "./depot_tools/bootstrap/manifest.txt.bak")
content = "# changed by build_tools\n\n"
content += "$VerifiedPlatform windows-amd64 windows-arm64 linux-amd64 mac-amd64 mac-arm64\n\n"
content += "@Subdir python\n"
content += "infra/3pp/tools/cpython/${platform} version:2@2.7.18.chromium.39\n\n"
content += "@Subdir python3\n"
if ("windows" == base.host_platform()):
content += "infra/3pp/tools/cpython3/${platform} version:2@3.11.8.chromium.35\n\n"
else:
content += "infra/3pp/tools/cpython3/${platform} version:2@3.8.10.chromium.23\n\n"
content += "@Subdir git\n"
content += "infra/3pp/tools/git/${platform} version:2@2.41.0.chromium.11\n"
base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
"raise subprocess.CalledProcessError(proc.returncode, argv, None)", "return")
base.replaceInFile("./depot_tools/bootstrap/bootstrap.py",
" _win_git_bootstrap_config()", " #_win_git_bootstrap_config()")
base.writeFile("./depot_tools/bootstrap/manifest.txt", content)
return
def make_args(args, platform, is_64=True, is_debug=False):
args_copy = args[:]
if is_64:
@ -67,99 +21,35 @@ def make_args(args, platform, is_64=True, is_debug=False):
args_copy.append("target_cpu=\\\"arm64\\\"")
args_copy.append("v8_target_cpu=\\\"arm64\\\"")
args_copy.append("use_sysroot=true")
if (platform == "win_arm64"):
args_copy = args[:]
args_copy.append("target_cpu=\\\"arm64\\\"")
args_copy.append("v8_target_cpu=\\\"arm64\\\"")
args_copy.append("is_clang=false")
if is_debug:
args_copy.append("is_debug=true")
if (platform == "windows"):
args_copy.append("enable_iterator_debugging=true")
else:
args_copy.append("is_debug=false")
linux_clang = False
if platform == "linux":
if "" != config.option("sysroot"):
args_copy.append("use_sysroot=false")
args_copy.append("is_clang=false")
else:
args_copy.append("is_clang=true")
if "1" == config.option("use-clang"):
linux_clang = True
else:
args_copy.append("use_sysroot=false")
if (platform == "linux"):
args_copy.append("is_clang=true")
args_copy.append("use_sysroot=false")
if (platform == "windows"):
args_copy.append("is_clang=false")
if (platform == "mac") and base.is_os_arm():
args_copy.append("host_cpu=\\\"x64\\\"")
if linux_clang != True:
args_copy.append("use_custom_libcxx=false")
args_copy.append("is_clang=false")
return "--args=\"" + " ".join(args_copy) + "\""
def ninja_windows_make(args, is_64=True, is_debug=False, is_arm=False):
def ninja_windows_make(args, is_64=True, is_debug=False):
directory_out = "out.gn/"
if is_arm:
directory_out += "win_arm64/"
else:
directory_out += ("win_64/" if is_64 else "win_32/")
directory_out += ("win_64/" if is_64 else "win_32/")
directory_out += ("debug" if is_debug else "release")
if is_arm:
base.cmd2("gn", ["gen", directory_out, make_args(args, "win_arm64", is_64, is_debug)])
else:
base.cmd2("gn", ["gen", directory_out, make_args(args, "windows", is_64, is_debug)])
base.cmd2("gn", ["gen", directory_out, make_args(args, "windows", is_64, is_debug)])
base.copy_file("./" + directory_out + "/obj/v8_wrappers.ninja", "./" + directory_out + "/obj/v8_wrappers.ninja.bak")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "target_output_name = v8_wrappers", "target_output_name = v8_wrappers\nbuild obj/v8_wrappers.obj: cxx ../../../src/base/platform/wrappers.cc")
base.replaceInFile("./" + directory_out + "/obj/v8_wrappers.ninja", "build obj/v8_wrappers.lib: alink", "build obj/v8_wrappers.lib: alink obj/v8_wrappers.obj")
win_toolset_wrapper_file = "build/toolchain/win/tool_wrapper.py"
win_toolset_wrapper_file_content = base.readFile("build/toolchain/win/tool_wrapper.py")
if (-1 == win_toolset_wrapper_file_content.find("line = line.decode('utf8')")):
base.replaceInFile(win_toolset_wrapper_file, "for line in link.stdout:\n", "for line in link.stdout:\n line = line.decode('utf8')\n")
base.cmd("ninja", ["-C", directory_out, "v8_wrappers"])
if is_arm:
base.copy_file('./' + directory_out + '/obj/v8_wrappers.lib', './' + directory_out + '/x64/obj/v8_wrappers.lib')
base.cmd("ninja", ["-C", directory_out])
base.delete_file("./" + directory_out + "/obj/v8_wrappers.ninja")
base.move_file("./" + directory_out + "/obj/v8_wrappers.ninja.bak", "./" + directory_out + "/obj/v8_wrappers.ninja")
return
# patch v8 for build ---------------------------------------------------
def patch_windows_debug():
# v8 8.9 version does not built with enable_iterator_debugging flag
# patch heap.h file:
file_patch = "./src/heap/heap.h"
base.copy_file(file_patch, file_patch + ".bak")
content_old = base.readFile(file_patch)
posStart = content_old.find("class StrongRootBlockAllocator {")
posEnd = content_old.find("};", posStart + 1)
posEnd = content_old.find("};", posEnd + 1)
content = content_old[0:posStart]
content += base.readFile("./../../../../../build_tools/scripts/core_common/modules/v8_89.patch")
content += content_old[posEnd + 2:]
base.writeFile(file_patch, content)
return
def unpatch_windows_debug():
file_patch = "./src/heap/heap.h"
base.move_file(file_patch + ".bak", file_patch)
return
# ----------------------------------------------------------------------
def make():
old_env = dict(os.environ)
old_cur = os.getcwd()
@ -168,15 +58,9 @@ def make():
if not base.is_dir(base_dir):
base.create_dir(base_dir)
if ("mac" == base.host_platform()):
base.cmd("git", ["config", "--global", "http.postBuffer", "157286400"], True)
os.chdir(base_dir)
base.common_check_version("v8", "1", clean)
if not base.is_dir("depot_tools"):
base.cmd("git", ["clone", "https://chromium.googlesource.com/chromium/tools/depot_tools.git"])
change_bootstrap()
os.environ["PATH"] = base_dir + "/depot_tools" + os.pathsep + os.environ["PATH"]
@ -186,44 +70,18 @@ def make():
if not base.is_dir("v8"):
base.cmd("./depot_tools/fetch", ["v8"], True)
base.copy_dir("./v8/third_party", "./v8/third_party_new")
if ("windows" == base.host_platform()):
os.chdir("v8")
base.cmd("git", ["config", "--system", "core.longpaths", "true"], True)
base.cmd("git", ["config", "--system", "core.longpaths", "true"])
os.chdir("../")
v8_branch_version = "remotes/branch-heads/8.9"
if ("mac" == base.host_platform()):
v8_branch_version = "remotes/branch-heads/9.9"
base.cmd("./depot_tools/gclient", ["sync", "-r", v8_branch_version], True)
base.cmd("./depot_tools/gclient", ["sync", "-r", "remotes/branch-heads/8.9"], True)
base.cmd("gclient", ["sync", "--force"], True)
base.copy_dir("./v8/third_party_new/ninja", "./v8/third_party/ninja")
if ("windows" == base.host_platform()):
base.replaceInFile("v8/build/config/win/BUILD.gn", ":static_crt", ":dynamic_crt")
# fix for new depot_tools and vs2019, as VC folder contains a folder with a symbol in the name
# sorting is done by increasing version, so 0 is a dummy value
replace_src = " def to_int_if_int(x):\n try:\n return int(x)\n except ValueError:\n return x"
replace_dst = " def to_int_if_int(x):\n try:\n return int(x)\n except ValueError:\n return 0"
base.replaceInFile("v8/build/vs_toolchain.py", replace_src, replace_dst)
if not base.is_file("v8/src/base/platform/wrappers.cc"):
base.writeFile("v8/src/base/platform/wrappers.cc", "#include \"src/base/platform/wrappers.h\"\n")
if config.check_option("platform", "win_arm64"):
base.replaceInFile("v8/build/toolchain/win/setup_toolchain.py", "SDK_VERSION = \'10.0.26100.0\'", "SDK_VERSION = \'10.0.22621.0\'")
else:
base.replaceInFile("depot_tools/gclient_paths.py", "@functools.lru_cache", "")
if ("mac" == base.host_platform()):
if not base.is_file("v8/build/config/compiler/BUILD.gn.bak"):
base.copy_file("v8/build/config/compiler/BUILD.gn", "v8/build/config/compiler/BUILD.gn.bak")
base.replaceInFile("v8/build/config/compiler/BUILD.gn", "\"-Wloop-analysis\",", "\"-Wloop-analysis\", \"-D_Float16=short\",")
if not base.is_file("v8/third_party/jinja2/tests.py.bak"):
base.copy_file("v8/third_party/jinja2/tests.py", "v8/third_party/jinja2/tests.py.bak")
base.replaceInFile("v8/third_party/jinja2/tests.py", "from collections import Mapping", "try:\n from collections.abc import Mapping\nexcept ImportError:\n from collections import Mapping")
os.chdir("v8")
@ -231,29 +89,12 @@ def make():
"is_component_build=false",
"v8_monolithic=true",
"v8_use_external_startup_data=false",
"use_custom_libcxx=false",
"treat_warnings_as_errors=false"]
if config.check_option("platform", "linux_64"):
if config.option("sysroot") != "":
sysroot_path = config.option("sysroot_linux_64")
sysroot_path_bin = config.get_custom_sysroot_bin("linux_64")
src_replace = "config(\"compiler\") {\n asmflags = []\n cflags = []\n cflags_c = []\n cflags_cc = []\n cflags_objc = []\n cflags_objcc = []\n ldflags = []"
dst_replace = "config(\"compiler\") {\n asmflags = []\n cflags = [\"--sysroot=" + sysroot_path + "\"]" + "\n cflags_c = []\n cflags_cc = [\"--sysroot=" + sysroot_path + "\"]" + "\n cflags_objc = []\n cflags_objcc = []\n ldflags = [\"--sysroot=" + sysroot_path + "\"]"
base.replaceInFile("build/config/compiler/BUILD.gn", src_replace, dst_replace)
src_replace = "gcc_toolchain(\"x64\") {\n cc = \"gcc\"\n cxx = \"g++\""
dst_replace = "gcc_toolchain(\"x64\") {\n cc = \""+ sysroot_path_bin + "/gcc\"\n cxx = \"" + sysroot_path_bin + "/g++\""
base.replaceInFile("build/toolchain/linux/BUILD.gn", src_replace, dst_replace)
old_env = dict(os.environ)
base.set_sysroot_env("linux_64")
base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")], False)
base.cmd2("ninja", ["-C", "out.gn/linux_64"], False)
base.restore_sysroot_env()
else:
base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")], False)
base.cmd2("ninja", ["-C", "out.gn/linux_64"], False)
base.cmd2("gn", ["gen", "out.gn/linux_64", make_args(gn_args, "linux")])
base.cmd("ninja", ["-C", "out.gn/linux_64"])
if config.check_option("platform", "linux_32"):
base.cmd2("gn", ["gen", "out.gn/linux_32", make_args(gn_args, "linux", False)])
@ -267,16 +108,11 @@ def make():
if config.check_option("platform", "mac_64"):
base.cmd2("gn", ["gen", "out.gn/mac_64", make_args(gn_args, "mac")])
base.cmd("ninja", ["-C", "out.gn/mac_64"])
if config.check_option("platform", "win_arm64") and not base.is_file("out.gn/win_arm64/release/obj/v8_monolith.lib"):
ninja_windows_make(gn_args, True, False, True)
if config.check_option("platform", "win_64"):
if (-1 != config.option("config").lower().find("debug")):
if not base.is_file("out.gn/win_64/debug/obj/v8_monolith.lib"):
patch_windows_debug()
ninja_windows_make(gn_args, True, True)
unpatch_windows_debug()
if not base.is_file("out.gn/win_64/release/obj/v8_monolith.lib"):
ninja_windows_make(gn_args)
@ -284,9 +120,7 @@ def make():
if config.check_option("platform", "win_32"):
if (-1 != config.option("config").lower().find("debug")):
if not base.is_file("out.gn/win_32/debug/obj/v8_monolith.lib"):
patch_windows_debug()
ninja_windows_make(gn_args, False, True)
unpatch_windows_debug()
if not base.is_file("out.gn/win_32/release/obj/v8_monolith.lib"):
ninja_windows_make(gn_args, False)

View File

@ -0,0 +1,16 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
import ixwebsocket
import socketrocket
config_file = base.get_script_dir() + "/../../core/Common/WebSocket/websocket.pri"
def make():
ixwebsocket.make()
socketrocket.make()
return

View File

@ -1,16 +0,0 @@
#!/usr/bin/env python
import sys
sys.path.append('../..')
import config
import base
#import ixwebsocket
#import socketrocket
import socket_io
def make():
#ixwebsocket.make()
#socketrocket.make()
socket_io.make()
return

View File

@ -7,7 +7,6 @@ import deploy_builder
import deploy_server
import deploy_core
import deploy_mobile
import deploy_osign
def make():
if config.check_option("module", "desktop"):
@ -20,8 +19,4 @@ def make():
deploy_core.make()
if config.check_option("module", "mobile"):
deploy_mobile.make()
if config.check_option("module", "osign"):
deploy_osign.make()
if base.is_use_create_artifacts_qemu_any_platform():
base.create_artifacts_qemu_any_platform()
return

View File

@ -15,7 +15,6 @@ def make():
continue
root_dir = base_dir + ("/" + native_platform + "/" + branding + ("/DocumentBuilder" if base.is_windows() else "/documentbuilder"))
root_dir_win64 = base_dir + "/win_64/" + branding + "/DocumentBuilder"
if (base.is_dir(root_dir)):
base.delete_dir(root_dir)
base.create_dir(root_dir)
@ -37,53 +36,52 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "OFDFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "StarMathConverter")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
if ("ios" == platform):
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
else:
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", root_dir)
# icu
base.deploy_icu(core_dir, root_dir, native_platform)
if (0 == platform.find("win")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/icuuc58.dll")
if (0 == platform.find("linux")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/libicudata.so.58")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/libicuuc.so.58")
if (0 == platform.find("mac")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib")
# doctrenderer
if isWindowsXP:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir, "doctrenderer")
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/xp/doctrenderer.lib", root_dir + "/doctrenderer.lib")
base.copy_files(core_dir + "/Common/3dParty/v8/v8_xp/" + platform + "/release/icudt*.dll", root_dir + "/")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
if (0 == platform.find("win")):
base.copy_file(core_build_dir + "/lib/" + platform_postfix + "/doctrenderer.lib", root_dir + "/doctrenderer.lib")
base.copy_v8_files(core_dir, root_dir, platform, isWindowsXP)
# python wrapper
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.c")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.python/src/docbuilder.py", root_dir + "/docbuilder.py")
# java wrapper
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "docbuilder.jni")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.java/build/libs/docbuilder.jar", root_dir + "/docbuilder.jar")
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", root_dir + "/")
elif (-1 == config.option("config").find("use_javascript_core")):
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", root_dir + "/icudtl.dat")
# app
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "docbuilder")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.copy_dir(git_dir + "/document-templates/new/en-US", root_dir + "/empty")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
base.copy_dir(git_dir + "/DocumentBuilder/empty", root_dir + "/empty")
base.copy_dir(git_dir + "/DocumentBuilder/samples", root_dir + "/samples")
# js
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", root_dir + "/sdkjs")
@ -95,54 +93,19 @@ def make():
base.create_dir(root_dir + "/include")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/common_deploy.h", root_dir + "/include/common.h")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.h", root_dir + "/include/docbuilder.h")
if (0 == platform.find("win")):
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/src/docbuilder_midl.h", root_dir + "/include/docbuilder_midl.h")
base.replaceInFile(root_dir + "/include/docbuilder.h", "Q_DECL_EXPORT", "BUILDING_DOCBUILDER")
if ("win_64" == platform):
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_64/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_64/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/x64/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
elif ("win_32" == platform):
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/deploy/win_32/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.net/deploy/win_32/docbuilder.net.dll", root_dir + "/docbuilder.net.dll")
base.copy_file(core_dir + "/DesktopEditor/doctrenderer/docbuilder.com/Win32/Release/docbuilder.com.dll", root_dir + "/docbuilder.com.dll")
# correct ios frameworks
if ("ios" == platform):
base.for_each_framework(root_dir, "ios", callbacks=[base.generate_plist, base.generate_xcprivacy])
if (0 == platform.find("linux")):
base.linux_correct_rpath_docbuilder(root_dir)
base.generate_plist(root_dir)
if (0 == platform.find("mac")):
base.for_each_framework(root_dir, "mac", callbacks=[base.generate_plist], max_depth=1)
base.mac_correct_rpath_x2t(root_dir)
base.mac_correct_rpath_docbuilder(root_dir)
base.create_x2t_js_cache(root_dir, "builder", platform)
base.create_dir(root_dir + "/fonts")
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
# delete unnecessary builder files
def delete_files(files):
for file in files:
base.delete_file(file)
delete_files(base.find_files(root_dir, "*.wasm"))
delete_files(base.find_files(root_dir, "*_ie.js"))
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/cmap.bin")
if 0 != platform.find("mac"):
delete_files(base.find_files(root_dir, "sdk-all.js"))
delete_files(base.find_files(root_dir, "sdk-all-min.js"))
base.delete_dir(root_dir + "/sdkjs/slide/themes")
base.delete_dir(root_dir + "/sdkjs/cell/css")
base.delete_file(root_dir + "/sdkjs/pdf/src/engine/viewer.js")
base.delete_file(root_dir + "/sdkjs/common/spell/spell/spell.js.mem")
base.delete_dir(root_dir + "/sdkjs/common/Images")
return

View File

@ -30,20 +30,16 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "OFDFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "StarMathConverter")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", archive_dir + "/sdkjs")
@ -53,31 +49,19 @@ def make():
if ("windows" == base.host_platform()):
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*.dll", archive_dir + "/")
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", archive_dir + "/")
else:
if not (0 == platform.find("mac") and config.check_option("config", "bundle_dylibs")):
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*", archive_dir + "/")
base.copy_v8_files(core_dir, archive_dir, platform)
base.copy_files(core_dir + "/Common/3dParty/icu/" + platform + "/build/*", archive_dir + "/")
if (-1 == config.option("config").find("use_javascript_core")):
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", archive_dir + "/")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allfontsgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "allthemesgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "pluginsmanager")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "standardtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2ttester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester")
# correct mac frameworks
if (0 == platform.find("mac")):
base.for_each_framework(archive_dir, "mac", callbacks=[base.generate_plist], max_depth=1)
base.mac_correct_rpath_x2t(archive_dir)
if base.is_file(archive_dir + "/core.7z"):
base.delete_file(archive_dir + "/core.7z")
base.archive_folder(archive_dir, archive_dir + "/core.7z")
# js cache
base.generate_doctrenderer_config(archive_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.create_x2t_js_cache(archive_dir, "core", platform)
base.delete_file(archive_dir + "/DoctRenderer.config")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False)
return

View File

@ -4,19 +4,6 @@ import config
import base
import os
import platform
import glob
def copy_lib_with_links(src_dir, dst_dir, lib, version):
    # Copy a fully-versioned shared library and recreate the usual Linux
    # symlink chain:  lib -> lib.MAJOR -> lib.MAJOR.MINOR.PATCH
    full_name = lib + "." + version
    major_name = lib + "." + version[:version.find(".")]
    base.copy_file(src_dir + "/" + full_name, dst_dir + "/" + full_name)
    # Links are created with paths relative to dst_dir so the tree stays
    # relocatable after packaging.
    base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + full_name, "./" + major_name])
    base.cmd_in_dir(dst_dir, "ln", ["-s", "./" + major_name, "./" + lib])
    return
def make():
base_dir = base.get_script_dir() + "/../out"
@ -40,7 +27,7 @@ def make():
isWindowsXP = False if (-1 == native_platform.find("_xp")) else True
platform = native_platform[0:-3] if isWindowsXP else native_platform
apps_postfix = "build" + base.qt_dst_postfix()
apps_postfix = "build" + base.qt_dst_postfix();
if ("" != config.option("branding")):
apps_postfix += ("/" + config.option("branding"))
apps_postfix += "/"
@ -54,106 +41,91 @@ def make():
platform_postfix = platform + base.qt_dst_postfix()
build_libraries_path = core_build_dir + "/lib/" + platform_postfix
# x2t
base.create_dir(root_dir + "/converter")
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel")
base.copy_lib(build_libraries_path, root_dir + "/converter", "kernel_network")
base.copy_lib(build_libraries_path, root_dir + "/converter", "UnicodeConverter")
base.copy_lib(build_libraries_path, root_dir + "/converter", "graphics")
base.copy_lib(build_libraries_path, root_dir + "/converter", "PdfFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "DjVuFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "XpsFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "OFDFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "HtmlFile2")
base.copy_lib(build_libraries_path, root_dir + "/converter", "Fb2File")
base.copy_lib(build_libraries_path, root_dir + "/converter", "EpubFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "IWorkFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "HWPFile")
base.copy_lib(build_libraries_path, root_dir + "/converter", "DocxRenderer")
base.copy_lib(build_libraries_path, root_dir + "/converter", "StarMathConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "DocxRenderer")
if ("ios" == platform):
base.copy_lib(build_libraries_path, root_dir + "/converter", "x2t")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "x2t")
else:
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "x2t")
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", root_dir + "/converter")
# icu
base.deploy_icu(core_dir, root_dir + "/converter", native_platform)
if (0 == platform.find("win")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/converter/icudt58.dll")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/converter/icuuc58.dll")
if (0 == platform.find("linux")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/converter/libicudata.so.58")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/converter/libicuuc.so.58")
if (0 == platform.find("mac")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/converter/libicudata.58.dylib")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/converter/libicuuc.58.dylib")
# doctrenderer
if isWindowsXP:
base.copy_lib(build_libraries_path + "/xp", root_dir + "/converter", "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + "/xp", root_dir + "/converter", "doctrenderer")
base.copy_files(core_dir + "/Common/3dParty/v8/v8_xp/" + platform + "/release/icudt*.dll", root_dir + "/converter/")
else:
base.copy_lib(build_libraries_path, root_dir + "/converter", "doctrenderer")
base.copy_v8_files(core_dir, root_dir + "/converter", platform, isWindowsXP)
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir + "/converter", "doctrenderer")
if (0 == platform.find("win")):
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", root_dir + "/converter/")
elif (-1 == config.option("config").find("use_javascript_core")):
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", root_dir + "/converter/icudtl.dat")
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop", "", "../dictionaries")
base.generate_doctrenderer_config(root_dir + "/converter/DoctRenderer.config", "../editors/", "desktop")
base.copy_dir(git_dir + "/document-templates/new", root_dir + "/converter/empty")
base.copy_dir(git_dir + "/desktop-apps/common/templates", root_dir + "/converter/templates")
# dictionaries
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries")
base.copy_dir(git_dir + "/core-fonts/opensans", root_dir + "/fonts")
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
base.create_dir(root_dir + "/dictionaries")
base.copy_dir_content(git_dir + "/dictionaries", root_dir + "/dictionaries", "", ".git")
base.copy_dir(git_dir + "/desktop-apps/common/package/fonts", root_dir + "/fonts")
base.copy_file(git_dir + "/desktop-apps/common/package/license/3dparty/3DPARTYLICENSE", root_dir + "/3DPARTYLICENSE")
# cef
build_dir_name = "build"
if (0 == platform.find("linux")) and (config.check_option("config", "cef_version_107")):
build_dir_name = "build_107"
elif (0 == platform.find("mac")) and (config.check_option("config", "use_v8")):
build_dir_name = "build_103"
if not isWindowsXP:
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/" + build_dir_name + "/*", root_dir)
base.copy_files(core_dir + "/Common/3dParty/cef/" + platform + "/build/*", root_dir)
else:
base.copy_files(core_dir + "/Common/3dParty/cef/" + native_platform + "/" + build_dir_name + "/*", root_dir)
if (0 == platform.find("mac")):
dir_base_old = os.getcwd()
os.chdir(root_dir + "/Chromium Embedded Framework.framework")
base.create_dir("Versions")
base.create_dir("Versions/A")
base.move_file("Chromium Embedded Framework", "Versions/A/Chromium Embedded Framework")
base.move_dir("Resources", "Versions/A/Resources")
base.move_dir("Libraries", "Versions/A/Libraries")
base.cmd("ln", ["-s", "Versions/A/Chromium Embedded Framework", "Chromium Embedded Framework"])
base.cmd("ln", ["-s", "Versions/A/Resources", "Resources"])
base.cmd("ln", ["-s", "Versions/A/Libraries", "Libraries"])
base.cmd("ln", ["-s", "A", "Versions/Current"])
os.chdir(dir_base_old);
base.copy_files(core_dir + "/Common/3dParty/cef/" + native_platform + "/build/*", root_dir)
isUseQt = True
if (0 == platform.find("mac")) or (0 == platform.find("ios")):
isUseQt = False
# libraries
base.copy_lib(build_libraries_path, root_dir, "hunspell")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "hunspell")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ooxmlsignature")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "ascdocumentscore")
if (0 != platform.find("mac")):
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "qtascdocumentscore")
if (0 == platform.find("mac")):
base.copy_dir(core_build_dir + "/bin/" + platform_postfix + "/editors_helper.app", root_dir + "/editors_helper.app")
else:
base.copy_exe(core_build_dir + "/bin/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "editors_helper")
if isUseQt:
base.qt_copy_lib("Qt5Core", root_dir)
base.qt_copy_lib("Qt5Gui", root_dir)
base.qt_copy_lib("Qt5PrintSupport", root_dir)
base.qt_copy_lib("Qt5Svg", root_dir)
base.qt_copy_lib("Qt5Widgets", root_dir)
base.qt_copy_lib("Qt5Multimedia", root_dir)
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
base.qt_copy_lib("Qt5Network", root_dir)
base.qt_copy_lib("Qt5OpenGL", root_dir)
@ -163,70 +135,37 @@ def make():
base.qt_copy_plugin("platforms", root_dir)
base.qt_copy_plugin("platforminputcontexts", root_dir)
base.qt_copy_plugin("printsupport", root_dir)
base.qt_copy_plugin("mediaservice", root_dir)
base.qt_copy_plugin("playlistformats", root_dir)
base.qt_copy_plugin("platformthemes", root_dir)
base.qt_copy_plugin("xcbglintegrations", root_dir)
if not base.check_congig_option_with_platfom(platform, "libvlc"):
base.qt_copy_lib("Qt5Multimedia", root_dir)
base.qt_copy_lib("Qt5MultimediaWidgets", root_dir)
base.qt_copy_plugin("mediaservice", root_dir)
base.qt_copy_plugin("playlistformats", root_dir)
base.qt_copy_plugin("styles", root_dir)
if (0 == platform.find("linux")):
base.qt_copy_lib("Qt5DBus", root_dir)
base.qt_copy_lib("Qt5X11Extras", root_dir)
base.qt_copy_lib("Qt5XcbQpa", root_dir)
base.qt_copy_icu(root_dir, platform)
if not base.check_congig_option_with_platfom(platform, "libvlc"):
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
base.qt_copy_icu(root_dir)
base.copy_files(base.get_env("QT_DEPLOY") + "/../lib/libqgsttools_p.so*", root_dir)
if (0 == platform.find("win")):
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/projicons/" + apps_postfix + "/projicons.exe", root_dir + "/DesktopEditors.exe")
if not isWindowsXP:
base.copy_file(git_dir + "/desktop-apps/win-linux/extras/update-daemon/" + apps_postfix + "/updatesvc.exe", root_dir + "/updatesvc.exe")
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors.exe", root_dir + "/editors.exe")
base.copy_file(git_dir + "/desktop-apps/win-linux/res/icons/desktopeditors.ico", root_dir + "/app.ico")
elif (0 == platform.find("linux")):
base.copy_file(git_dir + "/desktop-apps/win-linux/" + apps_postfix + "/DesktopEditors", root_dir + "/DesktopEditors")
if base.check_congig_option_with_platfom(platform, "libvlc"):
vlc_dir = git_dir + "/core/Common/3dParty/libvlc/build/" + platform + "/lib"
if (0 == platform.find("win")):
base.copy_dir(vlc_dir + "/plugins", root_dir + "/plugins")
base.copy_files(vlc_dir + "/*.dll", root_dir)
base.copy_file(vlc_dir + "/vlc-cache-gen.exe", root_dir + "/vlc-cache-gen.exe")
elif (0 == platform.find("linux")):
base.copy_dir(vlc_dir + "/vlc/plugins", root_dir + "/plugins")
base.copy_file(vlc_dir + "/vlc/libcompat.a", root_dir + "/libcompat.a")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_pulse.so", "0.0.0")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_vdpau.so", "0.0.0")
copy_lib_with_links(vlc_dir + "/vlc", root_dir, "libvlc_xcb_events.so", "0.0.0")
copy_lib_with_links(vlc_dir, root_dir, "libvlc.so", "5.6.1")
copy_lib_with_links(vlc_dir, root_dir, "libvlccore.so", "9.0.1")
base.copy_file(vlc_dir + "/vlc/vlc-cache-gen", root_dir + "/vlc-cache-gen")
if isWindowsXP:
base.copy_lib(build_libraries_path + "/mediaplayer/xp", root_dir, "videoplayer")
else:
base.copy_lib(build_libraries_path + "/mediaplayer", root_dir, "videoplayer")
else:
base.copy_lib(build_libraries_path + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix + ("/xp" if isWindowsXP else ""), root_dir, "videoplayer")
base.create_dir(root_dir + "/editors")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/sdkjs", root_dir + "/editors/sdkjs")
base.copy_dir(base_dir + "/js/" + branding + "/desktop/web-apps", root_dir + "/editors/web-apps")
for file in glob.glob(root_dir + "/editors/web-apps/apps/*/*/*.js.map"):
base.delete_file(file)
base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/resources/local", root_dir + "/editors/sdkjs/common/Images/local")
base.create_dir(root_dir + "/editors/sdkjs-plugins")
if not isWindowsXP:
base.copy_marketplace_plugin(root_dir + "/editors/sdkjs-plugins", True, True, True)
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True, isWindowsXP)
base.copy_sdkjs_plugins(root_dir + "/editors/sdkjs-plugins", True, True)
# remove some default plugins
if base.is_dir(root_dir + "/editors/sdkjs-plugins/speech"):
base.delete_dir(root_dir + "/editors/sdkjs-plugins/speech")
@ -238,42 +177,18 @@ def make():
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", root_dir + "/editors/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(root_dir + "/editors/sdkjs-plugins")
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "manager", True)
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt", root_dir + "/editors/sdkjs-plugins", "advanced2", True)
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/common/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}", root_dir + "/editors/sdkjs-plugins/{14A8FC87-8E26-4216-B34E-F27F053B2EC4}")
#base.copy_dir(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/encrypt/ui/engine/database/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}", root_dir + "/editors/sdkjs-plugins/{9AB4BBA8-A7E5-48D5-B683-ECE76A020BB1}")
base.copy_sdkjs_plugin(git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins", root_dir + "/editors/sdkjs-plugins", "sendto", True)
isUseAgent = True
if isWindowsXP:
isUseAgent = False
if (0 == platform.find("mac")) and (config.check_option("config", "use_v8")):
isUseAgent = False
if (isUseAgent):
agent_plugin_dir = git_dir + "/desktop-sdk/ChromiumBasedEditors/plugins/ai-agent"
if (False):
base.cmd_in_dir(agent_plugin_dir, "npm", ["install"], True)
base.cmd_in_dir(agent_plugin_dir, "npm", ["run", "build"], True)
base.copy_dir(agent_plugin_dir + "/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}", root_dir + "/editors/sdkjs-plugins/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}")
else:
base.copy_dir(agent_plugin_dir + "/deploy/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}", root_dir + "/editors/sdkjs-plugins/{9DC93CDB-B576-4F0C-B55E-FCC9C48DD777}")
base.copy_file(base_dir + "/js/" + branding + "/desktop/index.html", root_dir + "/index.html")
base.create_dir(root_dir + "/editors/webext")
base.copy_file(base_dir + "/js/" + branding + "/desktop/noconnect.html", root_dir + "/editors/webext/noconnect.html")
if isWindowsXP:
base.create_dir(root_dir + "/providers")
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers/onlyoffice", root_dir + "/providers/onlyoffice")
else:
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
base.copy_dir(git_dir + "/desktop-apps/common/loginpage/providers", root_dir + "/providers")
isUseJSC = False
if (0 == platform.find("mac")):
doctrenderer_lib = "libdoctrenderer.dylib"
if config.check_option("config", "bundle_dylibs"):
doctrenderer_lib = "doctrenderer.framework/doctrenderer"
file_size_doctrenderer = os.path.getsize(root_dir + "/converter/" + doctrenderer_lib)
file_size_doctrenderer = os.path.getsize(root_dir + "/converter/libdoctrenderer.dylib")
print("file_size_doctrenderer: " + str(file_size_doctrenderer))
if (file_size_doctrenderer < 5*1024*1024):
isUseJSC = True
@ -281,68 +196,42 @@ def make():
if isUseJSC:
base.delete_file(root_dir + "/converter/icudtl.dat")
base.create_x2t_js_cache(root_dir + "/converter", "desktop", platform)
if (0 == platform.find("win")):
base.copy_lib(git_dir + "/desktop-apps/win-linux/3dparty/WinSparkle/" + platform, root_dir, "WinSparkle")
base.delete_file(root_dir + "/cef_sandbox.lib")
base.delete_file(root_dir + "/libcef.lib")
is_host_not_arm = False
host_platform = ""
# TODO: fix this on mac_arm64 (qemu)
# on windows we are using qemu
isMacArmPlaformOnIntel = False
if (platform == "mac_arm64") and not base.is_os_arm():
is_host_not_arm = True
host_platform = "mac_64"
isMacArmPlaformOnIntel = True
# all themes generate ----
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allfontsgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir + "/converter", "allthemesgen")
if (0 == platform.find("mac")):
# gen plists with max_depth 2 because frameworks are only located in root_dir and converter subdirectory
base.for_each_framework(root_dir, "mac", callbacks=[base.generate_plist], max_depth=2)
base.mac_correct_rpath_desktop(root_dir)
if is_host_not_arm:
if isMacArmPlaformOnIntel:
sdkjs_dir = root_dir + "/editors/sdkjs"
str1 = "/" + platform + "/"
str2 = "/" + host_platform + "/"
sdkjs_dir_host = sdkjs_dir.replace(str1, str2)
end_find_platform = sdkjs_dir.rfind("/mac_arm64/")
sdkjs_dir_mac64 = sdkjs_dir[0:end_find_platform] + "/mac_64/" + sdkjs_dir[end_find_platform+11:]
base.delete_dir(sdkjs_dir)
base.copy_dir(sdkjs_dir_host, sdkjs_dir)
base.copy_dir(sdkjs_dir_mac64, sdkjs_dir)
else:
themes_params = []
if ("" != config.option("themesparams")):
themes_params = ["--params=\"" + config.option("themesparams") + "\""]
params_allfontsgen = ["--use-system=\"1\"", "--input=\"" + root_dir + "/fonts\"", "--input=\"" + git_dir + "/core-fonts\"", "--allfonts=\"" + root_dir + "/converter/AllFonts.js\"", "--selection=\"" + root_dir + "/converter/font_selection.bin\""]
params_allthemesgen = ["--converter-dir=\"" + root_dir + "/converter\"", "--src=\"" + root_dir + "/editors/sdkjs/slide/themes\"", "--allfonts=\"AllFonts.js\"", "--output=\"" + root_dir + "/editors/sdkjs/common/Images\""] + themes_params
if (0 == platform.find("linux_arm") and not base.is_os_arm()):
x2t_origin = ""
if (config.option("sysroot") != ""):
x2t_origin = base.create_qemu_wrapper(root_dir + "/converter/x2t", platform)
base.cmd_in_dir_qemu(platform, root_dir + "/converter", "./allfontsgen", params_allfontsgen, True)
base.cmd_in_dir_qemu(platform, root_dir + "/converter", "./allthemesgen", params_allthemesgen, True)
if "" != x2t_origin:
base.delete_file(root_dir + "/converter/x2t")
base.move_file(x2t_origin, root_dir + "/converter/x2t")
else:
base.cmd_exe(root_dir + "/converter/allfontsgen", params_allfontsgen, True)
base.cmd_exe(root_dir + "/converter/allthemesgen", params_allthemesgen, True)
base.cmd_exe(root_dir + "/converter/allfontsgen", ["--use-system=\"1\"", "--input=\"" + root_dir + "/fonts\"", "--input=\"" + git_dir + "/core-fonts\"", "--allfonts=\"" + root_dir + "/converter/AllFonts.js\"", "--selection=\"" + root_dir + "/converter/font_selection.bin\""])
base.cmd_exe(root_dir + "/converter/allthemesgen", ["--converter-dir=\"" + root_dir + "/converter\"", "--src=\"" + root_dir + "/editors/sdkjs/slide/themes\"", "--allfonts=\"AllFonts.js\"", "--output=\"" + root_dir + "/editors/sdkjs/common/Images\""] + themes_params)
base.delete_file(root_dir + "/converter/AllFonts.js")
base.delete_file(root_dir + "/converter/font_selection.bin")
base.delete_file(root_dir + "/converter/fonts.log")
if not base.is_use_create_artifacts_qemu(platform):
base.delete_exe(root_dir + "/converter/allfontsgen")
base.delete_exe(root_dir + "/converter/allthemesgen")
base.delete_exe(root_dir + "/converter/allfontsgen")
base.delete_exe(root_dir + "/converter/allthemesgen")
if not isUseJSC:
base.delete_file(root_dir + "/editors/sdkjs/slide/sdk-all.cache")
return

View File

@ -6,19 +6,7 @@ import base
def exclude_arch(directory, frameworks):
    # Strip the arm64 slice from each framework binary in place via lipo.
    for name in frameworks:
        binary = directory + "/" + name + ".framework/" + name
        base.cmd("lipo", ["-remove", "arm64", binary, "-o", binary])
    return
# Populate <root_dir>/fonts from the core-fonts checkout.  `platform` defaults
# to "" and only the value "android" changes behavior (adds two extra font
# families below).
def deploy_fonts(git_dir, root_dir, platform=""):
base.create_dir(root_dir + "/fonts")
base.copy_file(git_dir + "/core-fonts/ASC.ttf", root_dir + "/fonts/ASC.ttf")
base.copy_dir(git_dir + "/core-fonts/asana", root_dir + "/fonts/asana")
base.copy_dir(git_dir + "/core-fonts/caladea", root_dir + "/fonts/caladea")
base.copy_dir(git_dir + "/core-fonts/crosextra", root_dir + "/fonts/crosextra")
base.copy_dir(git_dir + "/core-fonts/openoffice", root_dir + "/fonts/openoffice")
# Android builds additionally bundle DejaVu and Liberation families.
if (platform == "android"):
base.copy_dir(git_dir + "/core-fonts/dejavu", root_dir + "/fonts/dejavu")
base.copy_dir(git_dir + "/core-fonts/liberation", root_dir + "/fonts/liberation")
return
return
def make():
base_dir = base.get_script_dir() + "/../out"
@ -35,7 +23,7 @@ def make():
if base.get_env("DESTDIR_BUILD_OVERRIDE") != "":
return
if (base.is_dir(root_dir)):
base.delete_dir(root_dir)
base.create_dir(root_dir)
@ -54,36 +42,52 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "OFDFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "DocxRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "StarMathConverter")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", root_dir + "/cmap.bin")
if (0 == platform.find("win") or 0 == platform.find("linux") or 0 == platform.find("mac")):
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, root_dir, "x2t")
else:
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "x2t")
if ("ios" == platform) and config.check_option("config", "bundle_dylibs") and config.check_option("config", "simulator"):
exclude_arch(root_dir, ["kernel", "kernel_network", "UnicodeConverter", "graphics", "PdfWriter",
"PdfReader", "DjVuFile", "XpsFile", "HtmlFile2", "HtmlRenderer", "doctrenderer",
"Fb2File", "EpubFile", "x2t"])
# icu
base.deploy_icu(core_dir, root_dir, platform)
if (0 == platform.find("win")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", root_dir + "/icudt58.dll")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", root_dir + "/icuuc58.dll")
if (0 == platform.find("linux")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", root_dir + "/libicudata.so.58")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", root_dir + "/libicuuc.so.58")
if (0 == platform.find("mac")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", root_dir + "/libicudata.58.dylib")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", root_dir + "/libicuuc.58.dylib")
if (0 == platform.find("android")):
#base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicudata.so", root_dir + "/libicudata.so")
#base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/libicuuc.so", root_dir + "/libicuuc.so")
base.copy_file(core_dir + "/Common/3dParty/icu/android/build/" + platform[8:] + "/icudt58l.dat", root_dir + "/icudt58l.dat")
# js
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
# correct ios frameworks
if ("ios" == platform):
base.for_each_framework(root_dir, "ios", callbacks=[base.generate_plist, base.generate_xcprivacy])
deploy_fonts(git_dir, root_dir)
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
base.generate_plist(root_dir)
if (0 == platform.find("mac")):
base.mac_correct_rpath_x2t(root_dir)
@ -97,11 +101,8 @@ def make():
base.create_dir(root_dir)
# js
base.copy_dir(base_dir + "/js/" + branding + "/mobile/sdkjs", root_dir + "/sdkjs")
# fonts
deploy_fonts(git_dir, root_dir, "android")
base.copy_dictionaries(git_dir + "/dictionaries", root_dir + "/dictionaries", True, False)
# app
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder", "", "./dictionaries")
base.generate_doctrenderer_config(root_dir + "/DoctRenderer.config", "./", "builder")
libs_dir = root_dir + "/lib"
base.create_dir(libs_dir + "/arm64-v8a")
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/mobile/*.so", libs_dir + "/arm64-v8a")

View File

@ -1,60 +0,0 @@
#!/usr/bin/env python
import config
import base
# Deploy the "osign" library artifacts for every configured platform, then
# assemble the combined Android lib tree (one subdirectory per ABI).
def make():
base_dir = base.get_script_dir() + "/../out"
git_dir = base.get_script_dir() + "/../.."
core_dir = git_dir + "/core"
branding = config.branding()
# Space-separated platform list from the build configuration.
platforms = config.option("platform").split()
for native_platform in platforms:
# Skip entries that are not known platform identifiers.
if not native_platform in config.platforms:
continue
root_dir = base_dir + "/" + native_platform + "/" + branding + "/osign"
# NOTE(review): when DESTDIR_BUILD_OVERRIDE is set the whole deploy is
# skipped via early return, not just this platform — presumably intentional;
# verify against callers.
if base.get_env("DESTDIR_BUILD_OVERRIDE") != "":
return
# Start from a clean output directory.
if (base.is_dir(root_dir)):
base.delete_dir(root_dir)
base.create_dir(root_dir)
qt_dir = base.qt_setup(native_platform)
platform = native_platform
core_build_dir = core_dir + "/build"
# Branded builds keep their artifacts in a branding-named subdirectory.
if ("" != config.option("branding")):
core_build_dir += ("/" + config.option("branding"))
platform_postfix = platform + base.qt_dst_postfix()
# x2t
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, root_dir, "osign")
# correct ios frameworks
if ("ios" == platform):
base.for_each_framework(root_dir, "ios", callbacks=[base.generate_plist, base.generate_xcprivacy])
# Second pass: if any android target was requested, build the merged
# multi-ABI layout once (break after the first match).
for native_platform in platforms:
if native_platform == "android":
# make full version
root_dir = base_dir + "/android/" + branding + "/osign"
if (base.is_dir(root_dir)):
base.delete_dir(root_dir)
base.create_dir(root_dir)
libs_dir = root_dir + "/lib"
# Copy the per-ABI .so files produced by the per-platform builds.
base.create_dir(libs_dir + "/arm64-v8a")
base.copy_files(base_dir + "/android_arm64_v8a/" + branding + "/osign/*.so", libs_dir + "/arm64-v8a")
base.create_dir(libs_dir + "/armeabi-v7a")
base.copy_files(base_dir + "/android_armv7/" + branding + "/osign/*.so", libs_dir + "/armeabi-v7a")
base.create_dir(libs_dir + "/x86")
base.copy_files(base_dir + "/android_x86/" + branding + "/osign/*.so", libs_dir + "/x86")
base.create_dir(libs_dir + "/x86_64")
base.copy_files(base_dir + "/android_x86_64/" + branding + "/osign/*.so", libs_dir + "/x86_64")
break
return

View File

@ -5,7 +5,6 @@ import base
import re
import shutil
import glob
from tempfile import mkstemp
def make():
@ -41,30 +40,24 @@ def make():
build_server_dir = root_dir + '/server'
server_dir = base.get_script_dir() + "/../../server"
bin_server_dir = server_dir + "/build/server"
base.create_dir(build_server_dir + '/DocService')
base.copy_dir(server_dir + '/Common/config', build_server_dir + '/Common/config')
base.copy_dir(bin_server_dir + '/Common/config', build_server_dir + '/Common/config')
base.create_dir(build_server_dir + '/DocService')
base.copy_exe(server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
base.copy_exe(bin_server_dir + "/DocService", build_server_dir + '/DocService', "docservice")
base.create_dir(build_server_dir + '/FileConverter')
base.copy_exe(server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
base.copy_exe(bin_server_dir + "/FileConverter", build_server_dir + '/FileConverter', "converter")
base.create_dir(build_server_dir + '/Metrics')
base.copy_exe(server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
base.copy_dir(server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
base.copy_exe(bin_server_dir + "/Metrics", build_server_dir + '/Metrics', "metrics")
base.copy_dir(bin_server_dir + '/Metrics/config', build_server_dir + '/Metrics/config')
base.create_dir(build_server_dir + '/Metrics/node_modules/modern-syslog/build/Release')
base.copy_file(server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
base.copy_file(bin_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node", build_server_dir + "/Metrics/node_modules/modern-syslog/build/Release/core.node")
# AdminPanel server part
base.create_dir(build_server_dir + '/AdminPanel/server')
base.copy_exe(server_dir + "/AdminPanel/server", build_server_dir + '/AdminPanel/server', "adminpanel")
# AdminPanel client part
base.create_dir(build_server_dir + '/AdminPanel/client/build')
base.copy_dir(server_dir + '/AdminPanel/client/build', build_server_dir + '/AdminPanel/client/build')
qt_dir = base.qt_setup(native_platform)
platform = native_platform
@ -82,80 +75,72 @@ def make():
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "kernel_network")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "UnicodeConverter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "graphics")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfWriter")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "PdfReader")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DjVuFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "XpsFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "OFDFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlFile2")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HtmlRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "doctrenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "Fb2File")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "EpubFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "IWorkFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "HWPFile")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "DocxRenderer")
base.copy_lib(core_build_dir + "/lib/" + platform_postfix, converter_dir, "StarMathConverter")
base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", converter_dir + "/cmap.bin")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "x2t")
#if (native_platform == "linux_64"):
# base.generate_check_linux_system(git_dir + "/build_tools", converter_dir)
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server", "", "../../../dictionaries")
base.generate_doctrenderer_config(converter_dir + "/DoctRenderer.config", "../../../", "server")
# icu
base.deploy_icu(core_dir, converter_dir, platform)
if (0 == platform.find("win")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icudt58.dll", converter_dir + "/icudt58.dll")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/icuuc58.dll", converter_dir + "/icuuc58.dll")
base.copy_v8_files(core_dir, converter_dir, platform)
if (0 == platform.find("linux")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.so.58", converter_dir + "/libicudata.so.58")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.so.58", converter_dir + "/libicuuc.so.58")
if (0 == platform.find("mac")):
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicudata.58.dylib", converter_dir + "/libicudata.58.dylib")
base.copy_file(core_dir + "/Common/3dParty/icu/" + platform + "/build/libicuuc.58.dylib", converter_dir + "/libicuuc.58.dylib")
if (0 == platform.find("win")):
base.copy_files(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/release/icudt*.dat", converter_dir + "/")
elif (-1 == config.option("config").find("use_javascript_core")):
base.copy_file(core_dir + "/Common/3dParty/v8/v8/out.gn/" + platform + "/icudtl.dat", converter_dir + "/icudtl.dat")
# builder
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, converter_dir, "docbuilder")
base.copy_dir(git_dir + "/document-templates/new/en-US", converter_dir + "/empty")
# correct mac frameworks
if (0 == platform.find("mac")):
base.for_each_framework(converter_dir, "mac", callbacks=[base.generate_plist], max_depth=1)
base.copy_dir(git_dir + "/DocumentBuilder/empty", converter_dir + "/empty")
# js
js_dir = root_dir
base.copy_dir(base_dir + "/js/" + branding + "/builder/sdkjs", js_dir + "/sdkjs")
base.copy_dir(base_dir + "/js/" + branding + "/builder/web-apps", js_dir + "/web-apps")
for file in glob.glob(js_dir + "/web-apps/apps/*/*/*.js.map") \
+ glob.glob(js_dir + "/web-apps/apps/*/mobile/dist/js/*.js.map"):
base.delete_file(file)
base.create_x2t_js_cache(converter_dir, "server", platform)
# add embed worker code
base.cmd_in_dir(git_dir + "/sdkjs/common/embed", "python", ["make.py", js_dir + "/web-apps/apps/api/documents/api.js"])
# plugins
base.create_dir(js_dir + "/sdkjs-plugins")
base.copy_marketplace_plugin(js_dir + "/sdkjs-plugins", False, True)
if ("1" == config.option("preinstalled-plugins")):
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
else:
base.generate_sdkjs_plugin_list(js_dir + "/sdkjs-plugins/plugin-list-default.json")
base.copy_sdkjs_plugins(js_dir + "/sdkjs-plugins", False, True)
base.copy_sdkjs_plugins_server(js_dir + "/sdkjs-plugins", False, True)
base.create_dir(js_dir + "/sdkjs-plugins/v1")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", js_dir + "/sdkjs-plugins/v1/plugins.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", js_dir + "/sdkjs-plugins/v1/plugins-ui.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", js_dir + "/sdkjs-plugins/v1/plugins.css")
base.support_old_versions_plugins(js_dir + "/sdkjs-plugins")
# tools
tools_dir = root_dir + "/server/tools"
base.create_dir(tools_dir)
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allfontsgen")
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "allthemesgen")
if ("1" != config.option("preinstalled-plugins")):
base.copy_exe(core_build_dir + "/bin/" + platform_postfix, tools_dir, "pluginsmanager")
branding_dir = server_dir + "/branding"
if("" != config.option("branding") and "onlyoffice" != config.option("branding")):
branding_dir = git_dir + '/' + config.option("branding") + '/server'
#dictionaries
base.copy_dictionaries(server_dir + "/../dictionaries", root_dir + "/dictionaries")
spellchecker_dictionaries = root_dir + '/dictionaries'
spellchecker_dictionaries_files = server_dir + '/../dictionaries/*_*'
base.create_dir(spellchecker_dictionaries)
base.copy_files(spellchecker_dictionaries_files, spellchecker_dictionaries)
if (0 == platform.find("win")):
exec_ext = '.exe'
@ -177,8 +162,8 @@ def make():
#document-templates
document_templates_files = server_dir + '/../document-templates'
document_templates = build_server_dir + '/../document-templates'
base.copy_dir(document_templates_files + '/new', document_templates + '/new')
base.copy_dir(document_templates_files + '/sample', document_templates + '/sample')
base.create_dir(document_templates)
base.copy_dir(document_templates_files, document_templates)
#license
license_file1 = server_dir + '/LICENSE.txt'
@ -188,7 +173,6 @@ def make():
base.copy_file(license_file1, build_server_dir)
base.copy_file(license_file2, build_server_dir)
base.copy_dir(license_dir, license)
base.copy_dir(server_dir + '/dictionaries', build_server_dir + '/dictionaries')
#branding
welcome_files = branding_dir + '/welcome'
@ -215,15 +199,15 @@ def make():
base.delete_dir(root_dir_snap)
base.create_dir(root_dir_snap)
base.copy_dir(root_dir, root_dir_snap)
base.copy_dir(server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
base.copy_dir(server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
base.copy_dir(server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
base.copy_dir(bin_server_dir + '/DocService/node_modules', root_dir_snap + '/server/DocService/node_modules')
base.copy_dir(bin_server_dir + '/DocService/sources', root_dir_snap + '/server/DocService/sources')
base.copy_dir(bin_server_dir + '/DocService/public', root_dir_snap + '/server/DocService/public')
base.delete_file(root_dir_snap + '/server/DocService/docservice')
base.copy_dir(server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
base.copy_dir(server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
base.copy_dir(bin_server_dir + '/FileConverter/node_modules', root_dir_snap + '/server/FileConverter/node_modules')
base.copy_dir(bin_server_dir + '/FileConverter/sources', root_dir_snap + '/server/FileConverter/sources')
base.delete_file(root_dir_snap + '/server/FileConverter/converter')
base.copy_dir(server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
base.copy_dir(server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
base.copy_dir(bin_server_dir + '/Common/node_modules', root_dir_snap + '/server/Common/node_modules')
base.copy_dir(bin_server_dir + '/Common/sources', root_dir_snap + '/server/Common/sources')
if (base.is_dir(root_dir_snap_example)):
base.delete_dir(root_dir_snap_example)
base.create_dir(root_dir_snap_example)
@ -231,3 +215,4 @@ def make():
base.delete_file(root_dir_snap + '/example/nodejs/example')
return

View File

@ -1,104 +0,0 @@
# This script was successfully executed on Ubuntu 22.04.5 LTS
# Before starting, make sure that:
# 1. Python >= 3.9
# 2. The current working folder with the script and its path do not contain spaces and use Latin characters.
# 3. Antivirus is turned off
# 4. There is enough free space on the disk (50GB Libre Office and during the unpacking of packages, it's recommended that you allocate at least 80 gigabytes of free space)
# 5. The current working folder with the script and its path do not contain spaces and use Latin characters.
# If the error "You must put some 'source' URIs in your sources.list" occurs, you need to run the command:
# software-properties-gtk
# in the terminal, and then under the "Ubuntu Software" tab, click "Source code" if it's not turned on and submit
# after completion, the file will appear:
# current_folder_with_script/libreoffice_build/instdir/soffice
# debugging can be done via MVS 2022
# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio
# or via VS Code with c/c++ tools
# https://wiki.documentfoundation.org/Development/IDE#Visual_Studio_Code_(VSCode)
# or via Qt Creator
# https://wiki.documentfoundation.org/Development/IDE#Qt_Creator
# or via attach to the soffice.bin process
# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb
import subprocess
import sys
import os
# Arguments passed to LibreOffice's ./autogen.sh configure step.
CONFIGURE_PARAMS = [
    "--enable-dbgutil",             # debug build with assertions
    "--without-doxygen",            # skip API doc generation
    "--enable-pch",                 # precompiled headers to speed up the build
    "--disable-ccache",
    # "--with-visual-studio=2022",
    '--enable-symbols="all"'        # full debug symbols
]
# Packages installed via apt in addition to `apt-get build-dep libreoffice`.
SUDO_DEPENDENCIES = [
    "git", "build-essential", "zip", "ccache", "junit4", "libkrb5-dev", "nasm", "graphviz", "python3",
    "python3-dev", "python3-setuptools", "qtbase5-dev", "libkf5coreaddons-dev", "libkf5i18n-dev",
    "libkf5config-dev", "libkf5windowsystem-dev", "libkf5kio-dev", "libqt5x11extras5-dev", "autoconf",
    "libcups2-dev", "libfontconfig1-dev", "gperf", "openjdk-17-jdk", "doxygen", "libxslt1-dev",
    "xsltproc", "libxml2-utils", "libxrandr-dev", "libx11-dev", "bison", "flex", "libgtk-3-dev",
    "libgstreamer-plugins-base1.0-dev", "libgstreamer1.0-dev", "ant", "ant-optional", "libnss3-dev",
    "libavahi-client-dev", "libxt-dev"
]
# Directory the LibreOffice sources are cloned into.
DIR_NAME = "libreoffice"
# Path of the built soffice binary relative to the clone (see header comment).
OFFICE_PATH = "instdir/program/soffice"
class bcolors:
    # ANSI terminal escape sequences used to colorize console output.
    OKBLUE = '\033[94m'    # blue
    OKCYAN = '\033[96m'    # cyan
    OKGREEN = '\033[92m'   # green
    FAIL = '\033[91m'      # red (errors)
    RESET = '\033[0m'      # reset all attributes
def run_command(command, exit_on_error=True):
    """Run a shell command, streaming its output to the console.

    command -- the command line to execute (passed to the shell).
    exit_on_error -- when True (default), terminate the script with status 1
                     if the command exits non-zero; otherwise just report the
                     failure and continue.
    """
    try:
        subprocess.run(command, shell=True, check=True)
    except subprocess.CalledProcessError as e:
        # Include the exit status so failures are easier to diagnose
        # (the captured exception was previously unused).
        print(f"{bcolors.FAIL}Error executing command (exit code {e.returncode}): {command}{bcolors.RESET}")
        if exit_on_error:
            sys.exit(1)
def install_dependencies():
    """Install every package the LibreOffice build needs on Ubuntu, then
    register GCC/G++ 12 as the default compiler via update-alternatives."""
    # (status message or None, shell command) pairs, run in order.
    apt_steps = [
        ("Updating package list...", "sudo apt update"),
        ("Adding PPA for GCC/G++ update...", "sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test"),
        (None, "sudo apt update"),
        ("Installing dependencies for LibreOffice...", "sudo apt-get build-dep -y libreoffice"),
        (None, f"sudo apt-get install {' '.join(map(str, SUDO_DEPENDENCIES))}"),
    ]
    for message, command in apt_steps:
        if message is not None:
            print(message)
        run_command(command)
    print("Updating GCC/G++ to v12...")
    # Tolerate failure here: the build can proceed with the stock compiler.
    run_command("sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 60 --slave /usr/bin/g++ g++ /usr/bin/g++-12", exit_on_error=False)
    print(bcolors.OKGREEN + "All dependencies successfully installed!" + bcolors.RESET)
def build_libreoffice():
    """Clone the LibreOffice sources, configure them, and run the full build."""
    clone_cmd = f"git clone https://git.libreoffice.org/core {DIR_NAME}"
    print("Cloning LibreOffice repository...")
    # The clone may already exist from an earlier run; keep going in that case.
    run_command(clone_cmd, exit_on_error=False)
    print("Changing to build directory...")
    os.chdir(f"./{DIR_NAME}")
    print("Start configurator autogen.sh...")
    configure_cmd = f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}"
    run_command(configure_cmd)
    print(bcolors.OKCYAN + "Starting libreoffice build, this may take up to 24 hours and takes up about 20 GB of drive space. You will also most likely need at least 8 GBs of RAM, otherwise the machine might fall into swap and appear to freeze up..." + bcolors.RESET)
    run_command("make")
    print(bcolors.OKGREEN + "LibreOffice build completed!" + bcolors.RESET)
    # print(bcolors.OKCYAN + "Running LibreOffice..." + bcolors.RESET)
    # run_command(OFFICE_PATH, exit_on_error=False)
if __name__ == "__main__":
    # Entry point: install build prerequisites, then clone and build LibreOffice.
    install_dependencies()
    build_libreoffice()

View File

@ -1,202 +0,0 @@
# Before starting, make sure that:
# 1. MVS 2022 is installed and the necessary individual components are in its installer
# - Windows Universal C Runtime
# - .NET Framework 4.x SDK (.NET Framework 5.x SDK and later are currently not supported. These don't register their information to registry, don't have csc.exe and they use dotnet command with csc.dll instead for compiling.)
# - C++ 20xx Redistributable MSMs (only required to build MSI installer)
# - C++ Clang Compiler for Windows (x.x.x)
# 2. Java JDK >= 17
# 3. Antivirus is turned off
# 4. There is enough free space on the disk (50GB Libre Office, 50Gb cygwin64)
# after completion, the files will appear:
# {LO_BUILD_PATH}/sources/libo-core/instdir/program/soffice.exe
# {LO_BUILD_PATH}/sources/libo-core/LibreOffice.sln
# debugging can be done via MVS 2022
# https://wiki.documentfoundation.org/Development/IDE#Microsoft_Visual_Studio
# or via attach to the soffice.bin process
# https://wiki.documentfoundation.org/Development/How_to_debug#Debugging_with_gdb
import sys
sys.path.append('../../scripts')
import threading
import os
import subprocess
import shutil
import argparse
import base
# Cygwin installer download location and local staging paths.
CYGWIN_DOWNLOAD_URL = 'https://cygwin.com/setup-x86_64.exe'
CYGWIN_TEMP_PATH = './tmp'
CYGWIN_SETUP_FILENAME = 'setup-x86_64.exe'
# Packages requested from the Cygwin setup (-P) plus unattended-install switches.
CYGWIN_SETUP_PARAMS = [
    "-P", "autoconf",
    "-P", "automake",
    "-P", "bison",
    "-P", "cabextract",
    "-P", "doxygen",
    "-P", "flex",
    "-P", "gawk=5.2.2-1",
    "-P", "gcc-g++",
    "-P", "gettext-devel",
    "-P", "git",
    "-P", "gnupg",
    "-P", "gperf",
    "-P", "make",
    "-P", "mintty",
    "-P", "nasm",
    "-P", "openssh",
    "-P", "openssl",
    "-P", "patch",
    "-P", "perl",
    "-P", "python",
    "-P", "python3",
    "-P", "pkg-config",
    "-P", "rsync",
    "-P", "unzip",
    "-P", "vim",
    "-P", "wget",
    "-P", "zip",
    "-P", "perl-Archive-Zip",
    "-P", "perl-Font-TTF",
    "-P", "perl-IO-String",
    "--no-admin",
    "--quiet-mode"
]
# Batch file that opens a Cygwin login shell; build commands are piped into it.
CYGWIN_BAT_PATH = 'C:/cygwin64/Cygwin.bat'
# Default build root, three levels above the current working directory.
LO_BUILD_PATH = os.path.normpath(os.path.join(os.getcwd(), '../../../LO'))
# NOTE(review): these f-strings capture LO_BUILD_PATH at import time; the
# --lo_build_path override in __main__ does NOT refresh them — confirm.
CONFIGURE_PARAMS = [f'--with-external-tar="{LO_BUILD_PATH}/sources/lo-externalsrc"',
    f'--with-junit="{LO_BUILD_PATH}/sources/junit-4.10.jar"',
    f'--with-ant-home="{LO_BUILD_PATH}/sources/apache-ant-1.9.5"',
    "--enable-pch",
    "--disable-ccache",
    "--with-visual-studio=2022",
    "--enable-dbgutil",
    '--enable-symbols="all"']
def create_folder_safe(folder_path):
    """Create folder_path if it does not exist, reporting the outcome on
    stdout instead of raising on failure."""
    if os.path.exists(folder_path):
        print(f"Folder '{folder_path}' already exists.")
        return
    try:
        os.mkdir(folder_path)
    except Exception as e:
        # Best-effort: failures (permissions, missing parent) are only reported.
        print(f"Error creating folder: {e}")
    else:
        print(f"Folder '{folder_path}' created successfully.")
class CygwinRunner:
    """Helpers that drive a Cygwin login shell (via Cygwin.bat) to fetch
    tooling, clone LibreOffice, and run its build on Windows."""

    @staticmethod
    def process_commands(commands: list[str]):
        """Feed shell commands line-by-line into a fresh Cygwin shell,
        mirroring its stdout/stderr to this process and waiting for exit.

        commands -- shell command lines; the list should end with 'exit' so
                    the shell terminates.
        """
        proc = subprocess.Popen(
            [CYGWIN_BAT_PATH], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
            shell=True, creationflags=subprocess.CREATE_NEW_CONSOLE
        )
        # Pump both pipes on background threads so neither side blocks.
        def read_stdout():
            for line in iter(proc.stdout.readline, ''):
                sys.stdout.write(line)
            proc.stdout.close()
        def read_stderr():
            for line in iter(proc.stderr.readline, ''):
                sys.stderr.write(line)
            proc.stderr.close()
        stdout_thread = threading.Thread(target=read_stdout)
        stderr_thread = threading.Thread(target=read_stderr)
        stdout_thread.start()
        stderr_thread.start()
        for command in commands:
            proc.stdin.write(command + '\n')
            proc.stdin.flush()
        # Close stdin BEFORE joining the reader threads: the shell only exits
        # on EOF (or an explicit 'exit'), and the readers only finish when the
        # shell exits — the previous order (join, then close) could deadlock
        # if a command list did not end with 'exit'.
        proc.stdin.close()
        stdout_thread.join()
        stderr_thread.join()
        proc.wait()

    @staticmethod
    def install_gnu_make():
        """Install the MSVC-flavoured GNU make into /opt/lo/bin inside Cygwin."""
        base.print_info("install_gnu_make")
        commands = ['mkdir -p /opt/lo/bin',
                    'cd /opt/lo/bin',
                    'wget https://dev-www.libreoffice.org/bin/cygwin/make-4.2.1-msvc.exe',
                    'cp make-4.2.1-msvc.exe make',
                    'chmod +x make',
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def install_ant_and_junit():
        """Download Apache Ant 1.9.5 and JUnit 4.10 into the sources folder."""
        base.print_info("install_ant_and_junit")
        commands = [f'mkdir -p {LO_BUILD_PATH}/sources',
                    f'cd {LO_BUILD_PATH}/sources',
                    'wget https://archive.apache.org/dist/ant/binaries/apache-ant-1.9.5-bin.tar.bz2',
                    'tar -xjvf apache-ant-1.9.5-bin.tar.bz2',
                    'wget http://downloads.sourceforge.net/project/junit/junit/4.10/junit-4.10.jar',
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def clone_lo():
        """Clone the LibreOffice core repository into the sources folder."""
        base.print_info("clone_lo")
        commands = [f'cd {LO_BUILD_PATH}/sources',
                    'git clone https://gerrit.libreoffice.org/core libo-core',
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def build_autogen():
        """Run ./autogen.sh with CONFIGURE_PARAMS in the cloned tree."""
        base.print_info("build_autogen")
        commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
                    f"./autogen.sh {' '.join(map(str, CONFIGURE_PARAMS))}",
                    'exit']
        CygwinRunner.process_commands(commands)

    @staticmethod
    def run_make_build():
        """Run the full LibreOffice make build with the /opt/lo/bin make."""
        base.print_info("run_make")
        commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
                    f'/opt/lo/bin/make gb_COLOR=1',
                    "exit"]
        CygwinRunner.process_commands(commands)

    @staticmethod
    def build_vs_integration():
        """Generate the Visual Studio solution (LibreOffice.sln)."""
        # Fixed log label: this step previously reported "run_make".
        base.print_info("build_vs_integration")
        commands = [f'cd {LO_BUILD_PATH}/sources/libo-core',
                    f'/opt/lo/bin/make gb_COLOR=1 vs-ide-integration',
                    "exit"]
        CygwinRunner.process_commands(commands)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="options")
    # Where the LO build tree lives, relative to this script's CWD.
    parser.add_argument("--lo_build_path", dest="build_path", default=f'../../../LO')
    # --disable_sln / --no-disable_sln: skip generating the VS solution.
    parser.add_argument("--disable_sln", dest="disable_sln", action=argparse.BooleanOptionalAction)
    args = parser.parse_args()
    # NOTE(review): reassigning LO_BUILD_PATH here does not refresh
    # CONFIGURE_PARAMS, which was built from the import-time value — confirm.
    LO_BUILD_PATH = args.build_path
    DISABLE_SLN = args.disable_sln
    create_folder_safe(f'{LO_BUILD_PATH}/sources/lo-externalsrc')
    create_folder_safe(CYGWIN_TEMP_PATH)
    os.chdir(CYGWIN_TEMP_PATH)
    # Fetch and run the Cygwin installer with the package set configured above.
    base.download(CYGWIN_DOWNLOAD_URL, CYGWIN_SETUP_FILENAME)
    subprocess.run([CYGWIN_SETUP_FILENAME] + CYGWIN_SETUP_PARAMS)
    os.chdir('..')
    shutil.rmtree(CYGWIN_TEMP_PATH)
    # Drive the build steps through the Cygwin shell, in order.
    CygwinRunner.install_gnu_make()
    CygwinRunner.install_ant_and_junit()
    CygwinRunner.clone_lo()
    CygwinRunner.build_autogen()
    CygwinRunner.run_make_build()
    # disable_sln defaults to None (falsy), so the solution is built by default.
    if not DISABLE_SLN:
        CygwinRunner.build_vs_integration()

View File

@ -5,6 +5,9 @@ import base
import os
import json
def get_core_url(arch, branch):
return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z"
def make():
git_dir = base.get_script_dir() + "/../.."
old_cur = os.getcwd()
@ -15,10 +18,16 @@ def make():
os.chdir(work_dir)
url = base.get_autobuild_version("core", "", config.option("branch"))
arch = "x64"
arch2 = "_64"
if ("windows" == base.host_platform()) and not base.host_platform_is64():
arch = "x86"
arch2 = "_32"
url = get_core_url(arch, config.option("branch"))
data_url = base.get_file_last_modified_url(url)
if (data_url == "" and config.option("branch") != "develop"):
url = base.get_autobuild_version("core", "", "develop")
url = get_core_url(arch, "develop")
data_url = base.get_file_last_modified_url(url)
old_data_url = base.readFile("./core.7z.data")
@ -40,25 +49,20 @@ def make():
base.extract("./core.7z", "./")
base.writeFile("./core.7z.data", data_url)
platform = ""
if ("windows" == base.host_platform()):
platform = "win" + arch2
else:
platform = base.host_platform() + arch2
base.copy_files("./core/*", "./")
else:
print("-----------------------------------------------------------")
print("Core is up to date. ---------------------------------------")
print("-----------------------------------------------------------")
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/", "../../../dictionaries")
if not base.is_dir(git_dir + "/sdkjs-plugins"):
base.create_dir(git_dir + "/sdkjs-plugins")
if not base.is_dir(git_dir + "/sdkjs-plugins/v1"):
base.create_dir(git_dir + "/sdkjs-plugins/v1")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.js", git_dir + "/sdkjs-plugins/v1/plugins.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins-ui.js", git_dir + "/sdkjs-plugins/v1/plugins-ui.js")
base.download("https://onlyoffice.github.io/sdkjs-plugins/v1/plugins.css", git_dir + "/sdkjs-plugins/v1/plugins.css")
base.generate_doctrenderer_config("./DoctRenderer.config", "../../../sdkjs/deploy/", "server", "../../../web-apps/vendor/")
base.support_old_versions_plugins(git_dir + "/sdkjs-plugins")
base.copy_marketplace_plugin(git_dir + "/sdkjs-plugins", False, False)
if not base.is_dir(git_dir + "/fonts"):
base.create_dir(git_dir + "/fonts")
@ -92,8 +96,7 @@ def make():
server_addons = []
if (config.option("server-addons") != ""):
server_addons = config.option("server-addons").rsplit(", ")
#server-lockstorage is private
if ("server-lockstorage" in server_addons and base.is_dir(git_dir + "/server-lockstorage")):
if ("server-lockstorage" in server_addons):
server_config["editorDataStorage"] = "editorDataRedis"
sdkjs_addons = []
@ -117,8 +120,6 @@ def make():
sql["type"] = config.option("sql-type")
if (config.option("db-port") != ""):
sql["dbPort"] = config.option("db-port")
if (config.option("db-name") != ""):
sql["dbName"] = config.option("db-name")
if (config.option("db-user") != ""):
sql["dbUser"] = config.option("db-user")
if (config.option("db-pass") != ""):
@ -131,17 +132,12 @@ def make():
#site url
example_config = {}
if (base.host_platform() == "linux"):
example_config["port"] = 3000
else:
example_config["port"] = 80
example_config["port"] = 80
example_config["siteUrl"] = "http://" + config.option("siteUrl") + ":8000/"
example_config["apiUrl"] = "web-apps/apps/api/documents/api.js"
example_config["preloaderUrl"] = "web-apps/apps/api/documents/cache-scripts.html"
json_dir = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/"
json_file = json_dir + "/local-development-" + base.host_platform() + ".json"
if base.is_exist(json_dir):
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
json_file = git_dir + "/document-server-integration/web/documentserver-example/nodejs/config/local-development-" + base.host_platform() + ".json"
base.writeFile(json_file, json.dumps({"server": example_config}, indent=2))
os.chdir(old_cur)
return

View File

@ -66,28 +66,6 @@ class CDependencies:
res += ['--remove-path', item]
return res
def check__docker_dependencies():
    """Check the dependencies needed for the docker flow (node.js and 7z) and
    run the install script for anything missing.

    Returns False on Windows when VC components are absent, True on mac;
    otherwise falls through and returns None implicitly.
    """
    if (host_platform == 'windows' and not check_vc_components()):
        return False
    # macOS needs no further checks here.
    if (host_platform == 'mac'):
        return True
    checksResult = CDependencies()
    checksResult.append(check_nodejs())
    checksResult.append(check_7z())
    if (len(checksResult.install) > 0):
        # Build the argument list for scripts/develop/install.py.
        install_args = ['install.py']
        install_args += checksResult.get_uninstall()
        install_args += checksResult.get_removepath()
        install_args += checksResult.get_install()
        base_dir = base.get_script_dir(__file__)
        install_args[0] = './scripts/develop/' + install_args[0]
        if (host_platform == 'windows'):
            # NOTE(review): unicode() is Python 2 only — this branch would
            # raise NameError on Python 3; confirm it is still exercised.
            # The returned code is currently unused.
            code = libwindows.sudo(unicode(sys.executable), install_args)
        elif (host_platform == 'linux'):
            get_updates()
            base.cmd_in_dir(base_dir + "/../../", 'python', install_args, False)
def check_dependencies():
if (host_platform == 'windows' and not check_vc_components()):
return False
@ -110,9 +88,9 @@ def check_dependencies():
if (host_platform == 'windows'):
checksResult.append(check_nodejs())
checksResult.append(check_buildTools())
sql_type = config.option("sql-type")
if (sql_type == 'mysql' and host_platform == 'windows'):
if (config.option("sql-type") == 'mysql' and host_platform == 'windows'):
checksResult.append(check_mysqlServer())
else:
checksResult.append(check_postgreSQL())
@ -191,21 +169,21 @@ def check_nodejs():
nodejs_cur_version_major = int(nodejs_version.split('.')[0][1:])
nodejs_cur_version_minor = int(nodejs_version.split('.')[1])
print('Installed Node.js version: ' + nodejs_version[1:])
nodejs_min_version = '18'
nodejs_min_version = '14.14'
nodejs_min_version_minor = 0
major_minor_min_version = nodejs_min_version.split('.')
nodejs_min_version_major = int(major_minor_min_version[0])
if len(major_minor_min_version) > 1:
nodejs_min_version_minor = int(major_minor_min_version[1])
nodejs_max_version = ""
nodejs_max_version = '14'
nodejs_max_version_minor = float("inf")
major_minor_max_version = nodejs_max_version.split('.')
# nodejs_max_version_major = int(major_minor_max_version[0])
nodejs_max_version_major = float("inf")
nodejs_max_version_major = int(major_minor_max_version[0])
if len(major_minor_max_version) > 1:
nodejs_max_version_minor = int(major_minor_max_version[1])
if (nodejs_min_version_major > nodejs_cur_version_major or nodejs_cur_version_major > nodejs_max_version_major):
print('Installed Node.js version must be 14.14 to 14.x')
isNeedReinstall = True
elif (nodejs_min_version_major == nodejs_cur_version_major):
if (nodejs_min_version_minor > nodejs_cur_version_minor):
@ -215,7 +193,7 @@ def check_nodejs():
isNeedReinstall = True
if (True == isNeedReinstall):
print('Installed Node.js version must be 18 or higher.')
print('Installed Node.js version must be 14.14 to 14.x')
if (host_platform == 'windows'):
dependence.append_uninstall('Node.js')
dependence.append_install('Node.js')
@ -232,24 +210,18 @@ def check_java():
dependence = CDependencies()
base.print_info('Check installed Java')
java_info = base.run_command('java -version')['stderr']
java_version = base.run_command('java -version')['stderr']
version_pos = java_info.find('version "')
java_v = 0
if (version_pos != -1):
try:
java_v = float(java_info[version_pos + len('version "'): version_pos + len('version "') + 2])
except:
pass
if (java_info.find('64-Bit') != -1 and java_v >= 11):
if (java_version.find('64-Bit') != -1):
print('Installed Java is valid')
else:
print('Requires Java version 11+ x64-bit')
dependence.append_install('Java')
if (version_pos != -1):
dependence.append_uninstall('Java')
return dependence
if (java_version.find('32-Bit') != -1):
print('Installed Java must be x64')
else:
print('Java not found')
dependence.append_install('Java')
return dependence
def get_erlang_path_to_bin():
@ -301,21 +273,9 @@ def check_rabbitmq():
print('RabbitMQ is installed')
return dependence
elif (host_platform == 'linux'):
result = ''
# Prefer systemctl for systemd
systemctl_result = base.run_command('systemctl status rabbitmq-server')
if systemctl_result['returncode'] == 0 and systemctl_result['stdout']:
result = systemctl_result['stdout']
# Fallback to service for SysV
if result == '':
command_result = base.run_command('service rabbitmq-server status')
if command_result['returncode'] == 0 and command_result['stdout']:
result = command_result['stdout']
if result != '':
print('RabbitMQ is installed')
result = base.run_command('service rabbitmq-server status')['stdout']
if (result != ''):
print('Installed RabbitMQ is valid')
return dependence
print('RabbitMQ not found')
@ -412,11 +372,10 @@ def check_npm():
def check_vc_components():
base.print_info('Check Visual C++ components')
result = True
if (len(get_programUninstalls('Microsoft Visual C++ 2015-')) == 0):
print('Microsoft Visual C++ 2015-20** Redistributable (x64) not found')
if (len(get_programUninstalls('Microsoft Visual C++ 2015-2019 Redistributable (x64)')) == 0):
print('Microsoft Visual C++ 2015-2019 Redistributable (x64) not found')
result = installProgram('VC2019x64') and result
print('Installed Visual C++ components is valid')
return result
@ -496,8 +455,8 @@ def get_mysql_path_to_bin(mysqlPath = ''):
mysqlPath = os.environ['PROGRAMW6432'] + '\\MySQL\\MySQL Server 8.0\\'
mysqlPath += 'bin'
return mysqlPath
def get_mysqlLoginString():
return 'mysql -u ' + config.option("db-user") + ' -p' + config.option("db-pass")
def get_mysqlLoginSrting():
return 'mysql -u ' + install_params['MySQLServer']['user'] + ' -p' + install_params['MySQLServer']['pass']
def get_mysqlServersInfo():
arrInfo = []
@ -524,15 +483,14 @@ def get_mysqlServersInfo():
def check_mysqlServer():
base.print_info('Check MySQL Server')
dependence = CDependencies()
mysqlLoginSrt = get_mysqlLoginString()
mysqlLoginSrt = get_mysqlLoginSrting()
connectionString = mysqlLoginSrt + ' -e "SHOW GLOBAL VARIABLES LIKE ' + r"'PORT';" + '"'
if (host_platform != 'windows'):
result = os.system(mysqlLoginSrt + ' -e "exit"')
if (result == 0):
connectionResult = base.run_command(connectionString)
expected_port = config.option("db-port")
if (connectionResult['stdout'].find('port') != -1 and connectionResult['stdout'].find(expected_port) != -1):
connectionResult = base.run_command(connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
print('MySQL configuration is valid')
dependence.sqlPath = 'mysql'
return dependence
@ -547,17 +505,13 @@ def check_mysqlServer():
continue
mysql_full_name = 'MySQL Server ' + info['Version'] + ' '
mysql_bin_path = get_mysql_path_to_bin(info['Location'])
connectionResult = base.run_command_in_dir(mysql_bin_path, connectionString)
expected_port = config.option("db-port")
if (connectionResult['stdout'].find('port') != -1 and connectionResult['stdout'].find(expected_port) != -1):
connectionResult = base.run_command_in_dir(get_mysql_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find('port') != -1 and connectionResult.find(install_params['MySQLServer']['port']) != -1):
print(mysql_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
print(mysql_full_name + 'configuration is not valid')
# if path exists, then further removal and installation fails(according to startup statistics). it is better to fix issue manually.
return dependence
print('Valid MySQL Server not found')
dependence.append_uninstall('MySQL Server')
@ -577,43 +531,23 @@ def check_mysqlServer():
return dependence
def check_MySQLConfig(mysqlPath = ''):
result = True
mysqlLoginSrt = get_mysqlLoginString()
mysqlLoginSrt = get_mysqlLoginSrting()
mysql_path_to_bin = get_mysql_path_to_bin(mysqlPath)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].lower().find(config.option("db-name").lower()) == -1):
print('Database "' + config.option("db-name") + '" not found')
result = create_MySQLDb(mysql_path_to_bin, config.option("db-name"), config.option("db-user"), config.option("db-pass"))
if (not result):
return False
print('Creating ' + config.option("db-name") + ' tables ...')
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SHOW DATABASES;"')['stdout'].find('onlyoffice') == -1):
print('Database onlyoffice not found')
creatdb_path = base.get_script_dir() + "/../../server/schema/mysql/createdb.sql"
result = execMySQLScript(mysql_path_to_bin, config.option("db-name"), creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + config.option("db-user") + "';" + '"')['stdout'].find('mysql_native_password') == -1):
result = execMySQLScript(mysql_path_to_bin, creatdb_path)
if (base.run_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "SELECT plugin from mysql.user where User=' + "'" + install_params['MySQLServer']['user'] + "';" + '"')['stdout'].find('mysql_native_password') == -1):
print('Password encryption is not valid')
result = set_MySQLEncrypt(mysql_path_to_bin, 'mysql_native_password') and result
return result
def create_MySQLDb(mysql_path_to_bin, dbName, dbUser, dbPass):
mysqlLoginSrt = get_mysqlLoginString()
print('CREATE DATABASE ' + dbName + ';')
if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE DATABASE ' + dbName + ';"') != 0):
print('failed CREATE DATABASE ' + dbName + ';')
return False
# print('CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';"') != 0):
# print('failed: CREATE USER IF NOT EXISTS ' + dbUser + ' IDENTIFIED BY \'' + dbPass + '\';')
# return False
# print('GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# if (base.exec_command_in_dir(mysql_path_to_bin, mysqlLoginSrt + ' -e "GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';"') != 0):
# print('failed: GRANT ALL PRIVILEGES ON ' + dbName + '.* TO ' + dbUser + ';')
# return False
return True
def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
def execMySQLScript(mysql_path_to_bin, scriptPath):
print('Execution ' + scriptPath)
mysqlLoginSrt = get_mysqlLoginString()
mysqlLoginSrt = get_mysqlLoginSrting()
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -D ' + dbName + ' < "' + scriptPath + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' < "' + scriptPath + '"')
if (code != 0):
print('Execution failed!')
return False
@ -622,7 +556,7 @@ def execMySQLScript(mysql_path_to_bin, dbName, scriptPath):
def set_MySQLEncrypt(mysql_path_to_bin, sEncrypt):
print('Setting MySQL password encrypting...')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginString() + ' -e "' + "ALTER USER '" + config.option("db-user") + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + config.option("db-pass") + "';" + '"')
code = base.exec_command_in_dir(mysql_path_to_bin, get_mysqlLoginSrting() + ' -e "' + "ALTER USER '" + install_params['MySQLServer']['user'] + "'@'localhost' IDENTIFIED WITH " + sEncrypt + " BY '" + install_params['MySQLServer']['pass'] + "';" + '"')
if (code != 0):
print('Setting password encryption failed!')
return False
@ -648,8 +582,8 @@ def get_postrgre_path_to_bin(postgrePath = ''):
return postgrePath
def get_postgreLoginSrting(userName):
if (host_platform == 'windows'):
return 'psql -U ' + userName + ' -h localhost '
return 'PGPASSWORD="' + config.option("db-pass") + '" psql -U ' + userName + ' -h localhost '
return 'psql -U' + userName + ' '
return 'PGPASSWORD="' + install_params['PostgreSQL']['dbPass'] + '" psql -U' + userName + ' -hlocalhost '
def get_postgreSQLInfoByFlag(flag):
arrInfo = []
@ -683,10 +617,9 @@ def check_postgreSQL():
if (host_platform == 'linux'):
result = os.system(postgreLoginSrt + ' -c "\q"')
connectionResult = base.run_command(connectionString)
expected_port = config.option("db-port")
connectionResult = base.run_command(connectionString)['stdout']
if (result != 0 or connectionResult['stdout'].find(expected_port) == -1):
if (result != 0 or connectionResult.find(install_params['PostgreSQL']['dbPort']) == -1):
print('Valid PostgreSQL not found!')
dependence.append_install('PostgreSQL')
dependence.append_uninstall('PostgreSQL')
@ -696,21 +629,19 @@ def check_postgreSQL():
return dependence
arrInfo = get_postgreSQLInfo()
base.set_env('PGPASSWORD', config.option("db-pass"))
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
for info in arrInfo:
if (base.is_dir(info['Location']) == False):
continue
postgre_full_name = 'PostgreSQL ' + info['Version'][:2] + ' '
postgre_bin_path = get_postrgre_path_to_bin(info['Location'])
connectionResult = base.run_command_in_dir(postgre_bin_path, connectionString)
expected_port = config.option("db-port")
if (connectionResult['stdout'].find(expected_port) != -1):
connectionResult = base.run_command_in_dir(get_postrgre_path_to_bin(info['Location']), connectionString)['stdout']
if (connectionResult.find(install_params['PostgreSQL']['dbPort']) != -1):
print(postgre_full_name + 'configuration is valid')
dependence.sqlPath = info['Location']
return dependence
print(postgre_full_name + 'configuration is not valid')
print('Valid PostgreSQL not found')
@ -723,87 +654,60 @@ def check_postgreSQL():
return dependence
def check_postgreConfig(postgrePath = ''):
result = True
base.print_info('Checking PostgreSQL configuration')
if (host_platform == 'windows'):
base.set_env('PGPASSWORD', config.option("db-pass"))
base.set_env('PGPASSWORD', install_params['PostgreSQL']['dbPass'])
rootUser = install_params['PostgreSQL']['root']
dbUser = config.option("db-user")
dbName = config.option("db-name")
dbPass = config.option("db-pass")
dbUser = install_params['PostgreSQL']['dbUser']
dbName = install_params['PostgreSQL']['dbName']
dbPass = install_params['PostgreSQL']['dbPass']
postgre_path_to_bin = get_postrgre_path_to_bin(postgrePath)
postgreLoginRoot = get_postgreLoginSrting(rootUser)
postgreLoginDbUser = get_postgreLoginSrting(dbUser)
creatdb_path = base.get_script_dir() + "/../../server/schema/postgresql/createdb.sql"
# Check if user exists
user_check_result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "\du ' + dbUser + '"')
if (user_check_result['stdout'].find(dbUser) != -1):
# User exists, check password
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "\du ' + dbUser + '"')['stdout'].find(dbUser) != -1):
print('User ' + dbUser + ' is exist')
if (os.system(postgreLoginDbUser + '-c "\q"') != 0):
print('Invalid user password, changing...')
print('Invalid user password!')
base.print_info('Changing password...')
result = change_userPass(dbUser, dbPass, postgre_path_to_bin) and result
else:
print('Creating user ' + dbUser + '...')
print('User ' + dbUser + ' not exist!')
base.print_info('Creating ' + dbName + ' user...')
result = create_postgreUser(dbUser, dbPass, postgre_path_to_bin) and result
# Check if database exists
db_check_result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')
if (db_check_result['stdout'].find(dbName) == -1):
print('Creating database ' + dbName + '...')
create_result = create_postgreDb(dbName, postgre_path_to_bin)
if create_result:
# Grant privileges to user on database and schema
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + dbUser + ';"')
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-d ' + dbName + ' -c "GRANT ALL ON SCHEMA public TO ' + dbUser + ';"')
configure_result = configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
result = create_result and configure_result
else:
result = False
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + ' -c "SELECT datname FROM pg_database;"')['stdout'].find('onlyoffice') == -1):
print('Database ' + dbName + ' not found')
base.print_info('Creating ' + dbName + ' database...')
result = create_postgreDb(dbName, postgre_path_to_bin) and configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
else:
# Database exists - check if tables need to be created
table_count_result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "SELECT count(*) FROM information_schema.tables WHERE table_schema = \'public\';"')
needs_configure = False
if table_count_result['stdout'].find(' 0') != -1:
# No tables - need to configure
needs_configure = True
if needs_configure:
# Grant privileges and configure
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + dbUser + ';"')
base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-d ' + dbName + ' -c "GRANT ALL ON SCHEMA public TO ' + dbUser + ';"')
configure_result = configureDb(dbUser, dbName, creatdb_path, postgre_path_to_bin)
result = configure_result and result
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "SELECT pg_size_pretty(pg_database_size(' + "'" + dbName + "'" + '));"')['stdout'].find('7559 kB') != -1):
print('Database ' + dbName + ' not configured')
base.print_info('Configuring ' + dbName + ' database...')
result = configureDb(dbName, creatdb_path, postgre_path_to_bin) and result
print('Database ' + dbName + ' is valid')
if (base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "\l+ ' + dbName + '"')['stdout'].find(dbUser +'=CTc/' + rootUser) == -1):
print('User ' + dbUser + ' has no database privileges!')
base.print_info('Setting database privileges for user ' + dbUser + '...')
result = set_dbPrivilegesForUser(dbUser, dbName, postgre_path_to_bin) and result
print('User ' + dbUser + ' has database privileges')
return result
def create_postgreDb(dbName, postgre_path_to_bin = ''):
postgreLoginUser = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "CREATE DATABASE ' + dbName +';"')
if (result['returncode'] != 0):
print('Database creation failed!')
if (base.exec_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "CREATE DATABASE ' + dbName +';"') != 0):
return False
return True
def set_dbPrivilegesForUser(userName, dbName, postgre_path_to_bin = ''):
postgreLoginUser = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + userName + ';"')
if (result['returncode'] != 0):
print('Grant privileges failed!')
if (base.exec_command_in_dir(postgre_path_to_bin, postgreLoginUser + '-c "GRANT ALL privileges ON DATABASE ' + dbName + ' TO ' + userName + ';"') != 0):
return False
return True
def create_postgreUser(userName, userPass, postgre_path_to_bin = ''):
postgreLoginRoot = get_postgreLoginSrting(install_params['PostgreSQL']['root'])
result = base.run_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "CREATE USER ' + userName + ' WITH password ' + "'" + userPass + "'" + ';"')
if (result['returncode'] != 0):
print('User creation failed!')
if (base.exec_command_in_dir(postgre_path_to_bin, postgreLoginRoot + '-c "CREATE USER ' + userName + ' WITH password ' + "'" + userPass + "'" + ';"') != 0):
return False
return True
def change_userPass(userName, userPass, postgre_path_to_bin = ''):
@ -812,24 +716,13 @@ def change_userPass(userName, userPass, postgre_path_to_bin = ''):
return False
return True
def configureDb(userName, dbName, scriptPath, postgre_path_to_bin = ''):
print('Executing ' + scriptPath)
if not base.is_file(scriptPath):
print('ERROR: Script file does not exist!')
return False
print('Execution ' + scriptPath)
postgreLoginSrt = get_postgreLoginSrting(userName)
full_command = postgreLoginSrt + ' -d ' + dbName + ' -f "' + scriptPath + '"'
# Use run_command_in_dir to capture output
result = base.run_command_in_dir(postgre_path_to_bin, full_command)
if (result['returncode'] != 0):
code = base.exec_command_in_dir(postgre_path_to_bin, postgreLoginSrt + ' -d ' + dbName + ' -f "' + scriptPath + '"')
if (code != 0):
print('Execution failed!')
if result['stderr']:
print('Error: ' + result['stderr'])
return False
print('Execution completed')
return True
def uninstall_postgresql():
@ -932,7 +825,6 @@ def installProgram(sName):
print(install_command)
code = os.system(install_command)
base.delete_file(file_name)
elif (host_platform == 'linux'):
if (sName in install_special):
code = install_special[sName]()
@ -963,13 +855,13 @@ def install_gruntcli():
def install_mysqlserver():
if (host_platform == 'windows'):
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + config.option("db-port") + 'enable_tcpip=true;port=' + config.option("db-port") + ';rootpasswd=' + config.option("db-pass") + ' -silent')
return os.system('"' + os.environ['ProgramFiles(x86)'] + '\\MySQL\\MySQL Installer for Windows\\MySQLInstallerConsole" community install server;' + install_params['MySQLServer']['version'] + ';x64:*:type=config;openfirewall=true;generallog=true;binlog=true;serverid=' + install_params['MySQLServer']['port'] + 'enable_tcpip=true;port=' + install_params['MySQLServer']['port'] + ';rootpasswd=' + install_params['MySQLServer']['pass'] + ' -silent')
elif (host_platform == 'linux'):
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + config.option("db-port"))['stdout'])
os.system('sudo kill ' + base.run_command('sudo fuser -vn tcp ' + install_params['MySQLServer']['port'])['stdout'])
code = os.system('sudo ufw enable && sudo ufw allow 22 && sudo ufw allow 3306')
code = os.system('sudo apt-get -y install zsh htop') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + config.option("db-pass") + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
code = os.system('echo "mysql-server mysql-server/root_password_again password ' + install_params['MySQLServer']['pass'] + '" | sudo debconf-set-selections') and code
return os.system('yes | sudo apt install mysql-server') and code
return 1
@ -991,7 +883,7 @@ def install_postgresql():
file_name = "install.exe"
base.download(download_url, file_name)
base.print_info("Install PostgreSQL...")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + config.option("db-pass") + ' --serverport ' + config.option("db-port")
install_command = file_name + ' --mode unattended --unattendedmodeui none --superpassword ' + install_params['PostgreSQL']['dbPass'] + ' --serverport ' + install_params['PostgreSQL']['dbPort']
else:
base.print_info("Install PostgreSQL...")
install_command = 'sudo apt install postgresql -y'
@ -1002,12 +894,12 @@ def install_postgresql():
if (host_platform == 'windows'):
base.delete_file(file_name)
else:
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + config.option("db-pass") + "'" + ';"') and code
code = os.system('sudo -i -u postgres psql -c "ALTER USER postgres PASSWORD ' + "'" + install_params['PostgreSQL']['dbPass'] + "'" + ';"') and code
return code
def install_nodejs():
os.system('curl -sSL https://deb.nodesource.com/setup_18.x | sudo -E bash -')
os.system('curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -')
base.print_info("Install node.js...")
install_command = 'yes | sudo apt install nodejs'
print(install_command)
@ -1016,11 +908,11 @@ def install_nodejs():
downloads_list = {
'Windows': {
'Git': 'https://github.com/git-for-windows/git/releases/download/v2.29.0.windows.1/Git-2.29.0-64-bit.exe',
'Node.js': 'https://nodejs.org/dist/v18.17.1/node-v18.17.1-x64.msi',
'Java': 'https://aka.ms/download-jdk/microsoft-jdk-11.0.18-windows-x64.msi',
'Node.js': 'https://nodejs.org/download/release/v14.17.2/node-v14.17.2-x64.msi',
'Java': 'https://javadl.oracle.com/webapps/download/AutoDL?BundleId=242990_a4634525489241b9a9e1aa73d9e118e6',
'RabbitMQ': 'https://github.com/rabbitmq/rabbitmq-server/releases/download/v3.8.9/rabbitmq-server-3.8.9.exe',
'Erlang': 'http://erlang.org/download/otp_win64_23.1.exe',
'VC2019x64': 'https://aka.ms/vs/17/release/vc_redist.x64.exe',
'VC2019x64': 'https://aka.ms/vs/16/release/vc_redist.x64.exe',
'MySQLInstaller': 'https://dev.mysql.com/get/Downloads/MySQLInstaller/mysql-installer-web-community-8.0.21.0.msi',
'BuildTools': 'https://download.visualstudio.microsoft.com/download/pr/11503713/e64d79b40219aea618ce2fe10ebd5f0d/vs_BuildTools.exe',
'Redis': 'https://github.com/tporadowski/redis/releases/download/v5.0.9/Redis-x64-5.0.9.msi',
@ -1052,14 +944,23 @@ uninstall_special = {
install_params = {
'BuildTools': '--add Microsoft.VisualStudio.Workload.VCTools --includeRecommended --quiet --wait',
'Git': '/VERYSILENT /NORESTART',
'Java': '/s',
'MySQLServer': {
'port': '3306',
'user': 'root',
'pass': 'onlyoffice',
'version': '8.0.21'
},
'Redis': 'PORT=6379 ADD_FIREWALL_RULE=1',
'PostgreSQL': {
'root': 'postgres'
'root': 'postgres',
'dbPort': '5432',
'dbName': 'onlyoffice',
'dbUser': 'onlyoffice',
'dbPass': 'onlyoffice'
}
}
uninstall_params = {
'PostgreSQL': '--mode unattended --unattendedmodeui none'
}

View File

@ -10,15 +10,11 @@ import config_server as develop_config_server
base_dir = base.get_script_dir(__file__)
def build_docker_server():
dependence.check__docker_dependencies()
build_develop_server()
def build_docker_sdk_web_apps(dir):
dependence.check__docker_dependencies()
build_js.build_js_develop(dir)
def build_develop_server():
def make():
if ("1" != config.option("develop")):
return
if not dependence.check_dependencies():
exit(1)
build_server.build_server_develop()
build_js.build_js_develop(base_dir + "/../../..")
develop_config_server.make()
@ -26,12 +22,5 @@ def build_develop_server():
branding_develop_script_dir = base_dir + "/../../../" + config.option("branding") + "/build_tools/scripts"
if base.is_file(branding_develop_script_dir + "/develop.py"):
base.cmd_in_dir(branding_develop_script_dir, "python", ["develop.py"], True)
def make():
if ("1" != config.option("develop")):
return
if not dependence.check_dependencies():
exit(1)
build_develop_server()
exit(0)

View File

@ -1,342 +0,0 @@
#!/usr/bin/env python3
"""
Git Operations Script
Provides functionality to clone repositories and create branches.
Uses existing methods from base module and integrates with release.py patterns.
"""
import sys
import argparse
import logging
from typing import Dict
# Add parent directory to path to import modules
sys.path.append('../')
import base
import config
import dependence
# Setup logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
class GitOperations:
"""Class to handle git clone and branch creation using existing base module methods."""
def __init__(self, branding: str = "onlyoffice", base_branch: str = "develop",
             branding_url: str = "ONLYOFFICE/onlyoffice.git", branch_name: str = None,
             modules: str = "core desktop builder server mobile"):
    """
    Store the branding/branch configuration and configure repositories immediately.

    Args:
        branding: Branding name (default: onlyoffice).
        base_branch: Base branch to work from (default: develop).
        branding_url: Path to the branding repo, relative to the git host base.
        branch_name: Branch to create/remove (required for branch operations).
        modules: Space-separated module list passed to configure.py.
    """
    self.branding = branding
    self.base_branch = base_branch
    self.branding_url = branding_url
    self.branch_name = branch_name
    self.modules = modules
    self.work_dir = None
    # Clone/configure right away; the boolean result is intentionally ignored
    # here — failures are already logged inside _configure().
    self._configure()
    # Build the repository map after configuration; the bulk update call is
    # currently disabled.
    configured_repos = self.get_configured_repositories()
    #base.update_repositories(configured_repos)
def create_branch(self, branch_name: str, repo_dir: str = None) -> bool:
    """
    Create and check out a new branch via `git checkout -b`.

    Args:
        branch_name: Name of the new branch.
        repo_dir: Repository directory (falls back to self.work_dir).

    Returns:
        bool: True on success, False if the git command failed.
    """
    work_dir = repo_dir or self.work_dir
    logger.info(f"Creating branch '{branch_name}' in {work_dir}")
    try:
        # base.cmd_in_dir raises SystemExit on a non-zero exit code
        # (presumably — mirrored by the except below; confirm in base module).
        base.cmd_in_dir(work_dir, "git", ["checkout", "-b", branch_name], True)
        logger.info(f"Successfully created branch: {branch_name}")
        return True
    except SystemExit:
        logger.error(f"Failed to create branch: {branch_name}")
        return False
def push_branch(self, branch_name: str, repo_dir: str = None, set_upstream: bool = True) -> bool:
    """
    Push a branch to the `origin` remote.

    Args:
        branch_name: Branch to push.
        repo_dir: Repository directory (falls back to self.work_dir).
        set_upstream: When True, also set upstream tracking (`git push -u`).

    Returns:
        bool: True on success, False if the git command failed.
    """
    target_dir = repo_dir or self.work_dir
    logger.info(f"Pushing branch '{branch_name}' in {target_dir}")
    # Choose the argument vector up front; only the `-u` flag differs.
    push_args = (["push", "-u", "origin", branch_name]
                 if set_upstream else ["push", "origin", branch_name])
    try:
        base.cmd_in_dir(target_dir, "git", push_args, True)
        logger.info(f"Successfully pushed branch: {branch_name}")
        return True
    except SystemExit:
        logger.error(f"Failed to push branch: {branch_name}")
        return False
def _configure(self) -> bool:
    """
    Run configure.py to clone/update all repositories, mirroring release.py.

    The branding URL is derived from this repository's own `origin`, so clones
    go through the same git host (HTTPS or SSH) as build_tools itself.

    Returns:
        bool: True if configuration succeeded, False otherwise.
    """
    logger.info(f"Configuring and cloning repositories for branch: {self.base_branch}")
    try:
        origin = base.git_get_origin()
        # Derive the host prefix from the origin URL:
        #   https://github.com/ORG/build_tools.git -> https://github.com/
        #   git@github.com:ORG/build_tools.git     -> git@github.com:
        if '://' in origin:  # HTTPS
            pieces = origin.split('/', 3)
            host_base = pieces[0] + '//' + pieces[2] + '/'
        else:  # SSH (scp-like syntax)
            host_base = origin.split(':', 1)[0] + ':'
        branding_url = host_base + self.branding_url
        logger.info(f"Build tools origin: {origin}")
        logger.info(f"Git host base: {host_base}")
        logger.info(f"Using branding URL: {branding_url}")
        # On Windows, python/git must be resolvable before configure.py runs.
        if base.host_platform() == "windows":
            dependence.check_pythonPath()
            dependence.check_gitPath()
        # Same invocation pattern as release.py.
        base.cmd_in_dir('../../', 'python', [
            'configure.py',
            '--branding', self.branding,
            '--branding-url', branding_url,
            '--branch', self.base_branch,
            '--module', self.modules,
            '--update', '1',
            '--clean', '0'
        ])
        # Load the freshly written configuration, then update the two repos
        # configure.py does not update itself.
        config.parse()
        base.git_update('build_tools')
        base.git_update(self.branding)
        # Re-read defaults now that the branding repo is up to date.
        config.parse_defaults()
        logger.info("Successfully configured")
        return True
    except Exception as e:
        logger.error(f"Failed to configure and clone: {e}")
        return False
def get_configured_repositories(self) -> Dict:
    """Return base's repository map, extended with the always-present repos."""
    repos = base.get_repositories()
    # [True, False] mirrors the entries base.get_repositories() produces;
    # the second element holds an explicit checkout dir (False = default layout).
    for extra in ('core-ext', 'build_tools', self.branding):
        repos[extra] = [True, False]
    return repos
def _iterate_repositories(self, operation_func, operation_name: str) -> bool:
    """
    Apply `operation_func(repo_name, repo_path)` to every configured repository.

    Args:
        operation_func: Callable taking (repo_name, repo_path), returning bool.
        operation_name: Human-readable operation name, used only for logging.

    Returns:
        bool: True if the operation succeeded in at least one repository.
    """
    repos = self.get_configured_repositories()
    succeeded = 0
    for name in repos:
        # The second element is an explicit checkout dir; False means the
        # default layout three levels above the scripts directory.
        explicit_dir = repos[name][1]
        path = explicit_dir if explicit_dir != False else f"../../../{name}"
        if not base.is_dir(path):
            logger.warning(f"Repository {name} not found at {path}")
            continue
        if operation_func(name, path):
            succeeded += 1
        else:
            logger.warning(f"✗ Failed to {operation_name} in {name}")
    logger.info(f"{operation_name.capitalize()} completed in {succeeded}/{len(repos)} repositories")
    return succeeded > 0
def delete_branch(self, branch_name: str, repo_dir: str = None, force: bool = False) -> bool:
    """
    Delete a branch locally and (best-effort) on the `origin` remote.

    Args:
        branch_name: Branch to delete.
        repo_dir: Repository directory (falls back to self.work_dir).
        force: Use `git branch -D` instead of the safe `-d`.

    Returns:
        bool: True if the local delete succeeded; a failed remote delete only
        logs a warning. False otherwise.
    """
    target_dir = repo_dir or self.work_dir
    logger.info(f"Deleting branch '{branch_name}' in {target_dir}")
    try:
        # Move off the branch first: git refuses to delete the checked-out one.
        base.cmd_in_dir(target_dir, "git", ["checkout", self.base_branch], True)
        base.cmd_in_dir(target_dir, "git",
                        ["branch", "-D" if force else "-d", branch_name], True)
        logger.info(f"Successfully deleted local branch: {branch_name}")
        # Remote delete is best-effort — the branch may never have been pushed.
        try:
            base.cmd_in_dir(target_dir, "git", ["push", "origin", "--delete", branch_name], True)
            logger.info(f"Successfully deleted remote branch: {branch_name}")
        except SystemExit:
            logger.warning(f"Failed to delete remote branch: {branch_name} (may not exist)")
        return True
    except SystemExit:
        logger.error(f"Failed to delete branch: {branch_name}")
        return False
def create_branches(self) -> bool:
    """
    Create and push self.branch_name in every configured repository.

    Returns:
        bool: True if the branch was created and pushed in at least one repo.
    """
    logger.info(f"Creating branch '{self.branch_name}' in all repositories")

    def _create_then_push(repo_name: str, repo_path: str) -> bool:
        # Per-repo step: create the branch locally, then push it with
        # upstream tracking; stop at the first failing git command.
        if not self.create_branch(self.branch_name, repo_path):
            logger.warning(f"✗ Failed to create branch '{self.branch_name}' in {repo_name}")
            return False
        logger.info(f"✓ Created branch '{self.branch_name}' in {repo_name}")
        if not self.push_branch(self.branch_name, repo_path):
            logger.warning(f"✗ Failed to push branch '{self.branch_name}' in {repo_name}")
            return False
        logger.info(f"✓ Pushed branch '{self.branch_name}' in {repo_name}")
        return True

    try:
        return self._iterate_repositories(_create_then_push,
                                          f"create and push branch '{self.branch_name}'")
    except Exception as e:
        logger.error(f"Failed to create branch in all repositories: {e}")
        return False
def remove_branches(self, force: bool = False) -> bool:
    """
    Remove self.branch_name from every configured repository.

    Args:
        force: Force-delete (`git branch -D`) instead of a safe delete.

    Returns:
        bool: True if the branch was removed from at least one repository.
    """
    logger.info(f"Removing branch '{self.branch_name}' from all repositories")

    def _remove_one(repo_name: str, repo_path: str) -> bool:
        # Per-repo step: delete the branch locally and on the remote.
        if self.delete_branch(self.branch_name, repo_path, force):
            logger.info(f"✓ Removed branch '{self.branch_name}' from {repo_name}")
            return True
        logger.warning(f"✗ Failed to remove branch '{self.branch_name}' from {repo_name}")
        return False

    try:
        return self._iterate_repositories(_remove_one,
                                          f"remove branch '{self.branch_name}'")
    except Exception as e:
        logger.error(f"Failed to remove branch from all repositories: {e}")
        return False
def _add_branch_arguments(sub_parser, action: str):
    """Attach the arguments shared by the `create` and `remove` subcommands.

    Factored out because the two subparsers previously duplicated five
    identical add_argument calls, which invites drift between them.
    """
    sub_parser.add_argument('branch_name', help=f'Name of the branch to {action}')
    sub_parser.add_argument('--base-branch', default='develop', help='Base branch to work from (default: develop)')
    sub_parser.add_argument('--branding', default='onlyoffice', help='Branding name')
    sub_parser.add_argument('--branding-url', default='ONLYOFFICE/onlyoffice.git', help='Relative path from git host base (default: ONLYOFFICE/onlyoffice.git)')
    sub_parser.add_argument('--modules', default='core desktop builder server mobile', help='Modules to include')

def main():
    """Parse the command line and dispatch to the create/remove branch operations.

    Exits with status 0 on success, 1 on failure; prints help and returns
    when no subcommand is given.
    """
    parser = argparse.ArgumentParser(description='Git Operations Tool - Create and Remove Branches')
    subparsers = parser.add_subparsers(dest='command', help='Available commands')
    # `create`: configure, clone and create the branch in all repositories.
    create_parser = subparsers.add_parser('create', help='Configure, clone and create branch in all repositories')
    _add_branch_arguments(create_parser, 'create')
    # `remove`: configure, clone and remove the branch from all repositories.
    remove_parser = subparsers.add_parser('remove', help='Configure, clone and remove branch from all repositories')
    _add_branch_arguments(remove_parser, 'remove')
    remove_parser.add_argument('--force', action='store_true', help='Force delete the branch (equivalent to git branch -D)')
    args = parser.parse_args()
    if not args.command:
        parser.print_help()
        return
    # Constructing GitOperations configures and clones the repositories.
    git_ops = GitOperations(args.branding, args.base_branch, args.branding_url, args.branch_name, args.modules)
    if args.command == 'create':
        sys.exit(0 if git_ops.create_branches() else 1)
    elif args.command == 'remove':
        sys.exit(0 if git_ops.remove_branches(args.force) else 1)

if __name__ == '__main__':
    main()

View File

@ -6,7 +6,6 @@ import base
import shutil
import optparse
import dependence
import config
arguments = sys.argv[1:]
@ -18,10 +17,6 @@ parser.add_option("--remove-path", action="append", type="string", dest="remove-
(options, args) = parser.parse_args(arguments)
configOptions = vars(options)
# parse configuration
config.parse()
config.parse_defaults()
for item in configOptions["uninstall"]:
dependence.uninstallProgram(item)
for item in configOptions["remove-path"]:

View File

@ -3,30 +3,25 @@
import sys
sys.path.append('../')
import argparse
import optparse
import config
import base
import os
parser = argparse.ArgumentParser(description="Print repositories list.")
parser.add_argument('-P', '--platform', type=str, dest='platform',
action='store', default="native", help="Defines platform")
parser.add_argument('-M', '--module', type=str, dest='module',
action='store', default="core desktop builder server",
help="Defines modules")
parser.add_argument('-B', '--branding', type=str, dest='branding',
action='store', help="Defines branding path")
args = parser.parse_args()
arguments = sys.argv[1:]
config_args = [
'configure.py',
'--platform', args.platform,
'--module', args.module
]
if args.branding != None:
config_args += ['--branding', args.branding]
parser = optparse.OptionParser()
parser.add_option("--module", action="store", type="string", dest="module", default="core desktop builder server", help="defines modules")
parser.add_option("--platform", action="store", type="string", dest="platform", default="native", help="defines platform")
parser.add_option("--branding", action="store", type="string", dest="branding", default="onlyoffice", help="provides branding path")
base.cmd_in_dir('../../', 'python', config_args)
(options, args) = parser.parse_args(arguments)
configOptions = vars(options)
base.cmd_in_dir('../../', 'python',['configure.py',
'--module', configOptions["module"],
'--platform', configOptions["platform"],
'--branding', configOptions["branding"]])
# parse configuration
config.parse()
@ -34,6 +29,9 @@ config.parse_defaults()
repositories = base.get_repositories()
# Add other plugins
repositories.update(base.get_plugins('autocomplete, easybib, wordpress'))
# Add other repositories
if config.check_option("module", "builder"):
repositories['document-builder-package'] = [False, False]

View File

@ -39,6 +39,8 @@ config.parse_defaults()
repositories = base.get_repositories()
# Add other plugins
repositories.update(base.get_plugins('autocomplete, easybib, glavred, wordpress'))
# Add other repositories
repositories['core-ext'] = [True, False]

View File

@ -6,16 +6,6 @@ import os
import base
import dependence
import traceback
import develop
# if (sys.version_info[0] >= 3):
# unicode = str
# host_platform = base.host_platform()
# if (host_platform == 'windows'):
# import libwindows
base_dir = base.get_script_dir(__file__)
def install_module(path):
base.print_info('Install: ' + path)
@ -28,18 +18,11 @@ def find_rabbitmqctl(base_path):
return base.find_file(os.path.join(base_path, 'RabbitMQ Server'), 'rabbitmqctl.bat')
def restart_win_rabbit():
# todo maybe restarting is not relevant after many years and versions?
base.print_info('restart RabbitMQ node to prevent "Erl.exe high CPU usage every Monday morning on Windows" https://groups.google.com/forum/#!topic/rabbitmq-users/myl74gsYyYg')
rabbitmqctl = find_rabbitmqctl(os.environ['PROGRAMW6432']) or find_rabbitmqctl(os.environ['ProgramFiles(x86)'])
if rabbitmqctl is not None:
try:
# code = libwindows.sudo(unicode(sys.executable), ['net', 'stop', 'rabbitmq'])
# code = libwindows.sudo(unicode(sys.executable), ['net', 'start', 'rabbitmq'])
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
except SystemExit:
base.print_error('Perhaps Erlang cookies are different: Replace %userprofile%/.erlang.cookie with %WINDIR%/System32/config/systemprofile/.erlang.cookie')
raise
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['stop_app'])
base.cmd_in_dir(base.get_script_dir(rabbitmqctl), 'rabbitmqctl.bat', ['start_app'])
else:
base.print_info('Missing rabbitmqctl.bat')
@ -58,98 +41,56 @@ def start_linux_services():
os.system('sudo service rabbitmq-server restart')
def run_integration_example():
if base.is_exist(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs'):
base.cmd_in_dir(base_dir + '/../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
base.cmd_in_dir('../../../document-server-integration/web/documentserver-example/nodejs', 'python', ['run-develop.py'])
def start_linux_services():
base.print_info('Restart MySQL Server')
def update_config(args):
platform = base.host_platform()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
if ("linux" == platform):
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
def make_start():
base.configure_common_apps()
platform = base.host_platform()
if ("windows" == platform):
dependence.check_pythonPath()
dependence.check_gitPath()
restart_win_rabbit()
elif ("mac" == platform):
start_mac_services()
elif ("linux" == platform):
start_linux_services()
def make_configure(args):
base.print_info('Build modules')
update_config(args)
base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py'])
def make_install():
platform = base.host_platform()
run_integration_example()
base.create_dir(base_dir + '/../../../server/App_Data')
install_module(base_dir + '/../../../server/DocService')
install_module(base_dir + '/../../../server/Common')
install_module(base_dir + '/../../../server/FileConverter')
def make_run():
platform = base.host_platform()
base.set_env('NODE_ENV', 'development-' + platform)
base.set_env('NODE_CONFIG_DIR', '../Common/config')
if ("mac" == platform):
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
elif ("linux" == platform):
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
run_module(base_dir + '/../../../server/DocService', ['sources/server.js'])
#run_module(base_dir + '/../../../server/DocService', ['sources/gc.js'])
run_module(base_dir + '/../../../server/FileConverter', ['sources/convertermaster.js'])
#run_module(base_dir + '/../../../server/SpellChecker', ['sources/server.js'])
def run_docker_server(args = []):
try:
make_start()
develop.build_docker_server()
make_install()
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)
except KeyboardInterrupt:
pass
except:
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
def run_docker_sdk_web_apps(dir):
try:
develop.build_docker_sdk_web_apps(dir)
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)
except KeyboardInterrupt:
pass
except:
input("Unexpected error. " + traceback.format_exc() + "Press Enter to continue...")
def make(args = []):
try:
make_start()
make_configure(args)
make_install()
make_run()
base.configure_common_apps()
platform = base.host_platform()
if ("windows" == platform):
dependence.check_pythonPath()
dependence.check_gitPath()
restart_win_rabbit()
elif ("mac" == platform):
start_mac_services()
elif ("linux" == platform):
start_linux_services()
branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout']
base.print_info('Build modules')
if ("linux" == platform):
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args)
else:
base.cmd_in_dir('../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args)
base.cmd_in_dir('../../', 'python', ['make.py'])
run_integration_example()
base.create_dir('../../../server/App_Data')
install_module('../../../server/DocService')
install_module('../../../server/Common')
install_module('../../../server/FileConverter')
base.set_env('NODE_ENV', 'development-' + platform)
base.set_env('NODE_CONFIG_DIR', '../Common/config')
if ("mac" == platform):
base.set_env('DYLD_LIBRARY_PATH', '../FileConverter/bin/')
elif ("linux" == platform):
base.set_env('LD_LIBRARY_PATH', '../FileConverter/bin/')
run_module('../../../server/DocService', ['sources/server.js'])
# run_module('../../../server/DocService', ['sources/gc.js'])
run_module('../../../server/FileConverter', ['sources/convertermaster.js'])
# run_module('../../../server/SpellChecker', ['sources/server.js'])
except SystemExit:
input("Ignoring SystemExit. Press Enter to continue...")
exit(0)

View File

@ -1,162 +0,0 @@
# license_checker
## Overview
**license_checker** allows you to automatically check
licenses inside specified code files.
## How to use
### Running
**Note**: Python 3.9 or above is required
(otherwise `TypeError: 'type' object is not subscriptable`)
* Linux
```bash
python3 license_checker.py
```
* Windows
```bash
python license_checker.py
```
## How to configure
The checker settings are specified in the `config.json`.
The path to the license template is indicated there.
### How to specify a license template
The license template is a plain text
file where the license text is indicated
as you would like to see the license at
the beginning of the file.
### How to configure `config.json`
#### Config parameters
* `basePath` specifies which folder the
paths will be relative to.
**For example:**
```json
"basePath": "../../../"
```
* `reportFolder` specifies in which folder to
save text files with reports.
**For example:**
```json
"reportFolder": "build_tools/scripts/license_checker/reports"
```
* `printChecking` specifies whether to output
information about which file is
being checked to the console.
**For example:**
```json
"printChecking": false
```
* `printReports` specifies whether to output
reports to the console.
**For example:**
```json
"printReports": false
```
* `fix` specifies which categories of reports
should be repaired automatically.
Possible array values:
`"OUTDATED"`,
`"NO_LICENSE"`,
`"INVALID_LICENSE"`,
`"LEN_MISMATCH"`.
**For example:**
```json
"fix": ["OUTDATED", "NO_LICENSE"],
```
Automatically repair files where the license is outdated or not found.
* `configs` license check and repair configurations.
* `dir` folder to check.
**For example:**
```json
"dir": "sdkjs"
```
* `fileExtensions` file extensions to check.
**For example:**
```json
"fileExtensions": [".js"]
```
* `licensePath` specifies the path to the license template.
**For example:**
```json
"licensePath": "header.license"
```
* `ignoreListDir` folder paths to ignore.
**For example:**
```json
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
"sdkjs/configs",
"sdkjs/common/AllFonts.js",
"sdkjs/slide/themes/themes.js"
]
```
* `ignoreListDirName` folder names to ignore.
**For example:**
```json
"ignoreListDirName": [
"node_modules",
"vendor"
]
```
* `ignoreListFile` file paths to ignore.
**For example:**
```json
"ignoreListFile": [
"sdkjs/develop/awesomeFileToIgnore.js",
]
```
* `allowListFile` file paths to allow. It is needed if you ignore the directory, but there is a file in it that needs to be checked.
**For example:**
```json
"ignoreListDir": [
"sdkjs/develop"
],
"allowListFile": [
"sdkjs/develop/awesomeFileToAllow.js",
]
```
Any number of configurations can be
specified, they can overlap
if we need to check
files in the same folder in different ways.

View File

@ -1,208 +0,0 @@
{
"basePath": "../../../",
"reportFolder": "build_tools/scripts/license_checker/reports",
"printChecking": false,
"printReports": false,
"fix": ["OUTDATED"],
"configs": [
{
"dir": "core",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".cs", ".js", ".m", ".mm", ".license"],
"licensePath": "header.license",
"ignoreListDir": [
"core/build",
"core/Common/cfcpp/test",
"core/Common/js",
"core/DesktopEditor/agg-2.4",
"core/DesktopEditor/cximage",
"core/DesktopEditor/freetype_names/freetype-2.5.3",
"core/DesktopEditor/freetype-2.5.2",
"core/DesktopEditor/freetype-2.10.4",
"core/DesktopEditor/raster/JBig2",
"core/DesktopEditor/raster/Jp2",
"core/DesktopEditor/xml/libxml2",
"core/DesktopEditor/xmlsec",
"core/DjVuFile/libdjvu",
"core/DjVuFile/wasm",
"core/EpubFile",
"core/OOXML/PPTXFormat/Limit/pri",
"core/Fb2File",
"core/HtmlFile2",
"core/Apple",
"core/HwpFile",
"core/OdfFile/Common/utf8cpp",
"core/OfficeUtils/js/emsdk",
"core/OfficeUtils/src/zlib-1.2.11",
"core/PdfFile/lib",
"core/UnicodeConverter/icubuilds-mac",
"core/UnicodeConverter/icubuilds-win32"
],
"ignoreListDirName": [
"node_modules",
"vendor",
"3dParty"
],
"ignoreListFile": [
"core/Test/CoAuthoring/settings.js",
"core/OdfFile/Projects/Linux/precompiled.h",
"core/MsBinaryFile/Projects/XlsFormatLib/Linux/precompiled.h"
],
"allowListFile": [
"core/DesktopEditor/freetype_names/FontMaps/FontMaps.cpp",
"core/Common/3dParty/openssl/test/main.cpp",
"core/Common/3dParty/openssl/common/common_openssl.h",
"core/Common/3dParty/openssl/common/common_openssl.cpp"
]
},
{
"dir": "core-ext",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"ignoreListDir": [
"core-ext/AutoTester",
"core-ext/cell_android",
"core-ext/desktop-sdk-private",
"core-ext/docbuilder",
"core-ext/Registration",
"core-ext/slide_android",
"core-ext/test",
"core-ext/word_android",
"core-ext/word_ios"
],
"ignoreListFile": [
"core-ext/native_base/json.hpp",
"core-ext/native_base/android_base/libeditors/src/main/cpp/workaround/swab/swab.h"
]
},
{
"dir": "sdkjs",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDir": [
"sdkjs/deploy",
"sdkjs/develop",
"sdkjs/configs"
],
"ignoreListDirName": [
"node_modules",
"vendor"
],
"ignoreListFile": [
"sdkjs/common/externs/jquery-3.2.js",
"sdkjs/common/externs/socket.io.js",
"sdkjs/common/Native/jquery_native.js",
"sdkjs/common/AllFonts.js",
"sdkjs/slide/themes/themes.js"
]
},
{
"dir": "sdkjs-forms",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "sdkjs-ooxml",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "web-apps",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor",
"search"
],
"ignoreListDir": [
"web-apps/apps/common/mobile",
"web-apps/apps/common/main/lib/mods",
"web-apps/apps/documenteditor/mobile",
"web-apps/apps/spreadsheeteditor/mobile",
"web-apps/apps/presentationeditor/mobile",
"web-apps/build/plugins/grunt-inline"
],
"ignoreListFile": [
"web-apps/apps/api/documents/api.js",
"web-apps/apps/common/main/lib/core/application.js",
"web-apps/apps/common/main/lib/core/keymaster.js",
"web-apps/apps/presentationeditor/embed/resources/less/watch.js"
]
},
{
"dir": "web-apps-mobile",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules",
"vendor"
]
},
{
"dir": "server",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDir": [
"server/FileConverter/bin"
],
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-lockstorage",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-license",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "server-license-key",
"fileExtensions": [".js"],
"licensePath": "header.license",
"ignoreListDirName": [
"node_modules"
]
},
{
"dir": "editors-ios",
"fileExtensions": [".h", ".c", ".hpp", ".cpp", ".hxx", ".cxx", ".m", ".mm"],
"licensePath": "header.license",
"ignoreListDirName": [
"vendor",
"Vendor",
"3dParty"
],
"allowListFile": [
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.h",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableArray.m",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.h",
"editors-ios/Vendor/ThreadSafeMutable/ThreadSafeMutableDictionary.m"
]
},
{
"dir": "editors-webview-ios",
"fileExtensions": [".swift", ".xcconfig"],
"licensePath": "header.license"
}
]
}

View File

@ -1,31 +0,0 @@
/*
* (c) Copyright Ascensio System SIA 2010-2025
*
* This program is a free software product. You can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License (AGPL)
* version 3 as published by the Free Software Foundation. In accordance with
* Section 7(a) of the GNU AGPL its Section 15 shall be amended to the effect
* that Ascensio System SIA expressly excludes the warranty of non-infringement
* of any third-party rights.
*
* This program is distributed WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. For
* details, see the GNU AGPL at: http://www.gnu.org/licenses/agpl-3.0.html
*
* You can contact Ascensio System SIA at 20A-6 Ernesta Birznieka-Upish
* street, Riga, Latvia, EU, LV-1050.
*
* The interactive user interfaces in modified source and object code versions
* of the Program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU AGPL version 3.
*
* Pursuant to Section 7(b) of the License you must retain the original Product
* logo when distributing the program. Pursuant to Section 7(e) we decline to
* grant you any rights under trademark law for use of our trademarks.
*
* All the Product's GUI elements, including illustrations and icon sets, as
* well as technical writing content are licensed under the terms of the
* Creative Commons Attribution-ShareAlike 4.0 International. See the License
* terms at http://creativecommons.org/licenses/by-sa/4.0/legalcode
*
*/

View File

@ -1,339 +0,0 @@
import os
import re
import enum
import json
import codecs
# Path of the checker's configuration file, relative to the script's CWD.
CONFIG_PATH = 'config.json'


class ErrorType(enum.Enum):
    """Categories of license-check failures a file can be reported with."""
    INVALID_LICENSE = 1  # a header exists but its text does not match the template
    NO_LICENSE = 2       # no license header was found at all
    OUTDATED = 3         # header matches except for an older copyright year
    LEN_MISMATCH = 4     # header found, but its line count differs from the template


# Maps the string values accepted in config.json's "fix" array to members;
# built from the enum itself so the two can never drift apart.
FIX_TYPES = {member.name: member for member in ErrorType}
class Config(object):
    """
    License checker configuration for a single directory tree.

    Attributes:
        dir: Directory to check.
        fileExtensions: File extensions to check.
        ignoreListDir: Ignored folder paths.
        ignoreListDirName: Ignored folder names.
        ignoreListFile: Ignored file paths.
        allowListFile: Allowed file paths (checked even inside ignored dirs).
    """

    def __init__(self,
                 dir: str,
                 fileExtensions: list[str],
                 licensePath: str = 'header.license',
                 allowListFile=None,
                 ignoreListDir=None,
                 ignoreListDirName=None,
                 ignoreListFile=None) -> None:
        """Stores the lists and reads the license template from `licensePath`.

        The four list parameters default to fresh empty lists. (The original
        used mutable `[]` defaults, which Python evaluates once and shares
        across every instance — a latent aliasing bug.)

        Raises:
            Exception: if the template file is empty (nothing to compare against).
        """
        self._dir = dir
        self._fileExtensions = fileExtensions
        self._allowListFile = [] if allowListFile is None else allowListFile
        self._ignoreListDir = [] if ignoreListDir is None else ignoreListDir
        self._ignoreListDirName = [] if ignoreListDirName is None else ignoreListDirName
        self._ignoreListFile = [] if ignoreListFile is None else ignoreListFile
        # Read the license template; the first and last non-blank lines are
        # used as the comment open/close markers when scanning source files.
        with open(licensePath, 'r', encoding="utf8") as file:
            lines = file.readlines()
        if not lines:
            raise Exception(f'Error getting license template. Cannot read {licensePath} file. Is not it empty?')
        non_empty_lines = [s for s in lines if not s.isspace()]
        self._startMultiComm = non_empty_lines[0]
        self._endMultiComm = non_empty_lines[-1]
        self._license_lines = lines

    def getDir(self) -> str:
        return self._dir

    def getFileExtensions(self) -> list[str]:
        return self._fileExtensions

    def getStartMultiComm(self) -> str:
        # First non-blank template line, e.g. "/*\n".
        return self._startMultiComm

    def getEndMultiComm(self) -> str:
        # Last non-blank template line, e.g. " */\n".
        return self._endMultiComm

    def getLicense(self) -> list[str]:
        return self._license_lines

    def getAllowListFile(self) -> list[str]:
        return self._allowListFile

    def getIgnoreListDir(self) -> list[str]:
        return self._ignoreListDir

    def getIgnoreListDirName(self) -> list[str]:
        return self._ignoreListDirName

    def getIgnoreListFile(self) -> list[str]:
        return self._ignoreListFile
# ---- Load config.json and materialise module-level settings. ----
with open(CONFIG_PATH, 'r') as j:
    _json: dict = json.load(j)

BASE_PATH: str = _json.get('basePath') or '../../../'
REPORT_FOLDER: str = _json.get('reportFolder') or 'build_tools/scripts/license_checker/reports'

if _json.get('fix'):
    try:
        # Translate the "fix" string array into ErrorType members.
        FIX: list[ErrorType] = [FIX_TYPES[name] for name in _json.get('fix')]
    except KeyError:
        raise Exception(f'KeyError. "fix" cannot process value. It must be an array of strings. Check {CONFIG_PATH}. Possible array values: "OUTDATED", "NO_LICENSE", "INVALID_LICENSE", "LEN_MISMATCH"')
else:
    # Empty list instead of the original `False`: still falsy wherever
    # `if FIX:` is tested, but consistent with FIX's declared list type
    # and safe for `x in FIX` membership checks.
    FIX = []

# May be None when the key is absent — treated as falsy by all call sites.
PRINT_CHECKING: bool = _json.get('printChecking')
PRINT_REPORTS: bool = _json.get('printReports')

CONFIGS: list[Config] = []
for i in _json.get('configs'):
    CONFIGS.append(Config(**i))

# All configured paths are relative to basePath; switch there once.
os.chdir(BASE_PATH)
class Error(object):
    """Pairs an ErrorType with its human-readable description."""

    def __init__(self, errorType: ErrorType) -> None:
        self._errorType = errorType
        # Message table is built per instance (as in the original), so
        # ErrorType members are only looked up at construction time.
        self._messages = {
            ErrorType.INVALID_LICENSE: 'Detected license is invalid',
            ErrorType.NO_LICENSE: 'The license was not found',
            ErrorType.OUTDATED: 'Detected license is outdated',
            ErrorType.LEN_MISMATCH: 'Detected license length does not match pattern',
        }

    def getErrorType(self) -> ErrorType:
        return self._errorType

    def getErrorMessage(self) -> str:
        # .get(): yields None (rather than raising) for an unmapped type.
        return self._messages.get(self._errorType)
class Report(object):
    """One finding: which file, which Error, plus an optional detail message."""

    def __init__(self, pathToFile: str, error: Error, message: str = '') -> None:
        self._pathToFile = pathToFile
        self._error = error
        self._message = message

    def getPathToFile(self) -> str:
        return self._pathToFile

    def getError(self) -> Error:
        return self._error

    def getMessage(self) -> str:
        return self._message

    def report(self) -> str:
        """Formats the finding as a single human-readable line."""
        return '{}: {}. {}.'.format(
            self.getPathToFile(),
            self.getError().getErrorMessage(),
            self.getMessage())
class Checker(object):
    """Checks individual files against one Config's license template."""

    def __init__(self, config: Config) -> None:
        self._config = config
        self._reports: list[Report] = []

    def getReports(self):
        return self._reports

    def _checkLine(self, line: str, prefix: str) -> bool:
        """True when `line` contains `prefix` (prefix's leading spaces ignored)."""
        # lstrip() so a header written without leading indentation still matches.
        return re.search(re.escape(prefix.lstrip()), line) is not None

    def findLicense(self, lines: list[str]) -> list[str]:
        """Collects the leading comment block bounded by the template's
        open/close marker lines; returns [] when the file does not start
        with such a block."""
        collected = []
        started = False
        for line in lines:
            if line == '\n':
                continue  # blank lines before/inside the header are skipped
            if self._checkLine(line=line, prefix=self._config.getStartMultiComm()):
                collected.append(line)
                started = True
            elif self._checkLine(line=line, prefix=self._config.getEndMultiComm()):
                collected.append(line)
                break  # closing marker ends the header
            elif started:
                collected.append(line)
            else:
                break  # first substantive line is not a header start
        return collected

    def _checkLicense(self, test: list[str], pathToFile: str) -> Report:
        """Compares `test` against the template line by line.

        Returns a Report describing the mismatch, or None when the header
        matches exactly. A single wrong line containing a 4-digit year is
        classified as OUTDATED when the file's year is older than the
        template's.
        """
        license = self._config.getLicense()
        if len(license) != len(test):
            return Report(pathToFile=pathToFile,
                          error=Error(errorType=ErrorType.LEN_MISMATCH),
                          message=f'Found {len(test)} lines, expected {len(license)}')
        mismatches = [idx for idx in range(len(license)) if license[idx] != test[idx]]
        if not mismatches:
            return None
        if len(mismatches) == 1:
            idx = mismatches[-1]
            year_pattern = r'\d\d\d\d'
            testDate = re.findall(year_pattern, test[idx])
            licenseDate = re.findall(year_pattern, license[idx])
            if not (testDate and licenseDate):
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.INVALID_LICENSE),
                              message=f'Something wrong...')
            testLastYear = int(testDate[-1])
            licenseLastYear = int(licenseDate[-1])
            if testLastYear < licenseLastYear:
                return Report(pathToFile=pathToFile,
                              error=Error(errorType=ErrorType.OUTDATED),
                              message=f'Found date {testLastYear}, expected {licenseLastYear}')
            return Report(pathToFile=pathToFile,
                          error=Error(errorType=ErrorType.INVALID_LICENSE),
                          message=f"Found something similar to the date: {testLastYear}, but it's not correct. Expected: {licenseLastYear}")
        return Report(pathToFile=pathToFile,
                      error=Error(errorType=ErrorType.INVALID_LICENSE),
                      message=f'Found {len(mismatches)} wrong lines out of {len(license)}')

    def checkFile(self, pathToFile: str) -> None:
        """Reads one file and records a Report when its license is missing
        or does not match the template; records nothing when it matches."""
        # utf-8-sig transparently strips a BOM so marker matching still works.
        with open(pathToFile, 'r', encoding="utf-8-sig") as file:
            found = self.findLicense(lines=file.readlines())
        if not found:
            self._reports.append(Report(pathToFile=pathToFile,
                                        error=Error(errorType=ErrorType.NO_LICENSE)))
            return
        verdict = self._checkLicense(test=found, pathToFile=pathToFile)
        if verdict:
            self._reports.append(verdict)
class Walker(object):
    """Pairs a Config with a Checker and walks the configured directory tree.

    NOTE(review): indentation below is reconstructed from a flattened dump.
    The `else` after the allow-list loop is assumed to be a for/else on that
    loop (it always runs, since that loop contains no break) — confirm
    against the original file before relying on this structure.
    """

    def __init__(self, config: Config) -> None:
        self._config = config
        self._checker = Checker(config=self._config)

    def getChecker(self):
        return self._checker

    def getConfig(self):
        return self._config

    def _getFiles(self) -> list[str]:
        # Builds the list of files to check. For every directory visited:
        # allow-listed files are collected first; then, unless the directory
        # matches an ignored name or path, every file with a configured
        # extension that is not explicitly ignored is collected too.
        result = []
        for address, dirs, files in os.walk(self._config.getDir()):
            for i in files:
                # Allow-list overrides directory ignores (per the README).
                if (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getAllowListFile()))):
                    filename, file_extension = os.path.splitext(i)
                    if file_extension in self._config.getFileExtensions():
                        result.append(os.path.join(address, i))
            else:
                # for/else: always executes (no break above).
                for i in self._config.getIgnoreListDirName():
                    if(re.search(re.escape(i), address)):
                        break  # directory name is ignored -> skip this address
                else:
                    for i in self._config.getIgnoreListDir():
                        if(re.search(re.escape(os.path.normpath(i)), address)):
                            break  # directory path is ignored -> skip this address
                    else:
                        for i in files:
                            if not (os.path.join(address, i) in list(map(lambda x: os.path.normpath(x), self._config.getIgnoreListFile()))):
                                filename, file_extension = os.path.splitext(i)
                                if file_extension in self._config.getFileExtensions():
                                    result.append(os.path.join(address, i))
        return result

    def checkFiles(self) -> list[Report]:
        # Checks every collected file; exceptions are printed rather than
        # raised so one unreadable file does not abort the whole run.
        files = self._getFiles()
        for file in files:
            if (PRINT_CHECKING):
                print(f'Checking {file}...')
            # self._checker.checkFile(file)
            try:
                self._checker.checkFile(file)
            except Exception as e:
                print(file)
                print(e)
        return self._checker.getReports()
class Fixer(object):
    """Repairs files reported by a Walker's Checker, honouring the global FIX list."""

    def __init__(self, walker: Walker) -> None:
        # Original annotated __init__ as `-> int`; __init__ must return None.
        self._walker = walker
        self._checker = self._walker.getChecker()
        self._config = self._walker.getConfig()

    def fix(self):
        """Repairs every selected report and returns how many files were touched.

        A report is selected when FIX is empty/falsy (fix everything) or when
        its error type is listed in FIX — equivalent to the original pair of
        compound boolean conditions, written as a single guard.
        """
        count = 0
        for report in self._checker.getReports():
            errorType = report.getError().getErrorType()
            if FIX and errorType not in FIX:
                continue  # this category was not requested in config "fix"
            if errorType == ErrorType.NO_LICENSE:
                self._addLicense(report.getPathToFile())
            else:
                self._fixLicense(report.getPathToFile())
            count += 1
        return count

    def _addLicense(self, pathToFile: str):
        """Prepends the license template (plus one blank line) to a file
        that has no header at all."""
        with open(pathToFile, 'r', encoding="utf8") as file:
            buffer = file.readlines()
        with open(pathToFile, 'w', encoding="utf8") as file:
            file.writelines(self._config.getLicense())
            file.write('\n')
            file.writelines(buffer)
        return

    def _fixLicense(self, pathToFile: str):
        """Replaces an existing (wrong/outdated) header with the template.

        A UTF-8 BOM, if present, is preserved by writing back as utf-8-sig.
        """
        writeEncoding = "utf8"
        with open(pathToFile, 'r', encoding="utf8") as file:
            buffer = file.readlines()
        # Reading as plain utf8 keeps the BOM as a literal '\ufeff' prefix,
        # which is exactly what this check looks for.
        if buffer and buffer[0].startswith(codecs.decode(codecs.BOM_UTF8)):
            writeEncoding = "utf-8-sig"
        oldLicense = self._checker.findLicense(buffer)
        for line in oldLicense:
            # remove() deletes the first matching line; header lines are
            # removed top-down, so this strips the detected header.
            buffer.remove(line)
        with open(pathToFile, 'w', encoding=writeEncoding) as file:
            file.writelines(self._config.getLicense())
            file.writelines(buffer)
        return
# Accumulators filled by the main script at the bottom of this module.
walkers: list[Walker] = []
reports: list[Report] = []


def fix(walkers):
    """Runs a Fixer over every walker and prints how many files were repaired.

    Reads the module-level FIX (which categories to repair) and — only for
    the "fix all" message — the global `reports` list.
    """
    count = 0
    # Plain strings here: the originals were f-strings with no placeholders.
    if FIX:
        print('Fixing selected files...')
    else:
        print(f'Fixing all {len(reports)} files...')
    for walker in walkers:
        fixer = Fixer(walker=walker)
        count += fixer.fix()
    print(f'Fixed {count} files.')
def writeReports(reports: list[Report]) -> None:
    """Writes one REPORT_FOLDER/<ERROR_NAME>.txt per error category.

    Every category file is (re)written, even when empty, so stale results
    from a previous run are cleared.
    """
    grouped: dict[str, list[Report]] = {member.name: [] for member in ErrorType}
    for rep in reports:
        grouped[rep.getError().getErrorType().name].append(rep)
    for member in ErrorType:
        with open(f'{REPORT_FOLDER}/{member.name}.txt', 'w', encoding="utf8") as f:
            f.writelines(rep.report() + '\n' for rep in grouped[member.name])
# ---- Main script: build walkers, check every configured tree, report/fix. ----
for config in CONFIGS:
    walkers.append(Walker(config=config))

print('Checking files...')
for walker in walkers:
    reports = reports + walker.checkFiles()

if reports:
    # makedirs(..., exist_ok=True) also creates missing parent directories
    # and avoids the exists/mkdir race the original check had.
    os.makedirs(REPORT_FOLDER, exist_ok=True)
    if PRINT_REPORTS:
        print('\n'.join(map(lambda report: report.report(), reports)))
    print(f'{len(reports)} invalid licenses were found.')
    print(f'Saving reports in {REPORT_FOLDER}')
    writeReports(reports=reports)
    if FIX:
        fix(walkers=walkers)
    # else:
    #     choice = str(input(f'Fix it automatically? [Y/N] ')).lower()
    #     if choice == 'y':
    #         fix(walkers=walkers)
else:
    print('All licenses are ok.')
# os.system('pause')

View File

@ -1,32 +0,0 @@
#!/usr/bin/env python
"""Minifies a JavaScript file with the Google Closure Compiler, then copies
the original file's leading license comment onto the minified output.

Usage: min.py PATH_TO_SCRIPT.js  ->  writes PATH_TO_SCRIPT.min.js
"""
import sys
sys.path.append('../../build_tools/scripts')
import base
import os

args = sys.argv[1:]
if len(args) < 1:
    print("Please use min.py PATH_TO_SCRIPT.js")
    # Non-zero status so callers/CI detect the misuse (original exited 0).
    exit(1)

script_path = os.path.abspath(args[0])
script_dir = os.path.dirname(script_path)
script_name = os.path.splitext(os.path.basename(script_path))[0]
script_path_min = os.path.join(script_dir, script_name + ".min.js")

# SIMPLE_OPTIMIZATIONS renames locals but keeps the public API callable.
#compilation_level = "WHITESPACE_ONLY"
compilation_level = "SIMPLE_OPTIMIZATIONS"
base.cmd("java", ["-jar", "../../sdkjs/build/node_modules/google-closure-compiler-java/compiler.jar",
                  "--compilation_level", compilation_level,
                  "--js_output_file", script_path_min,
                  "--js", script_path])

# The compiler strips comments; re-attach the source's leading /* ... */ block.
# NOTE(review): assumes the source starts with a /* ... */ header; if "*/" is
# absent, find() returns -1 and this slice keeps a stray first byte — confirm.
dev_content = base.readFile(script_path)
license = dev_content[0:dev_content.find("*/") + 2]
min_content = base.readFile(script_path_min)
base.delete_file(script_path_min)
base.writeFile(script_path_min, license + "\n\n" + min_content)

12
scripts/package.py Normal file
View File

@ -0,0 +1,12 @@
#!/usr/bin/env python
"""Dispatches a package build to the product-specific packaging module."""
import package_desktop
import package_server
import package_builder


def make(product):
    """Builds packages for `product` ('desktop', 'server' or 'builder').

    Exits the process with status 1 for any other value, exactly like the
    original if/elif chain.
    """
    builders = {
        'desktop': package_desktop.make,
        'server': package_server.make,
        'builder': package_builder.make,
    }
    if product not in builders:
        exit(1)
    builders[product]()
    return

View File

@ -1,71 +1,44 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import package_utils as utils
from package_utils import *
onlyoffice = True
company_name = "ONLYOFFICE"
company_name = 'ONLYOFFICE'
company_name_l = company_name.lower()
publisher_name = "Ascensio System SIA"
cert_name = "Ascensio System SIA"
publisher_name = 'Ascensio System SIA'
cert_name = 'Ascensio System SIA'
s3_bucket = "repo-doc-onlyoffice-com"
s3_region = "eu-west-1"
s3_base_url = "https://s3.eu-west-1.amazonaws.com/repo-doc-onlyoffice-com"
if product == 'desktop':
if utils.is_windows():
desktop_product_name = "Desktop Editors"
desktop_product_name_s = desktop_product_name.replace(" ","")
desktop_package_name = company_name + "-" + desktop_product_name_s
desktop_changes_dir = "desktop-apps/win-linux/package/windows/update/changes"
if utils.is_macos():
desktop_package_name = "ONLYOFFICE"
desktop_build_dir = "desktop-apps/macos"
desktop_branding_dir = "desktop-apps/macos"
desktop_updates_dir = "build/update"
desktop_changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
builder_product_name = "Document Builder"
if utils.is_linux():
desktop_make_targets = [
{
"make": "tar",
"src": "tar/*.tar*",
"dst": "desktop/linux/generic/"
},
{
"make": "deb",
"src": "deb/*.deb",
"dst": "desktop/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/build/RPMS/*/*.rpm",
"dst": "desktop/linux/rhel/"
},
{
"make": "rpm-suse",
"src": "rpm-suse/build/RPMS/*/*.rpm",
"dst": "desktop/linux/suse/"
if system == 'windows':
build_dir = get_path("desktop-apps/win-linux/package/windows")
# branding_dir = get_path(branding, build_dir)
product_name = 'Desktop Editors'
product_name_s = product_name.replace(' ','')
package_name = company_name + '_' + product_name_s
vcredist_list = ['2022']
update_changes_list = {
'en': "changes",
'ru': "changes_ru"
}
]
server_make_targets = [
{
"make": "deb",
"src": "deb/*.deb",
"dst": "server/linux/debian/"
},
{
"make": "rpm",
"src": "rpm/builddir/RPMS/*/*.rpm",
"dst": "server/linux/rhel/"
},
{
"make": "tar",
"src": "*.tar*",
"dst": "server/linux/snap/"
elif system == 'darwin':
build_dir = "desktop-apps/macos"
branding_build_dir = "desktop-apps/macos"
package_name = company_name
updates_dir = "build/update"
changes_dir = "ONLYOFFICE/update/updates/ONLYOFFICE/changes"
update_changes_list = {
'en': "ReleaseNotes",
'ru': "ReleaseNotesRU"
}
]
sparkle_base_url = "https://download.onlyoffice.com/install/desktop/editors/mac"
if product == 'builder':
if system == 'windows':
build_dir = "document-builder-package"
product_name = 'Document Builder'
product_name_s = product_name.replace(' ','')
package_name = company_name + '_' + product_name_s

View File

@ -1,235 +1,101 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import package_utils as utils
import package_common as common
import package_branding as branding
from package_utils import *
from package_branding import *
def make():
utils.log_h1("BUILDER")
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
utils.log("Unsupported host OS")
return
if common.deploy:
make_archive()
if utils.is_windows():
if system == 'windows':
make_windows()
elif utils.is_macos():
make_macos_linux()
elif utils.is_linux():
make_macos_linux()
return
def s3_upload(files, dst):
if not files: return False
ret = True
for f in files:
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
if upload:
utils.log("URL: " + branding.s3_base_url + "/" + key)
ret &= upload
return ret
def make_archive():
utils.set_cwd(utils.get_path(
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
utils.log_h2("builder archive build")
utils.delete_file("builder.7z")
args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"]
if utils.is_windows():
ret = utils.cmd(*args, verbose=True)
elif system == 'linux':
if 'packages' in targets:
set_cwd(build_dir)
log("Clean")
cmd("make", ["clean"])
log("Build packages")
cmd("make", ["packages"])
else:
ret = utils.sh(" ".join(args), verbose=True)
utils.set_summary("builder archive build", ret)
utils.log_h2("builder archive deploy")
dest = "builder-" + common.prefix.replace("_","-") + ".7z"
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
ret = utils.s3_upload(
"builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
utils.set_summary("builder archive deploy", ret)
if ret:
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
utils.s3_copy(
"s3://" + branding.s3_bucket + "/" + dest_version,
"s3://" + branding.s3_bucket + "/" + dest_latest)
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
utils.set_cwd(common.workspace_dir)
exit(1)
return
#
# Windows
#
def make_windows():
global package_version, arch
utils.set_cwd("document-builder-package")
global package_version, sign, machine, arch, source_dir, base_dir, \
innosetup_file, portable_zip_file, isxdl_file
base_dir = "base"
isxdl_file = "exe/scripts/isxdl/isxdl.dll"
package_version = common.version + "." + common.build
arch = {
"windows_x64": "x64",
"windows_x86": "x86"
}[common.platform]
set_cwd(get_abspath(git_dir, build_dir))
if common.clean:
utils.log_h2("builder clean")
utils.delete_dir("build")
utils.delete_dir("zip")
if 'clean' in targets:
log("\n=== Clean\n")
delete_dir(base_dir)
delete_files(isxdl_file)
delete_files("exe/*.exe")
delete_files("zip/*.zip")
if make_prepare():
make_zip()
make_wheel()
else:
utils.set_summary("builder zip build", False)
utils.set_summary("builder python wheel build", False)
package_version = version + '.' + build
sign = 'sign' in targets
utils.set_cwd(common.workspace_dir)
for target in targets:
if not (target.startswith('innosetup') or target.startswith('portable')):
continue
machine = get_platform(target)['machine']
arch = get_platform(target)['arch']
suffix = arch
source_prefix = "win_" + machine
source_dir = get_path("%s/%s/%s/%s" % (out_dir, source_prefix, company_name_l, product_name_s))
log("\n=== Copy arifacts\n")
create_dir(base_dir)
copy_dir_content(source_dir, base_dir + '\\')
if target.startswith('innosetup'):
download_isxdl()
innosetup_file = "exe/%s_%s_%s.exe" % (package_name, package_version, suffix)
make_innosetup()
if target.startswith('portable'):
portable_zip_file = "zip/%s_%s_%s.zip" % (package_name, package_version, suffix)
make_win_portable()
return
def make_prepare():
args = [
"-Version", package_version,
"-Arch", arch
]
if common.sign:
args += ["-Sign"]
utils.log_h2("builder prepare")
ret = utils.ps1("make.ps1", args, verbose=True)
utils.set_summary("builder prepare", ret)
return ret
def make_zip():
args = [
"-Version", package_version,
"-Arch", arch
]
# if common.sign:
# args += ["-Sign"]
utils.log_h2("builder zip build")
ret = utils.ps1("make_zip.ps1", args, verbose=True)
utils.set_summary("builder zip build", ret)
if common.deploy and ret:
utils.log_h2("builder zip deploy")
ret = s3_upload(utils.glob_path("zip/*.zip"), "builder/win/generic/")
utils.set_summary("builder zip deploy", ret)
def download_isxdl():
log("\n=== Download isxdl\n")
log("--- " + isxdl_file)
if is_file(isxdl_file):
log("! file exist, skip")
return
create_dir(get_dirname(isxdl_file))
download_file(isxdl_link, isxdl_file)
return
def make_macos_linux():
utils.set_cwd("document-builder-package")
make_tar()
make_wheel()
utils.set_cwd(common.workspace_dir)
def make_innosetup():
log("\n=== Build innosetup project\n")
iscc_args = ["/DVERSION=" + package_version]
if not onlyoffice:
iscc_args.append("/DBRANDING_DIR=" + get_abspath(git_dir, branding, build_dir, "exe"))
if sign:
iscc_args.append("/DSIGN")
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
log("--- " + innosetup_file)
if is_file(innosetup_file):
log("! file exist, skip")
return
set_cwd("exe")
cmd("iscc", iscc_args + ["builder.iss"])
set_cwd("..")
return
def make_tar():
utils.log_h2("builder tar build")
make_args = ["tar"]
if common.platform == "darwin_arm64":
make_args += ["-e", "UNAME_M=arm64"]
if common.platform == "darwin_x86_64":
make_args += ["-e", "UNAME_M=x86_64"]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"]
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("builder tar build", ret)
if common.deploy:
utils.log_h2("builder tar deploy")
if utils.is_macos():
s3_dest = "builder/mac/generic/"
elif utils.is_linux():
s3_dest = "builder/linux/generic/"
ret = s3_upload(utils.glob_path("tar/*.tar.xz"), s3_dest)
utils.set_summary("builder tar deploy", ret)
return
def make_wheel():
platform_tags = {
"windows_x64": "win_amd64",
"windows_x86": "win32",
"darwin_arm64": "macosx_11_0_arm64",
"darwin_x86_64": "macosx_10_9_x86_64",
"linux_x86_64": "manylinux_2_23_x86_64",
"linux_aarch64": "manylinux_2_23_aarch64"
}
if not common.platform in platform_tags: return
utils.log_h2("builder python wheel build")
builder_dir = "build"
if utils.is_linux():
builder_dir = "build/opt/onlyoffice/documentbuilder"
utils.delete_dir("python")
utils.copy_dir("../onlyoffice/build_tools/packaging/docbuilder/resources", "python")
utils.copy_dir(builder_dir, "python/docbuilder/lib", True, True)
desktop_dir = "../desktop-apps/macos/build/ONLYOFFICE.app/Contents/Resources/converter"
if utils.is_macos() and "desktop" in common.targets and utils.is_exist(desktop_dir):
for f in utils.glob_path(desktop_dir + "/*.dylib") + [desktop_dir + "/x2t"]:
utils.copy_file(f, builder_dir + "/" + utils.get_basename(f))
old_cwd = utils.get_cwd()
utils.set_cwd("python/docbuilder")
if not utils.is_file("docbuilder.py"):
utils.copy_file("lib/docbuilder.py", "docbuilder.py")
# fix docbuilder.py
content = ""
with open("docbuilder.py", "r") as file:
content = file.read()
old_line = "builder_path = os.path.dirname(os.path.realpath(__file__))"
new_line = "builder_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"lib\")"
content = content.replace(old_line, new_line)
with open("docbuilder.py", "w") as file:
file.write(content)
# remove unnecessary files
utils.set_cwd("lib")
utils.delete_dir("include")
utils.delete_file("build.date")
utils.delete_file("docbuilder.jar")
utils.delete_file("docbuilder.py")
if utils.is_windows():
utils.delete_file("doctrenderer.lib")
utils.delete_file("docbuilder.com.dll")
utils.delete_file("docbuilder.net.dll")
utils.delete_file("docbuilder.jni.dll")
elif utils.is_macos():
if (utils.is_file("libdocbuilder.jni.dylib")):
utils.delete_file("libdocbuilder.jni.dylib")
if (utils.is_dir("docbuilder.jni.framework")):
utils.delete_file("docbuilder.jni.framework")
utils.remove_all_symlinks(".")
elif utils.is_linux():
utils.delete_file("libdocbuilder.jni.so")
utils.set_env("DOCBUILDER_VERSION", common.version + "." + common.build)
platform = "linux_64"
utils.set_cwd("../..")
plat_name = platform_tags[common.platform]
ret = utils.sh("python setup.py bdist_wheel --plat-name " + plat_name + " --python-tag py2.py3", verbose=True)
utils.set_summary("builder python wheel build", ret)
if common.deploy and ret:
utils.log_h2("builder python wheel deploy")
if utils.is_windows():
s3_dest = "builder/win/python/"
elif utils.is_macos():
s3_dest = "builder/mac/python/"
elif utils.is_linux():
s3_dest = "builder/linux/python/"
ret = s3_upload(utils.glob_path("dist/*.whl"), s3_dest)
utils.set_summary("builder python wheel deploy", ret)
utils.set_cwd(old_cwd)
def make_win_portable():
log("\n=== Build portable\n")
log("--- " + portable_zip_file)
if is_file(portable_zip_file):
log("! file exist, skip")
return
cmd("7z", ["a", "-y", portable_zip_file, get_path(base_dir, "*")])
return

View File

@ -1,17 +0,0 @@
#!/usr/bin/env python
platformPrefixes = {
"windows_x64": "win_64",
"windows_x86": "win_32",
"windows_arm64": "win_arm64",
"windows_x64_xp": "win_64_xp",
"windows_x86_xp": "win_32_xp",
"darwin_arm64": "mac_arm64",
"darwin_x86_64": "mac_64",
"darwin_x86_64_v8": "mac_64",
"linux_x86_64": "linux_64",
"linux_aarch64": "linux_arm64",
}
out_dir = "build_tools/out"
tsa_server = "http://timestamp.digicert.com"

View File

@ -1,93 +0,0 @@
#!/usr/bin/env python
import package_utils as utils
import package_common as common
import package_branding as branding
def make():
utils.log_h1("CORE")
if not (utils.is_windows() or utils.is_macos() or utils.is_linux()):
utils.log("Unsupported host OS")
return
if common.deploy:
make_archive()
return
def make_archive():
utils.set_cwd(utils.get_path(
"build_tools/out/" + common.prefix + "/" + branding.company_name.lower()))
utils.log_h2("core archive build")
utils.delete_file("core.7z")
args = ["7z", "a", "-y", "core.7z", "./core/*"]
if utils.is_windows():
ret = utils.cmd(*args, verbose=True)
else:
ret = utils.sh(" ".join(args), verbose=True)
utils.set_summary("core archive build", ret)
utils.log_h2("core archive deploy")
dest = "core-" + common.prefix.replace("_","-") + ".7z"
dest_latest = "archive/%s/latest/%s" % (common.branch, dest)
dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest)
ret = utils.s3_upload(
"core.7z", "s3://" + branding.s3_bucket + "/" + dest_version)
utils.set_summary("core archive deploy", ret)
if ret:
utils.log("URL: " + branding.s3_base_url + "/" + dest_version)
utils.s3_copy(
"s3://" + branding.s3_bucket + "/" + dest_version,
"s3://" + branding.s3_bucket + "/" + dest_latest)
utils.log("URL: " + branding.s3_base_url + "/" + dest_latest)
utils.set_cwd(common.workspace_dir)
return
def deploy_closuremaps_sdkjs(license):
if not common.deploy: return
utils.log_h1("SDKJS CLOSURE MAPS")
maps = utils.glob_path("sdkjs/build/maps/*.js.map")
if maps:
for m in maps: utils.log("- " + m)
else:
utils.log_err("files do not exist")
utils.set_summary("sdkjs closure maps %s deploy" % license, False)
return
utils.log_h2("sdkjs closure maps %s deploy" % license)
ret = True
for f in maps:
base = utils.get_basename(f)
key = "closure-maps/sdkjs/%s/%s/%s/%s" % (license, common.version, common.build, base)
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
ret &= upload
if upload:
utils.log("URL: " + branding.s3_base_url + "/" + key)
utils.set_summary("sdkjs closure maps %s deploy" % license, ret)
return
def deploy_closuremaps_webapps(license):
if not common.deploy: return
utils.log_h1("WEB-APPS CLOSURE MAPS")
maps = utils.glob_path("web-apps/deploy/web-apps/apps/*/*/*.js.map") \
+ utils.glob_path("web-apps/deploy/web-apps/apps/*/mobile/dist/js/*.js.map")
if maps:
for m in maps: utils.log("- " + m)
else:
utils.log_err("files do not exist")
utils.set_summary("web-apps closure maps %s deploy" % license, False)
return
utils.log_h2("web-apps closure maps %s deploy" % license)
ret = True
for f in maps:
base = utils.get_relpath(f, "web-apps/deploy/web-apps/apps").replace("/", "_")
key = "closure-maps/web-apps/%s/%s/%s/%s" % (license, common.version, common.build, base)
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
ret &= upload
if upload:
utils.log("URL: " + branding.s3_base_url + "/" + key)
utils.set_summary("web-apps closure maps %s deploy" % license, ret)
return

View File

@ -2,172 +2,245 @@
# -*- coding: utf-8 -*-
import os
import re
import package_utils as utils
import package_common as common
import package_branding as branding
from package_utils import *
from package_branding import *
def make():
utils.log_h1("DESKTOP")
if utils.is_windows():
if system == 'windows':
make_windows()
elif utils.is_macos():
elif system == 'darwin':
make_macos()
elif utils.is_linux():
make_linux()
elif system == 'linux':
if 'packages' in targets:
set_cwd(build_dir)
log("Clean")
cmd("make", ["clean"])
log("Build packages")
cmd("make", ["packages"])
else:
utils.log("Unsupported host OS")
exit(1)
return
def s3_upload(files, dst):
if not files: return False
ret = True
for f in files:
key = dst + utils.get_basename(f) if dst.endswith("/") else dst
upload = utils.s3_upload(f, "s3://" + branding.s3_bucket + "/" + key)
if upload:
utils.log("URL: " + branding.s3_base_url + "/" + key)
ret &= upload
return ret
#
# Windows
#
def make_windows():
global package_name, package_version, arch, xp
utils.set_cwd("desktop-apps\\win-linux\\package\\windows")
global package_version, sign, machine, arch, xp, iscc_args, source_dir, \
innosetup_file, innosetup_update_file, advinst_file, portable_zip_file, \
portable_evb_file
package_name = branding.desktop_package_name
package_version = common.version + "." + common.build
arch = {
"windows_x64": "x64",
"windows_x64_xp": "x64",
"windows_x86": "x86",
"windows_x86_xp": "x86",
"windows_arm64": "arm64"
}[common.platform]
xp = common.platform.endswith("_xp")
set_cwd(get_abspath(git_dir, build_dir))
if common.clean:
utils.log_h2("desktop clean")
utils.delete_dir("DesktopEditors-cache")
utils.delete_files("*.exe")
utils.delete_files("*.msi")
utils.delete_files("*.aic")
utils.delete_files("*.tmp")
utils.delete_files("*.zip")
utils.delete_files("data\\*.exe")
if 'clean' in targets:
log("\n=== Clean\n")
delete_dir(get_path("data/vcredist"))
delete_dir("DesktopEditors-cache")
delete_files("*.exe")
delete_files("*.msi")
delete_files("*.aic")
delete_files("*.tmp")
delete_files("*.zip")
delete_files(get_path("portable/*.exe"))
delete_files(get_path("update/*.exe"))
delete_files(get_path("update/*.xml"))
delete_files(get_path("update/*.html"))
if not xp:
make_prepare()
make_zip()
if branding.onlyoffice:
make_inno()
make_inno("standalone")
if arch != "arm64":
make_inno("update")
package_version = version + '.' + build
sign = 'sign' in targets
for target in targets:
if not (target.startswith('innosetup') or target.startswith('advinst') or
target.startswith('portable')):
continue
machine = get_platform(target)['machine']
arch = get_platform(target)['arch']
xp = get_platform(target)['xp']
suffix = arch + ("_xp" if xp else "")
source_prefix = "win_" + machine + ("_xp" if xp else "")
source_dir = get_path("%s/%s/%s/%s" % (out_dir, source_prefix, company_name_l, product_name_s))
if target.startswith('innosetup'):
for year in vcredist_list:
download_vcredist(year)
innosetup_file = "%s_%s_%s.exe" % (package_name, package_version, suffix)
make_innosetup()
if 'winsparkle-update' in targets:
innosetup_update_file = get_path("update/editors_update_%s.exe" % suffix)
make_innosetup_update()
if 'winsparkle-files' in targets:
make_winsparkle_files()
if target.startswith('advinst'):
advinst_file = "%s_%s_%s.msi" % (package_name, package_version, suffix)
make_advinst()
make_prepare("commercial")
make_zip("commercial")
make_inno("commercial")
make_advinst("commercial")
if target.startswith('portable-zip'):
portable_zip_file = "%s_%s_%s.zip" % (package_name, package_version, suffix)
make_portable_zip()
if target.startswith('portable-evb'):
portable_evb_file = get_path("portable/DesktopEditorsPortable.exe")
make_portable_evb()
return
def download_vcredist(year):
log("\n=== Download vcredist " + year + "\n")
vcredist = get_path("data/vcredist/vcredist_%s_%s.exe" % (year, arch))
log("--- " + vcredist)
if is_file(vcredist):
log("! file exist, skip")
return
create_dir(get_dirname(vcredist))
download_file(vcredist_links[year][machine], vcredist)
return
def make_innosetup():
log("\n=== Build innosetup project\n")
global iscc_args
iscc_args = [
"/Qp",
"/DsAppVersion=" + package_version,
"/DDEPLOY_PATH=" + source_dir,
"/D_ARCH=" + machine
]
if onlyoffice:
iscc_args.append("/D_ONLYOFFICE=1")
else:
make_prepare("xp")
make_zip("xp")
make_inno("xp")
utils.set_cwd(common.workspace_dir)
return
def make_prepare(edition = "opensource"):
args = [
"-Version", package_version,
"-Arch", arch,
"-Target", edition,
"-CompanyName", branding.company_name
]
if common.sign:
args += ["-Sign"]
utils.log_h2("desktop prepare " + edition)
ret = utils.ps1("make.ps1", args, verbose=True)
utils.set_summary("desktop prepare " + edition, ret)
return
def make_zip(edition = "opensource"):
if edition == "commercial": zip_file = "%s-Enterprise-%s-%s.zip"
elif edition == "xp": zip_file = "%s-XP-%s-%s.zip"
else: zip_file = "%s-%s-%s.zip"
zip_file = zip_file % (package_name, package_version, arch)
args = [
"-Version", package_version,
"-Arch", arch,
"-Target", edition,
"-CompanyName", branding.company_name
]
# if common.sign:
# args += ["-Sign"]
utils.log_h2("desktop zip " + edition + " build")
ret = utils.ps1("make_zip.ps1", args, verbose=True)
utils.set_summary("desktop zip " + edition + " build", ret)
if common.deploy and ret:
utils.log_h2("desktop zip " + edition + " deploy")
ret = s3_upload([zip_file], "desktop/win/generic/")
utils.set_summary("desktop zip " + edition + " deploy", ret)
return
def make_inno(edition = "opensource"):
if edition == "commercial": inno_file = "%s-Enterprise-%s-%s.exe"
elif edition == "standalone": inno_file = "%s-Standalone-%s-%s.exe"
elif edition == "update": inno_file = "%s-Update-%s-%s.exe"
elif edition == "xp": inno_file = "%s-XP-%s-%s.exe"
else: inno_file = "%s-%s-%s.exe"
inno_file = inno_file % (package_name, package_version, arch)
args = [
"-Version", package_version,
"-Arch", arch,
"-Target", edition
]
if common.sign:
args += ["-Sign"]
iscc_args.append("/DsBrandingFolder=" + get_abspath(git_dir, branding_dir))
if xp:
args += ["-TimestampServer", "http://timestamp.comodoca.com/authenticode"]
utils.log_h2("desktop inno " + edition + " build")
ret = utils.ps1("make_inno.ps1", args, verbose=True)
utils.set_summary("desktop inno " + edition + " build", ret)
if common.deploy and ret:
utils.log_h2("desktop inno " + edition + " deploy")
ret = s3_upload([inno_file], "desktop/win/inno/")
utils.set_summary("desktop inno " + edition + " deploy", ret)
iscc_args.append("/D_WIN_XP=1")
if sign:
iscc_args.append("/DENABLE_SIGNING=1")
iscc_args.append("/Sbyparam=signtool.exe sign /v /n $q" + cert_name + "$q /t " + tsa_server + " $f")
log("--- " + innosetup_file)
if is_file(innosetup_file):
log("! file exist, skip")
return
cmd("iscc", iscc_args + ["common.iss"])
return
def make_advinst(edition = "opensource"):
if edition == "commercial": advinst_file = "%s-Enterprise-%s-%s.msi"
else: advinst_file = "%s-%s-%s.msi"
advinst_file = advinst_file % (package_name, package_version, arch)
args = [
"-Version", package_version,
"-Arch", arch,
"-Target", edition
def make_innosetup_update():
log("\n=== Build innosetup update project\n")
log("--- " + innosetup_update_file)
if is_file(innosetup_update_file):
log("! file exist, skip")
return
cmd("iscc", iscc_args + ["/DTARGET_NAME=" + innosetup_file, "update_common.iss"])
return
def make_winsparkle_files():
log("\n=== Build winsparkle files\n")
awk_branding = "update/branding.awk"
if not onlyoffice:
build_branding_dir = get_abspath(git_dir, branding_dir, "win-linux/package/windows")
else:
build_branding_dir = get_path(".")
awk_args = [
"-v", "Version=" + version,
"-v", "Build=" + build,
"-v", "Timestamp=" + timestamp,
"-i", get_path(build_branding_dir, awk_branding)
]
if common.sign:
args += ["-Sign"]
utils.log_h2("desktop advinst " + edition + " build")
ret = utils.ps1("make_advinst.ps1", args, verbose=True)
utils.set_summary("desktop advinst " + edition + " build", ret)
appcast = get_path("update/appcast.xml")
log("--- " + appcast)
if is_file(appcast):
log("! file exist, skip")
else:
command = "env LANG=en_US.UTF-8 awk " + ' '.join(awk_args) + \
" -f " + appcast + ".awk"
appcast_result = proc_open(command)
if appcast_result['stderr'] != "":
log("! error: " + appcast_result['stderr'])
write_file(appcast, appcast_result['stdout'])
if common.deploy and ret:
utils.log_h2("desktop advinst " + edition + " deploy")
ret = s3_upload([advinst_file], "desktop/win/advinst/")
utils.set_summary("desktop advinst " + edition + " deploy", ret)
changes_dir = get_path(build_branding_dir, "update/changes", version)
for lang, base in update_changes_list.items():
changes = get_path("update/" + base + ".html")
if lang == 'en': encoding = 'en_US.UTF-8'
elif lang == 'ru': encoding = 'ru_RU.UTF-8'
log("--- " + changes)
if is_file(changes):
log("! file exist, skip")
else:
command = "env LANG=" + encoding + " awk " + ' '.join(awk_args) + \
" -f update\\changes.html.awk " + changes_dir + "\\" + lang + ".html"
changes_result = proc_open(command)
if changes_result['stderr'] != "":
log("! error: " + changes_result['stderr'])
write_file(changes, changes_result['stdout'])
return
def make_advinst():
log("\n=== Build advanced installer project\n")
log("--- " + advinst_file)
if is_file(advinst_file):
log("! file exist, skip")
return
aic_content = [";aic"]
if not onlyoffice:
copy_dir_content(
get_abspath(git_dir, branding_dir, "win-linux/package/windows/data"),
"data", ".bmp")
copy_dir_content(
get_abspath(git_dir, branding_dir, "win-linux/package/windows/data"),
"data", ".png")
aic_content += [
"SetProperty ProductName=\"%s\"" % product_name_full,
"SetProperty Manufacturer=\"%s\"" % publisher_name.replace('"', '""'),
"SetProperty ARPURLINFOABOUT=\"%s\"" % info_about_url,
"SetProperty ARPURLUPDATEINFO=\"%s\"" % update_info_url,
"SetProperty ARPHELPLINK=\"%s\"" % help_url,
"SetProperty ARPHELPTELEPHONE=\"%s\"" % help_phone,
"SetProperty ARPCONTACT=\"%s\"" % publisher_address,
"DelLanguage 1029 -buildname DefaultBuild",
"DelLanguage 1031 -buildname DefaultBuild",
"DelLanguage 1041 -buildname DefaultBuild",
"DelLanguage 1046 -buildname DefaultBuild",
"DelLanguage 2070 -buildname DefaultBuild",
"DelLanguage 1060 -buildname DefaultBuild",
"DelLanguage 1036 -buildname DefaultBuild",
"DelLanguage 3082 -buildname DefaultBuild",
"DelLanguage 1033 -buildname DefaultBuild"
]
if not sign: aic_content.append("ResetSig")
if machine == '32': aic_content.append("SetPackageType x86")
aic_content += [
"AddOsLc -buildname DefaultBuild -arch " + arch,
"NewSync APPDIR " + source_dir + " -existingfiles delete",
"UpdateFile APPDIR\\DesktopEditors.exe " + get_path(source_dir, "DesktopEditors.exe"),
"SetVersion " + package_version,
"SetPackageName " + advinst_file + " -buildname DefaultBuild",
"Rebuild -buildslist DefaultBuild"
]
write_file("DesktopEditors.aic", "\r\n".join(aic_content), 'utf-8-sig')
cmd("AdvancedInstaller.com",
["/execute", "DesktopEditors.aip", "DesktopEditors.aic", "-nofail"])
return
def make_portable_zip():
log("\n=== Build portable zip\n")
log("--- " + portable_zip_file)
if is_file(portable_zip_file):
log("! file exist, skip")
return
cmd("7z", ["a", "-y", portable_zip_file, get_path(source_dir, "*")])
return
def make_portable_evb():
log("\n=== Build portable evb\n")
log("--- " + portable_evb_file)
if is_file(portable_evb_file):
log("! file exist, skip")
return
cmd('start', ["%s\%s" % (get_abspath(git_dir, build_dir), "portable_" + source_prefix + ".bat")])
return
#
@ -175,171 +248,88 @@ def make_advinst(edition = "opensource"):
#
def make_macos():
global package_name, build_dir, branding_dir, updates_dir, changes_dir, \
suffix, lane, scheme, source_dir, released_updates_dir
package_name = branding.desktop_package_name
build_dir = branding.desktop_build_dir
branding_dir = branding.desktop_branding_dir
updates_dir = branding.desktop_updates_dir
changes_dir = branding.desktop_changes_dir
suffix = {
"darwin_x86_64": "x86_64",
"darwin_x86_64_v8": "v8",
"darwin_arm64": "arm"
}[common.platform]
lane = "release_" + suffix
scheme = package_name + "-" + suffix
sparkle_updates = False
global suffix, lane, scheme
utils.set_cwd(branding_dir)
set_cwd(git_dir + "/" + branding_build_dir)
if common.clean:
utils.log_h2("clean")
utils.delete_dir(utils.get_env("HOME") + "/Library/Developer/Xcode/Archives")
utils.delete_dir(utils.get_env("HOME") + "/Library/Caches/Sparkle_generate_appcast")
for target in targets:
if not target.startswith('diskimage'):
continue
utils.log_h2("build")
source_dir = "%s/build_tools/out/%s/%s" \
% (common.workspace_dir, common.prefix, branding.company_name)
if branding.onlyoffice:
for path in utils.glob_path(source_dir \
+ "/desktopeditors/editors/web-apps/apps/*/main/resources/help"):
utils.delete_dir(path)
if target.startswith('diskimage'):
if (target == 'diskimage-x64'): suffix = 'x86_64'
elif (target == 'diskimage-x64-v8'): suffix = 'v8'
elif (target == 'diskimage-arm64'): suffix = 'arm'
else: exit(1)
lane = "release_" + suffix
scheme = package_name + '-' + suffix
if utils.get_env("ARCHIVES_DIR"):
sparkle_updates = True
released_updates_dir = "%s/%s/_updates" % (utils.get_env("ARCHIVES_DIR"), scheme)
plistbuddy = "/usr/libexec/PlistBuddy"
plist_path = "%s/%s/ONLYOFFICE/Resources/%s-%s/Info.plist" \
% (common.workspace_dir, branding_dir, package_name, suffix)
make_diskimage(target)
appcast = utils.sh_output('%s -c "Print :SUFeedURL" %s' \
% (plistbuddy, plist_path), verbose=True).rstrip()
appcast = released_updates_dir + "/" + appcast[appcast.rfind("/")+1:]
release_version_string = utils.sh_output(
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:shortVersionString\']/text()" ' + appcast,
verbose=True).rstrip()
release_version = utils.sh_output(
'xmllint --xpath "/rss/channel/item[1]/*[name()=\'sparkle:version\']/text()" ' + appcast,
verbose=True).rstrip()
bundle_version = str(int(release_version) + 1)
help_url = "https://download.onlyoffice.com/install/desktop/editors/help/v" + common.version + "/apps"
utils.sh('%s -c "Set :CFBundleShortVersionString %s" %s' \
% (plistbuddy, common.version, plist_path), verbose=True)
utils.sh('%s -c "Set :CFBundleVersion %s" %s' \
% (plistbuddy, bundle_version, plist_path), verbose=True)
utils.sh('%s -c "Set :ASCBundleBuildNumber %s" %s' \
% (plistbuddy, common.build, plist_path), verbose=True)
utils.sh('%s -c "Add :ASCWebappsHelpUrl string %s" %s' \
% (plistbuddy, help_url, plist_path), verbose=True)
utils.log("RELEASE=" + release_version_string + "(" + release_version + ")" \
+ "\nCURRENT=" + common.version + "(" + bundle_version + ")")
dmg = make_dmg()
if dmg and sparkle_updates:
make_sparkle_updates()
if common.platform != "darwin_x86_64_v8":
make_dmg("commercial")
utils.set_cwd(common.workspace_dir)
if ('sparkle-updates' in targets):
make_sparkle_updates()
return
def make_dmg(target = "opensource"):
utils.log_h2("desktop dmg " + target + " build")
utils.log_h3("build/" + package_name + ".app")
args = ["bundler", "exec", "fastlane", lane, "skip_git_bump:true"]
if target == "commercial":
args += ["edition:Enterprise"]
dmg = utils.sh(" ".join(args), verbose=True)
utils.set_summary("desktop dmg " + target + " build", dmg)
if common.deploy and dmg:
utils.log_h2("desktop dmg " + target + " deploy")
ret = s3_upload(
utils.glob_path("build/*.dmg"),
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop dmg deploy", ret)
if common.deploy and dmg and target != "commercial":
utils.log_h2("desktop zip " + target + " deploy")
ret = s3_upload(
["build/%s-%s.zip" % (scheme, common.version)],
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop zip " + target + " deploy", ret)
return dmg
def make_diskimage(target):
log("\n=== Build package " + scheme + "\n")
log("--- build/" + package_name + ".app")
cmd("bundler", ["exec", "fastlane", lane, "skip_git_bump:true"])
return
def make_sparkle_updates():
utils.log_h2("desktop sparkle files build")
log("\n=== Build sparkle updates\n")
zip_filename = scheme + '-' + common.version
app_version = proc_open("/usr/libexec/PlistBuddy \
-c 'print :CFBundleShortVersionString' \
build/" + package_name + ".app/Contents/Info.plist")['stdout']
zip_filename = scheme + '-' + app_version
macos_zip = "build/" + zip_filename + ".zip"
utils.create_dir(updates_dir)
utils.copy_file(macos_zip, updates_dir)
utils.sh(
"ls -1t " + released_updates_dir + "/*.zip" \
+ " | head -n 3" \
+ " | while read f; do cp -fv \"$f\" " + updates_dir + "/; done",
verbose=True)
updates_storage_dir = "%s/%s/_updates" % (get_env('ARCHIVES_DIR'), scheme)
create_dir(updates_dir)
copy_dir_content(updates_storage_dir, updates_dir, ".zip")
copy_dir_content(updates_storage_dir, updates_dir, ".html")
copy_file(macos_zip, updates_dir)
for ext in [".html", ".ru.html"]:
changes_src = changes_dir + "/" + common.version + "/changes" + ext
changes_dst = updates_dir + "/" + zip_filename + ext
if not utils.copy_file(changes_src, changes_dst):
utils.write_file(changes_dst, "<!DOCTYPE html>placeholder")
for lang, base in update_changes_list.items():
notes_src = "%s/%s/%s.html" % (changes_dir, app_version, base)
notes_dst = "%s/%s.html" % (updates_dir, zip_filename)
if lang == 'en':
encoding = 'en_US.UTF-8'
cur_date = proc_open("env LC_ALL=" + encoding + " date -u \"+%B %e, %Y\"")['stdout']
elif lang == 'ru':
encoding = 'ru_RU.UTF-8'
cur_date = proc_open("env LC_ALL=" + encoding + " date -u \"+%e %B %Y\"")['stdout']
if is_file(notes_src):
copy_file(notes_src, notes_dst)
replace_in_file(notes_dst,
r"(<span class=\"releasedate\">).+(</span>)",
"\\1 - " + cur_date + "\\2")
# else:
# write_file(notes_dst, "placeholder\n")
cmd(git_dir + "/" + build_dir + "/Vendor/Sparkle/bin/generate_appcast", [updates_dir])
sparkle_base_url = "%s/%s/updates/" % (branding.sparkle_base_url, suffix)
ret = utils.sh(
common.workspace_dir \
+ "/desktop-apps/macos/Vendor/Sparkle/bin/generate_appcast " \
+ updates_dir \
+ " --download-url-prefix " + sparkle_base_url \
+ " --release-notes-url-prefix " + sparkle_base_url,
verbose=True
)
utils.set_summary("desktop sparkle files build", ret)
log("\n=== Edit Sparkle appcast links\n")
appcast_url = sparkle_base_url + "/" + suffix
appcast = "%s/%s.xml" % (updates_dir, package_name.lower())
if common.deploy:
utils.log_h2("desktop sparkle files deploy")
ret = s3_upload(
utils.glob_path("build/update/*.delta") \
+ utils.glob_path("build/update/*.xml") \
+ utils.glob_path("build/update/*.html"),
"desktop/mac/%s/%s/%s/" % (suffix, common.version, common.build))
utils.set_summary("desktop sparkle files deploy", ret)
return
#
# Linux
#
def make_linux():
utils.set_cwd("desktop-apps/win-linux/package/linux")
for edition in ["opensource", "commercial"]:
utils.log_h2("desktop " + edition + " build")
make_args = [t["make"] for t in branding.desktop_make_targets]
if edition == "commercial":
make_args += ["-e", "PACKAGE_EDITION=commercial"]
if common.platform == "linux_aarch64":
make_args += ["-e", "UNAME_M=aarch64"]
if not branding.onlyoffice:
make_args += ["-e", "BRANDING_DIR=../../../../" + common.branding + "/desktop-apps/win-linux/package/linux"]
ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
utils.set_summary("desktop " + edition + " build", ret)
if common.deploy:
for t in branding.desktop_make_targets:
utils.log_h2("desktop " + edition + " " + t["make"] + " deploy")
uploads = []
for f in utils.glob_path(t["src"]):
if "help" in f and not \
("x86_64" in common.platform and edition == "opensource"): continue
uploads.append(f)
ret = s3_upload(uploads, t["dst"])
utils.set_summary("desktop " + edition + " " + t["make"] + " deploy", ret)
utils.set_cwd(common.workspace_dir)
for lang, base in update_changes_list.items():
if base == "ReleaseNotes":
replace_in_file(appcast,
r"(<sparkle:releaseNotesLink>)(?:.+" + package_name + \
"-(?:x86|x86_64|v8|arm)-([0-9.]+)\..+)(</sparkle:releaseNotesLink>)",
"\\1" + appcast_url + "/updates/changes/\\2/" + base + ".html\\3")
else:
replace_in_file(appcast,
r"(<sparkle:releaseNotesLink xml:lang=\"" + lang + "\">)(?:" + package_name + \
"-(?:x86|x86_64|v8|arm)-([0-9.]+)\..+)(</sparkle:releaseNotesLink>)",
"\\1" + appcast_url + "/updates/changes/\\2/" + base + ".html\\3")
replace_in_file(appcast,
r"(url=\")(?:.+/)(" + package_name + ".+\")",
"\\1" + appcast_url + "/updates/\\2")
log("\n=== Delete unnecessary files\n")
for file in os.listdir(updates_dir):
if (-1 == file.find(app_version)) and (file.endswith(".zip") or
file.endswith(".html")):
delete_file(updates_dir + '/' + file)
return

View File

@ -1,38 +0,0 @@
#!/usr/bin/env python
import package_utils as utils
import package_common as common
import package_branding as branding
def make():
    """Entry point for the MOBILE packaging step.

    Only a Linux host is supported; on any other host the step is
    skipped with a log message.
    """
    utils.log_h1("MOBILE")
    if not utils.is_linux():
        utils.log("Unsupported host OS")
        return
    make_mobile()
    return
def make_mobile():
    """Zip the android/ios build output and optionally upload it to S3."""
    # All work happens inside build_tools/out; cwd is restored at the end.
    utils.set_cwd("build_tools/out")
    zip_file = "build-" + common.version + "-" + common.build + ".zip"
    if common.clean:
        utils.log_h2("mobile clean")
        utils.sh("rm -rfv *.zip", verbose=True)
    utils.log_h2("mobile build")
    ret = utils.sh("zip -r " + zip_file + " ./android ./ios", verbose=True)
    utils.set_summary("mobile build", ret)
    if common.deploy:
        # Deploy only if the zip step succeeded.
        if ret:
            utils.log_h2("mobile deploy")
            # NOTE(review): the archive contains both android and ios output but
            # is uploaded under the "mobile/android/" prefix — confirm intended.
            key = "mobile/android/" + zip_file
            ret = utils.s3_upload(zip_file, "s3://" + branding.s3_bucket + "/" + key)
            if ret:
                utils.log("URL: " + branding.s3_base_url + "/" + key)
            utils.set_summary("mobile deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return

View File

@ -1,82 +1,25 @@
#!/usr/bin/env python
#!/usr/bin/env python3
import package_utils as utils
import package_common as common
import package_branding as branding
import base
import os
def make(platform, targets):
base_dir = base.get_script_dir() + "/../out"
git_dir = base.get_script_dir() + "/../.."
package_dir = os.path.abspath(git_dir + "/document-server-package")
if ("windows" == platform) or ("linux" == platform):
if ("packages" in targets):
print("Make clean")
base.cmd_in_dir(package_dir, "make", ["clean"])
print("Make packages")
base.cmd_in_dir(package_dir, "make", ["packages"])
def make(edition):
    """Entry point for the SERVER packaging step for one edition.

    Dispatches to the Windows or Linux builder based on the host OS;
    other hosts are skipped with a log message.
    """
    utils.log_h1("SERVER (" + edition.upper() + ")")
    if utils.is_windows():
        make_windows(edition)
    elif utils.is_linux():
        make_linux(edition)
    else:
        utils.log("Unsupported host OS")
    return
def s3_upload(files, dst):
    """Upload each file to the branded S3 bucket.

    When `dst` ends with "/" it is treated as a key prefix and each file
    keeps its basename; otherwise `dst` is used verbatim as the key.
    Returns True only if every upload succeeded (False for an empty list).
    """
    if not files:
        return False
    all_ok = True
    for path in files:
        key = dst + utils.get_basename(path) if dst.endswith("/") else dst
        ok = utils.s3_upload(path, "s3://" + branding.s3_bucket + "/" + key)
        if ok:
            utils.log("URL: " + branding.s3_base_url + "/" + key)
        all_ok &= ok
    return all_ok
def make_windows(edition):
    """Build the Windows (Inno Setup) server installer for `edition` and deploy it.

    `edition` selects the product name ("enterprise", "developer", anything
    else -> community); the special value "prerequisites" builds the
    prerequisites installer instead of the document server itself.
    """
    if edition == "enterprise":
        product_name = "DocumentServer-EE"
    elif edition == "developer":
        product_name = "DocumentServer-DE"
    else:
        product_name = "DocumentServer"
    utils.set_cwd("document-server-package")
    utils.log_h2("server " + edition + " build")
    ret = utils.cmd("make", "clean", verbose=True)
    if edition == "prerequisites":
        make_args = ["exe-pr"]
    else:
        make_args = ["exe", "-e", "PRODUCT_NAME=" + product_name]
    if not branding.onlyoffice:
        # Branded builds take their packaging resources from the branding checkout.
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
    ret &= utils.cmd("make", *make_args, verbose=True)
    utils.set_summary("server " + edition + " build", ret)
    if common.deploy and ret:
        utils.log_h2("server " + edition + " inno deploy")
        ret = s3_upload(utils.glob_path("exe/*.exe"), "server/win/inno/")
        utils.set_summary("server " + edition + " inno deploy", ret)
    utils.set_cwd(common.workspace_dir)
    return
def make_linux(edition):
    """Build and optionally deploy the Linux server packages for `edition`.

    `edition` is one of "enterprise", "developer" or anything else
    (treated as the community "documentserver" product).
    """
    if edition == "enterprise":
        product_name = "documentserver-ee"
    elif edition == "developer":
        product_name = "documentserver-de"
    else:
        product_name = "documentserver"
    utils.set_cwd("document-server-package")
    utils.log_h2("server " + edition + " build")
    make_args = [t["make"] for t in branding.server_make_targets]
    make_args += ["-e", "PRODUCT_NAME=" + product_name]
    if common.platform == "linux_aarch64":
        make_args += ["-e", "UNAME_M=aarch64"]
    if not branding.onlyoffice:
        # Branded builds take their packaging resources from the branding checkout.
        make_args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"]
    ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True)
    utils.set_summary("server " + edition + " build", ret)
    if common.deploy:
        for t in branding.server_make_targets:
            utils.log_h2("server " + edition + " " + t["make"] + " deploy")
            ret = s3_upload(utils.glob_path(t["src"]), t["dst"])
            utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret)
    utils.set_cwd(common.workspace_dir)
    # Fix: removed a stray `exit(1)` that sat before this return — it aborted
    # the whole packaging run with a failure exit code even after a
    # successful build and deploy.
    return

View File

@ -1,9 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import codecs
import glob
import hashlib
import os
import platform
import re
@ -11,84 +11,71 @@ import shutil
import subprocess
import sys
import time
import package_common as common
import base
def parse():
    """Parse command-line options into this module's globals.

    Options left unset fall back as follows: system -> host_platform(),
    version -> $PRODUCT_VERSION (default '0.0.0'), build -> $BUILD_NUMBER
    (default '0'). product, targets and branding stay None when omitted.
    """
    parser = argparse.ArgumentParser(description="Build packages.")
    parser.add_argument('-P', '--product', dest='product', type=str,
                        action='store', help="Defines product")
    parser.add_argument('-S', '--system', dest='system', type=str,
                        action='store', help="Defines system")
    parser.add_argument('-R', '--branding', dest='branding', type=str,
                        action='store', help="Provides branding path")
    parser.add_argument('-V', '--version', dest='version', type=str,
                        action='store', help="Defines version")
    parser.add_argument('-B', '--build', dest='build', type=str,
                        action='store', help="Defines build")
    parser.add_argument('-T', '--targets', dest='targets', type=str, nargs='+',
                        action='store', help="Defines targets")
    args = parser.parse_args()
    # NOTE(review): `sign` and `clean` are declared global but never assigned
    # here — presumably set elsewhere in the module; confirm before relying on them.
    global product, system, targets, version, build, branding, sign, clean
    product = args.product
    system = args.system if (args.system is not None) else host_platform()
    targets = args.targets
    version = args.version if (args.version is not None) else get_env('PRODUCT_VERSION', '0.0.0')
    build = args.build if (args.build is not None) else get_env('BUILD_NUMBER', '0')
    branding = args.branding
    return
def host_platform():
    """Lower-cased OS name from platform.system(): e.g. 'linux', 'windows', 'darwin'."""
    system_name = platform.system()
    return system_name.lower()

def is_windows():
    """True when the host OS is Windows."""
    return "windows" == host_platform()

def is_macos():
    """True when the host OS is macOS."""
    return "darwin" == host_platform()

def is_linux():
    """True when the host OS is Linux."""
    return "linux" == host_platform()
def log(string, end='\n'):
sys.stdout.write(string + end)
def log(string, end='\n', bold=False):
    """Write `string` followed by `end` to stdout and flush immediately.

    With bold=True the text is wrapped in ANSI bold escape codes.
    """
    text = string + end
    if bold:
        text = '\033[1m' + string + '\033[0m' + end
    sys.stdout.write(text)
    sys.stdout.flush()
    return
def log_h1(string):
line = "#" * (len(string) + 8)
log("\n" + line + "\n### " + string + " ###\n" + line + "\n")
def get_env(name, default=''):
return os.getenv(name, default)
def set_env(name, value):
os.environ[name] = value
return
def log_h2(string):
log("\n### " + string + "\n")
def set_cwd(dir):
log("- change working dir: " + dir)
os.chdir(dir)
return
def log_h3(string):
log("# " + string)
return
def get_path(*paths):
arr = []
for path in paths:
if host_platform() == 'windows':
arr += path.split('/')
else:
arr += [path]
return os.path.join(*arr)
def log_err(string):
log("!!! " + string)
return
def get_timestamp():
    """Current Unix time rendered as a whole-second string (e.g. "1712345678")."""
    return format(time.time(), ".0f")
def get_env(key, default=None):
return os.getenv(key, default)
def set_env(key, value):
os.environ[key] = value
return
def get_cwd():
return os.getcwd()
def set_cwd(path, verbose=True):
if verbose:
log("- change working dir:")
log(" path: " + path)
os.chdir(path)
return
def get_path(path):
if is_windows():
return path.replace("/", "\\")
return path
def get_relpath(path, rel_path):
return os.path.relpath(get_path(path), get_path(rel_path))
def get_abspath(path):
return os.path.abspath(get_path(path))
def get_basename(path):
return os.path.basename(path)
def get_dirname(path):
return os.path.dirname(path)
def get_file_size(path):
return os.path.getsize(path)
def get_script_dir(path):
return get_dirname(os.path.realpath(path))
def get_abspath(*paths):
arr = []
for path in paths:
arr += path.split('/')
return os.path.abspath(os.path.join(*arr))
def is_file(path):
return os.path.isfile(path)
@ -101,309 +88,192 @@ def is_exist(path):
return True
return False
def glob_path(path):
return glob.glob(path)
def get_dirname(path):
return os.path.dirname(path)
def glob_file(path):
    """Return the first match of glob `path` if it is a regular file, else None.

    Fix: the original evaluated glob.glob(path) up to three times per call;
    the pattern is now expanded once.
    """
    matches = glob.glob(path)
    if matches and os.path.isfile(matches[0]):
        return matches[0]
    return
def get_hash_sha256(path):
    """Return the hex SHA-256 digest of the file at `path`, or None if it does not exist.

    Fix: read the file via a context manager so the handle is closed promptly
    (the original `open(path, "rb").read()` left closing to the garbage collector).
    """
    if os.path.exists(path):
        h = hashlib.sha256()
        with open(path, "rb") as f:
            h.update(f.read())
        return h.hexdigest()
    return
def get_hash_sha1(path):
    """Return the hex SHA-1 digest of the file at `path`, or None if it does not exist.

    Fix: read the file via a context manager so the handle is closed promptly
    (the original `open(path, "rb").read()` left closing to the garbage collector).
    """
    if os.path.exists(path):
        h = hashlib.sha1()
        with open(path, "rb") as f:
            h.update(f.read())
        return h.hexdigest()
    return
def get_hash_md5(path):
    """Return the hex MD5 digest of the file at `path`, or None if it does not exist.

    Fix: read the file via a context manager so the handle is closed promptly
    (the original `open(path, "rb").read()` left closing to the garbage collector).
    """
    if os.path.exists(path):
        h = hashlib.md5()
        with open(path, "rb") as f:
            h.update(f.read())
        return h.hexdigest()
    return
def create_dir(path, verbose=True):
if verbose:
log("- create_dir:")
log(" path: " + path)
def create_dir(path):
log("- create dir: " + path)
if not is_exist(path):
os.makedirs(path)
else:
log_err("dir exist")
log("! dir exist")
return
def write_file(path, data, encoding='utf-8', verbose=True):
def write_file(path, data, encoding='utf-8'):
if is_file(path):
delete_file(path)
if verbose:
log("- write_file:")
log(" path: " + path)
log(" encoding: " + encoding)
log(" data: |\n" + data)
log("- write file: " + path)
with codecs.open(path, 'w', encoding) as file:
file.write(data)
return
def replace_in_file(path, pattern, text_replace, encoding='utf-8', verbose=True):
if verbose:
log("- replace_in_file:")
log(" path: " + path)
log(" pattern: " + pattern)
log(" replace: " + text_replace)
log(" encoding: " + encoding)
file_data = ""
with codecs.open(get_path(path), "r", encoding) as file:
file_data = file.read()
file_data = re.sub(pattern, text_replace, file_data)
delete_file(path)
with codecs.open(get_path(path), "w", encoding) as file:
file.write(file_data)
def write_template(src, dst, encoding='utf-8', **kwargs):
template = Template(open(src).read())
if is_file(dst):
os.remove(dst)
log("- write template: " + dst + " < " + src)
with codecs.open(dst, 'w', encoding) as file:
file.write(template.render(**kwargs))
return
def copy_file(src, dst, verbose=True):
if verbose:
log("- copy_file:")
log(" src: " + src)
log(" dst: " + dst)
def replace_in_file(path, pattern, textReplace, encoding='utf-8'):
log("- replace in file: " + path + \
"\n pattern: " + pattern + \
"\n replace: " + textReplace)
filedata = ""
with codecs.open(get_path(path), "r", encoding) as file:
filedata = file.read()
filedata = re.sub(pattern, textReplace, filedata)
delete_file(path)
with codecs.open(get_path(path), "w", encoding) as file:
file.write(filedata)
return
def copy_file(src, dst):
log("- copy file: " + dst + " < " + src)
if is_file(dst):
delete_file(dst, False)
delete_file(dst)
if not is_file(src):
log_err("file not exist: " + src)
log("! file not exist: " + src)
return
return shutil.copy2(get_path(src), get_path(dst))
def copy_files(src, dst, override=True, verbose=True):
if verbose:
log("- copy_files:")
log(" src: " + src)
log(" dst: " + dst)
log(" override: " + str(override))
def copy_files(src, dst, override=True):
log("- copy files: " + dst + " < " + src)
for file in glob.glob(src):
file_name = os.path.basename(file)
if is_file(file):
if override and is_file(dst + "/" + file_name):
delete_file(dst + "/" + file_name)
if not is_file(dst + "/" + file_name):
if verbose:
log(file + " : " + get_path(dst))
shutil.copy2(file, get_path(dst))
copy_file(file, dst)
elif is_dir(file):
if not is_dir(dst + "/" + file_name):
create_dir(dst + "/" + file_name)
copy_files(file + "/*", dst + "/" + file_name, override)
return
def copy_dir(src, dst, verbose=True, symlinks=False):
if verbose:
log("- copy_dir:")
log(" src: " + src)
log(" dst: " + dst)
shutil.copytree(src, dst, symlinks=symlinks)
def copy_dir(src, dst):
if is_dir(dst):
delete_dir(dst)
try:
shutil.copytree(get_path(src), get_path(dst))
except OSError as e:
log('! Directory not copied. Error: %s' % e)
return
def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True):
if verbose:
log("- copy_dir_content:")
log(" src: " + src)
log(" dst: " + dst)
log(" include: " + filter_include)
log(" exclude: " + filter_exclude)
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if ("" != filter_include) and (-1 == item.find(filter_include)):
def copy_dir_content(src, dst, filterInclude = "", filterExclude = ""):
log("- copy dir content: " + src + " " + dst + " " + filterInclude + " " + filterExclude)
src_folder = src
if ("/" != src[-1:]):
src_folder += "/"
src_folder += "*"
for file in glob.glob(src_folder):
basename = os.path.basename(file)
if ("" != filterInclude) and (-1 == basename.find(filterInclude)):
continue
if ("" != filter_exclude) and (-1 != item.find(filter_exclude)):
if ("" != filterExclude) and (-1 != basename.find(filterExclude)):
continue
if os.path.isdir(s):
shutil.copytree(s, d)
else:
shutil.copy2(s, d)
log(item)
if is_file(file):
copy_file(file, dst)
elif is_dir(file):
copy_dir(file, dst + "/" + basename)
return
def delete_file(path, verbose=True):
if verbose:
log("- delete_file:")
log(" path: " + path)
def delete_file(path):
log("- delete file: " + path)
if not is_file(path):
log_err("file not exist")
log("! file not exist")
return
return os.remove(path)
def delete_dir(path, verbose=True):
if verbose:
log("- delete_dir:")
log(" path: " + path)
def delete_dir(path):
log("- delete dir: " + path)
if not is_dir(path):
log_err("dir not exist")
log("! dir not exist")
return
shutil.rmtree(path, ignore_errors=True)
return
def delete_files(src, verbose=True):
if verbose:
log("- delete_files:")
log(" pattern: " + src)
def delete_files(src):
for path in glob.glob(src):
if verbose:
log(path)
if is_file(path):
os.remove(path)
delete_file(path)
elif is_dir(path):
shutil.rmtree(path, ignore_errors=True)
delete_dir(path)
return
def remove_all_symlinks(dir):
    """Recursively unlink every symbolic link (file or directory) under `dir`.

    Symlinked directories are removed from the walk so they are never
    descended into; regular files and directories are left untouched.
    """
    for root, subdirs, filenames in os.walk(dir, topdown=True, followlinks=False):
        for entry in filenames:
            full = os.path.join(root, entry)
            if os.path.islink(full):
                os.unlink(full)
        # Iterate over a copy: pruning mutates `subdirs` in place.
        for entry in list(subdirs):
            full = os.path.join(root, entry)
            if os.path.islink(full):
                os.unlink(full)
                subdirs.remove(entry)
def download_file(url, path):
    """Download `url` to `path`, replacing any existing file.

    NOTE(review): uses PowerShell's Invoke-WebRequest unconditionally, so
    this only works on Windows hosts — confirm callers never reach it
    on other platforms.
    """
    log("- download file: " + path + " < " + url)
    if is_file(path):
        os.remove(path)
    powershell(["Invoke-WebRequest", url, "-OutFile", path])
    return
def set_summary(target, status):
common.summary.append({target: status})
return
def cmd(*args, **kwargs):
if kwargs.get("verbose"):
log("- cmd:")
log(" command: " + " ".join(args))
if kwargs.get("chdir"):
log(" chdir: " + kwargs["chdir"])
if kwargs.get("creates"):
log(" creates: " + kwargs["creates"])
if kwargs.get("creates") and is_exist(kwargs["creates"]):
log_err("creates exist")
return False
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
oldcwd = get_cwd()
set_cwd(kwargs["chdir"], verbose=False)
ret = subprocess.call(
[i for i in args], stderr=subprocess.STDOUT, shell=True
) == 0
if kwargs.get("chdir") and oldcwd:
set_cwd(oldcwd, verbose=False)
def proc_open(command):
    """Run `command` through the shell and capture its output.

    Returns a dict {'stdout': str, 'stderr': str}; both streams are
    stripped and decoded as UTF-8 with undecodable bytes ignored.
    The process exit code is not reported to the caller.
    """
    log("- open process: " + command)
    popen = subprocess.Popen(command, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, shell=True)
    ret = {'stdout' : '', 'stderr' : ''}
    try:
        stdout, stderr = popen.communicate()
        popen.wait()
        ret['stdout'] = stdout.strip().decode('utf-8', errors='ignore')
        ret['stderr'] = stderr.strip().decode('utf-8', errors='ignore')
    finally:
        # Close the pipes explicitly even if communicate() raised.
        popen.stdout.close()
        popen.stderr.close()
    return ret
def cmd_output(*args, **kwargs):
if kwargs.get("verbose"):
log("- cmd_output:")
log(" command: " + " ".join(args))
return subprocess.check_output(
[i for i in args], stderr=subprocess.STDOUT, shell=True
).decode("utf-8")
def powershell(*args, **kwargs):
if kwargs.get("verbose"):
log("- powershell:")
log(" command: " + " ".join(args))
if kwargs.get("chdir"):
log(" chdir: " + kwargs["chdir"])
if kwargs.get("creates"):
log(" creates: " + kwargs["creates"])
if kwargs.get("creates") and is_exist(kwargs["creates"]):
return False
args = ["powershell", "-Command"] + [i for i in args]
ret = subprocess.call(
args, stderr=subprocess.STDOUT, shell=True
) == 0
return ret
def ps1(file, args=[], **kwargs):
if kwargs.get("verbose"):
log("- ps1: " + file + " " + " ".join(args))
if kwargs.get("creates") and is_exist(kwargs["creates"]):
return True
ret = subprocess.call(
["powershell", "-ExecutionPolicy", "ByPass", "-File", file] + args,
stderr=subprocess.STDOUT, shell=True
) == 0
return ret
def sh(command, **kwargs):
if kwargs.get("verbose"):
log("- sh:")
log(" command: " + command)
if kwargs.get("chdir"):
log(" chdir: " + kwargs["chdir"])
if kwargs.get("creates"):
log(" creates: " + kwargs["creates"])
if kwargs.get("creates") and is_exist(kwargs["creates"]):
log_err("creates exist")
return False
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
oldcwd = get_cwd()
set_cwd(kwargs["chdir"], verbose=False)
ret = subprocess.call(
command, stderr=subprocess.STDOUT, shell=True
) == 0
if kwargs.get("chdir") and oldcwd:
set_cwd(oldcwd, verbose=False)
return ret
def sh_output(command, **kwargs):
if kwargs.get("verbose"):
log("- sh_output:")
log(" command: " + command)
if kwargs.get("chdir"):
log(" chdir: " + kwargs["chdir"])
if kwargs.get("chdir") and is_dir(kwargs["chdir"]):
oldcwd = get_cwd()
set_cwd(kwargs["chdir"], verbose=False)
ret = subprocess.check_output(
command, stderr=subprocess.STDOUT, shell=True
).decode("utf-8")
log(ret)
if kwargs.get("chdir") and oldcwd:
set_cwd(oldcwd, verbose=False)
return ret
def s3_upload(src, dst, **kwargs):
if not is_file(src):
log_err("file not exist: " + src)
return False
metadata = "sha256=" + get_hash_sha256(src) \
+ ",sha1=" + get_hash_sha1(src) \
+ ",md5=" + get_hash_md5(src)
args = ["aws"]
if kwargs.get("endpoint_url"):
args += ["--endpoint-url", kwargs["endpoint_url"]]
args += ["s3", "cp", "--no-progress"]
if kwargs.get("acl"):
args += ["--acl", kwargs["acl"]]
args += ["--metadata", metadata, src, dst]
if is_windows():
ret = cmd(*args, verbose=True)
def cmd(prog, args=[], is_no_errors=False):
log("- cmd: " + prog + " " + ' '.join(args))
ret = 0
if host_platform() == 'windows':
sub_args = args[:]
sub_args.insert(0, get_path(prog))
ret = subprocess.call(sub_args, stderr=subprocess.STDOUT, shell=True)
else:
ret = sh(" ".join(args), verbose=True)
command = prog
for arg in args:
command += (" \"%s\"" % arg)
ret = subprocess.call(command, stderr=subprocess.STDOUT, shell=True)
if ret != 0 and True != is_no_errors:
sys.exit("! error (" + prog + "): " + str(ret))
return ret
def s3_copy(src, dst, **kwargs):
args = ["aws"]
if kwargs.get("endpoint_url"):
args += ["--endpoint-url", kwargs["endpoint_url"]]
args += ["s3", "cp", "--no-progress"]
if kwargs.get("acl"):
args += ["--acl", kwargs["acl"]]
args += [src, dst]
if is_windows():
ret = cmd(*args, verbose=True)
else:
ret = sh(" ".join(args), verbose=True)
def powershell(cmd):
    """Run a PowerShell command (`cmd` is a list of arguments) and return its exit code.

    Terminates the whole script via sys.exit() on a non-zero exit code,
    so callers only ever see a return value of 0.
    """
    log("- pwsh: " + ' '.join(cmd))
    ret = subprocess.call(['powershell', '-Command'] + cmd,
                          stderr=subprocess.STDOUT, shell=True)
    if ret != 0:
        sys.exit("! error: " + str(ret))
    return ret
def get_platform(target):
    """Map a target name to its architecture info.

    Returns {'machine', 'arch', 'xp'} for names containing '-x64' or
    '-x86' (with 'xp' flagged by a '-xp' marker), or None otherwise.
    """
    is_xp = "-xp" in target
    if "-x64" in target:
        return {'machine': "64", 'arch': "x64", 'xp': is_xp}
    if "-x86" in target:
        return {'machine': "32", 'arch': "x86", 'xp': is_xp}
    return
# Module-level configuration shared by the packaging scripts.
global git_dir, out_dir, tsa_server, vcredist_links
# Repository root and build output dir, resolved relative to this script.
git_dir = get_abspath(get_dirname(__file__), '../..')
out_dir = get_abspath(get_dirname(__file__), '../out')
# Build timestamp as whole seconds since the epoch.
timestamp = "%.f" % time.time()
# Timestamping authority used when signing Windows binaries.
tsa_server = "http://timestamp.digicert.com"
# Microsoft Visual C++ redistributable download links, keyed by
# Visual Studio year and then by bitness ("64"/"32").
vcredist_links = {
  '2022': {
    '64': "https://aka.ms/vs/17/release/vc_redist.x64.exe",
    '32': "https://aka.ms/vs/17/release/vc_redist.x86.exe"
  },
  '2015': {
    '64': "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x64.exe",
    '32': "https://download.microsoft.com/download/9/3/F/93FCF1E7-E6A4-478B-96E7-D4B285925B00/vc_redist.x86.exe"
  },
  '2013': {
    '64': "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x64.exe",
    '32': "https://download.microsoft.com/download/2/E/6/2E61CFA4-993B-4DD4-91DA-3737CD5CD6E3/vcredist_x86.exe"
  }
}
# Pinned isxdl.dll used by the Inno Setup download plugin.
isxdl_link = "https://raw.githubusercontent.com/jrsoftware/ispack/is-5_6_1/isxdlfiles/isxdl.dll"

View File

@ -1,179 +0,0 @@
#!/usr/bin/env python
import os
import sys
__dir__name__ = os.path.dirname(__file__)
sys.path.append(__dir__name__ + '/core_common/modules/android')
import base
import config
import android_ndk
import multiprocessing
def get_make_file_suffix(platform):
suffix = platform
if config.check_option("config", "debug"):
suffix += "_debug_"
suffix += config.option("branding")
return suffix
def get_j_num():
if ("0" != config.option("multiprocess")):
return ["-j" + str(multiprocessing.cpu_count())]
return []
def check_support_platform(platform):
qt_dir = base.qt_setup(platform)
if not base.is_file(qt_dir + "/bin/qmake") and not base.is_file(qt_dir + "/bin/qmake.exe") and not base.is_file(qt_dir + "/bin/qmake.bat"):
return False
return True
def make(platform, project, qmake_config_addon="", is_no_errors=False):
# check platform
if not check_support_platform(platform):
print("THIS PLATFORM IS NOT SUPPORTED")
return
old_env = dict(os.environ)
# qt
qt_dir = base.qt_setup(platform)
base.set_env("OS_DEPLOY", platform)
# pro & makefile
file_pro = os.path.abspath(project)
pro_dir = os.path.dirname(file_pro)
if (pro_dir.endswith("/.")):
pro_dir = pro_dir[:-2]
if (pro_dir.endswith("/")):
pro_dir = pro_dir[:-1]
makefile_name = "Makefile." + get_make_file_suffix(platform)
makefile = pro_dir + "/" + makefile_name
stash_file = pro_dir + "/.qmake.stash"
old_cur = os.getcwd()
os.chdir(pro_dir)
if (base.is_file(stash_file)):
base.delete_file(stash_file)
if (base.is_file(makefile)):
base.delete_file(makefile)
base.set_env("DEST_MAKEFILE_NAME", "./" + makefile_name)
# setup android env
if (-1 != platform.find("android")):
base.set_env("ANDROID_NDK_HOST", android_ndk.host["arch"])
base.set_env("ANDROID_NDK_PLATFORM", "android-" + android_ndk.get_sdk_api())
base.set_env("PATH", qt_dir + "/bin:" + android_ndk.toolchain_dir() + "/bin:" + base.get_env("PATH"))
# setup ios env
if (-1 != platform.find("ios")):
base.hack_xcode_ios()
if base.is_file(makefile):
base.delete_file(makefile)
config_param = base.qt_config(platform)
if ("" != qmake_config_addon):
config_param += (" " + qmake_config_addon)
# qmake ADDON
qmake_addon = []
if ("" != config.option("qmake_addon")):
qmake_addon = config.option("qmake_addon").split()
clean_params = ["clean", "-f", makefile]
distclean_params = ["distclean", "-f", makefile]
build_params = ["-nocache", file_pro] + base.qt_config_as_param(config_param) + qmake_addon
qmake_app = qt_dir + "/bin/qmake"
# non windows platform
if not base.is_windows():
if base.is_file(qt_dir + "/onlyoffice_qt.conf"):
build_params.append("-qtconf")
build_params.append(qt_dir + "/onlyoffice_qt.conf")
if "1" == config.option("use-clang"):
build_params.append("-spec")
build_params.append("linux-clang-libc++")
if "" != config.option("sysroot"):
sysroot_path = config.option("sysroot_" + platform)
os.environ['QMAKE_CUSTOM_SYSROOT'] = sysroot_path
os.environ['QMAKE_CUSTOM_SYSROOT_BIN'] = config.get_custom_sysroot_bin(platform)
os.environ['PKG_CONFIG_PATH'] = config.get_custom_sysroot_lib(platform, True) + "/pkgconfig"
os.environ['PKG_CONFIG_SYSROOT_DIR'] = sysroot_path
base.cmd_exe(qmake_app, build_params)
if "" != config.option("sysroot"):
base.set_sysroot_env(platform)
base.correct_makefile_after_qmake(platform, makefile)
if ("1" == config.option("clean")):
base.cmd_and_return_cwd("make", clean_params, True)
base.cmd_and_return_cwd("make", distclean_params, True)
base.cmd(qmake_app, build_params)
base.correct_makefile_after_qmake(platform, makefile)
base.cmd_and_return_cwd("make", ["-f", makefile] + get_j_num(), is_no_errors)
if "" != config.option("sysroot"):
base.restore_sysroot_env()
else:
config_params_array = base.qt_config_as_param(config_param)
config_params_string = ""
for item in config_params_array:
config_params_string += (" \"" + item + "\"")
qmake_addon_string = " ".join(qmake_addon)
if ("" != qmake_addon_string):
qmake_addon_string = " " + qmake_addon_string
vcvarsall_arch = "x64"
if base.platform_is_32(platform):
vcvarsall_arch = "x86"
if (platform == "win_arm64"):
vcvarsall_arch = "x64_arm64"
qmake_env_addon = base.get_env("QT_QMAKE_ADDON")
if (qmake_env_addon != ""):
qmake_env_addon += " "
qmake_bat = []
qmake_bat.append("call \"" + config.option("vs-path") + "/vcvarsall.bat\" " + vcvarsall_arch)
qmake_addon_string = ""
if ("" != config.option("qmake_addon")):
qmake_addon_string = " " + (" ").join(["\"" + addon + "\"" for addon in qmake_addon])
qmake_bat.append("call \"" + qmake_app + "\" -nocache " + qmake_env_addon + file_pro + config_params_string + qmake_addon_string)
if ("1" == config.option("clean")):
qmake_bat.append("call nmake " + " ".join(clean_params))
qmake_bat.append("call nmake " + " ".join(distclean_params))
qmake_bat.append("call \"" + qmake_app + "\" -nocache " + file_pro + config_params_string + qmake_addon_string)
if ("0" != config.option("multiprocess")):
qmake_bat.append("set CL=/MP")
qmake_bat.append("call nmake -f " + makefile)
base.run_as_bat(qmake_bat, is_no_errors)
if (base.is_file(stash_file)):
base.delete_file(stash_file)
os.chdir(old_cur)
os.environ.clear()
os.environ.update(old_env)
return
def make_all_platforms(project, qmake_config_addon=""):
platforms = config.option("platform").split()
for platform in platforms:
if not platform in config.platforms:
continue
print("------------------------------------------")
print("BUILD_PLATFORM: " + platform)
print("------------------------------------------")
make(platform, project, qmake_config_addon)
return

View File

@ -1,11 +1,9 @@
#!/usr/bin/env python
import os
import shutil
import re
import argparse
def readFile(path):
with open(path, "r", errors='replace') as file:
with open(path, "r") as file:
filedata = file.read()
return filedata
@ -13,7 +11,7 @@ def writeFile(path, content):
if (os.path.isfile(path)):
os.remove(path)
with open(path, "w", encoding='utf-8') as file:
with open(path, "w") as file:
file.write(content)
return
@ -48,12 +46,12 @@ class EditorApi(object):
if -1 != retParam.find("[]"):
isArray = True
retParam = retParam.replace("[]", "")
retType = retParam.replace("|", " ").replace(".", " ").split(" ")[0]
retType = retParam.replace("|", " ").split(" ")[0]
retTypeLower = retType.lower()
retValue = ""
if -1 != retType.find("\""):
retValue = "\"\""
elif "boolean" == retTypeLower or "bool" == retTypeLower:
elif "bool" == retTypeLower:
retValue = "true"
elif "string" == retTypeLower:
retValue = "\"\""
@ -63,12 +61,6 @@ class EditorApi(object):
retValue = "undefined"
elif "null" == retTypeLower:
retValue = "null"
elif "array" == retTypeLower:
retValue = "[]"
elif "base64img" == retTypeLower:
retValue = "base64img"
elif "error" == retTypeLower:
retValue = "undefined"
else:
retValue = "new " + retType + "()"
if isArray:
@ -80,42 +72,30 @@ class EditorApi(object):
rec = rec.replace("\t", "")
rec = rec.replace('\n ', '\n')
indexEndDecoration = rec.find("*/")
indexOfStartPropName = rec.find('Object.defineProperty(')
if indexOfStartPropName != -1:
propName = re.search(r'"([^\"]*)"', rec[indexOfStartPropName:])[0]
else:
propName = None
decoration = "/**" + rec[0:indexEndDecoration + 2]
decoration = decoration.replace("Api\n", "ApiInterface\n")
decoration = decoration.replace("Api ", "ApiInterface ")
decoration = decoration.replace("{Api}", "{ApiInterface}")
decoration = decoration.replace("@return ", "@returns ")
decoration = decoration.replace("@returns {?", "@returns {")
decoration = decoration.replace("?}", "}")
if -1 != decoration.find("@name ApiInterface"):
self.append_record(decoration, "var ApiInterface = function() {};\nvar Api = new ApiInterface();\n", True)
return
code = rec[indexEndDecoration + 2:]
code = code.replace("=\n", "= ").strip("\t\n\r ")
code = code.strip("\t\n\r ")
lines = code.split("\n")
codeCorrect = ""
sMethodName = re.search(r'.prototype.(.*)=', code)
sFuncName = ""
is_found_function = False
addon_for_func = "{}"
if -1 != decoration.find("@return"):
addon_for_func = "{ return null; }"
for line in lines:
line = line.strip("\t\n\r ")
line = line.replace("{", "")
line = line.replace("}", "")
lineWithoutSpaces = line.replace(" ", "")
if not is_found_function and 0 == line.find("function "):
if -1 == decoration.find("@constructor"):
return
codeCorrect += (line + addon_for_func + "\n")
is_found_function = True
if not is_found_function and -1 != line.find(".prototype."):
@ -127,20 +107,6 @@ class EditorApi(object):
codeCorrect += (line + "\n")
codeCorrect = codeCorrect.replace("Api.prototype", "ApiInterface.prototype")
self.append_record(decoration, codeCorrect)
className = codeCorrect[0:codeCorrect.find('.')]
# если свойство определено сразу под методом (без декорации)
if propName is not None and sMethodName is not None:
prop_define = f'{className}.prototype.{propName[1:-1]} = {className}.prototype.{sMethodName.group(1)}();\n'
self.append_record(decoration, prop_define)
#иначе
elif propName is not None:
className = re.search(r'.defineProperty\((.*).prototype', code).group(1).strip()
returnValue = 'undefined' if decoration.find('@return') == -1 else self.getReturnValue(decoration)
if (returnValue != 'undefined'):
returnValue = re.search(r'{ return (.*); }', returnValue).group(1).strip()
prop_define = f'{className}.prototype.{propName[1:-1]} = {returnValue};\n'
self.append_record(decoration, prop_define)
return
def append_record(self, decoration, code, init=False):
@ -160,12 +126,6 @@ class EditorApi(object):
editors_support = decoration[index_type_editors:index_type_editors_end]
if -1 == editors_support.find(self.type):
return
decoration = "\n".join(
line for line in decoration.splitlines()
if "@typeofeditors" not in line and "@see" not in line
)
# optimizations for first file
if 0 == self.numfile:
self.records.append(decoration + "\n" + code + "\n")
@ -187,7 +147,7 @@ class EditorApi(object):
def generate(self):
for file in self.files:
file_content = readFile(f'{sdkjs_dir}/{file}')
file_content = readFile(file)
arrRecords = file_content.split("/**")
arrRecords = arrRecords[1:-1]
for record in arrRecords:
@ -195,8 +155,8 @@ class EditorApi(object):
self.numfile += 1
correctContent = ''.join(self.records)
correctContent += "\n"
os.mkdir(args.destination + self.folder)
writeFile(args.destination + self.folder + "/api.js", correctContent)
os.mkdir('deploy/api_builder/' + self.folder)
writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent)
return
def convert_to_interface(arrFiles, sEditorType):
@ -205,27 +165,12 @@ def convert_to_interface(arrFiles, sEditorType):
editor.generate()
return
sdkjs_dir = "../../../sdkjs"
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Generate documentation")
parser.add_argument(
"destination",
type=str,
help="Destination directory for the generated documentation",
nargs='?', # Indicates the argument is optional
default="../../../web-apps/vendor/monaco/libs/" # Default value
)
args = parser.parse_args()
old_cur = os.getcwd()
if True == os.path.isdir(args.destination):
shutil.rmtree(args.destination, ignore_errors=True)
os.mkdir(args.destination)
convert_to_interface(["word/apiBuilder.js", "../sdkjs-forms/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)
old_cur = os.getcwd()
os.chdir("../../../sdkjs")
if True == os.path.isdir('deploy/api_builder'):
shutil.rmtree('deploy/api_builder', ignore_errors=True)
os.mkdir('deploy/api_builder')
convert_to_interface(["word/apiBuilder.js"], "word")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide")
convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell")
os.chdir(old_cur)

View File

@ -1,138 +0,0 @@
# Documentation Generation Guide
This guide explains how to generate documentation for the ONLYOFFICE
Builder and Plugins (Methods/Events) APIs using the following Python scripts:
- `office-api/generate_docs_json.py`
- `office-api/generate_docs_md.py`
- `plugins/generate_docs_methods_json.py`
- `plugins/generate_docs_methods_md.py`
- `plugins/generate_docs_events_json.py`
- `plugins/generate_docs_events_md.py`
## Requirements
```bash
Node.js v20 and above
Python v3.10 and above
```
## Installation
```bash
git clone https://github.com/ONLYOFFICE/build_tools.git
cd build_tools/scripts/sdkjs_common/jsdoc
npm install
```
## Scripts Overview
### `office-api/generate_docs_json.py`
This script generates JSON documentation based on the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation
will be saved. If not specified, the default path is
`../../../../office-js-api-declarations/office-js-api`.
### `office-api/generate_docs_md.py`
This script generates Markdown documentation from the `apiBuilder.js` files.
- **Usage**:
```bash
python generate_docs_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation
will be saved. If not specified, the default path is
`../../../../office-js-api/`.
### `plugins/generate_docs_methods_json.py`
This script generates JSON documentation based on the `api_plugins.js` files.
- **Usage**:
```bash
python generate_docs_methods_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation
will be saved. If not specified, the default path is
`../../../../office-js-api-declarations/office-js-api-plugins`.
### `plugins/generate_docs_events_json.py`
This script generates JSON documentation based on the `plugin-events.js` files.
- **Usage**:
```bash
python generate_docs_events_json.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the JSON documentation
will be saved. If not specified, the default path is
`../../../../office-js-api-declarations/office-js-api-plugins`.
### `plugins/generate_docs_methods_md.py`
This script generates Markdown documentation from the `api_plugins.js` files.
- **Usage**:
```bash
python generate_docs_methods_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation
will be saved. If not specified, the default path is
`../../../../office-js-api/`.
### `plugins/generate_docs_events_md.py`
This script generates Markdown documentation from the `plugin-events.js` files.
- **Usage**:
```bash
python generate_docs_events_md.py output_path
```
- **Parameters**:
- `output_path` (optional): The directory where the Markdown documentation
will be saved. If not specified, the default path is
`../../../../office-js-api/`.
## Example
To generate JSON documentation with the default output path:
```bash
python generate_docs_json.py
```
To generate Markdown documentation and specify a custom output path:
```bash
python generate_docs_md.py /path/to/save/markdown
```
## Notes
- Make sure to have all necessary permissions to run these scripts and write
to the specified directories.
- The output directories will be created if they do not exist.

View File

@ -1,39 +0,0 @@
import subprocess
def fetch_branches():
    """Fetch every remote branch head from origin, skipping tags.

    Raises subprocess.CalledProcessError if the git command fails.
    """
    refspec = '+refs/heads/*:refs/remotes/origin/*'
    fetch_cmd = ['git', 'fetch', '--no-tags', 'origin', refspec]
    subprocess.run(fetch_cmd, check=True)
def get_branches():
    """Return the repository's remote branch names, one per list entry.

    Runs `git branch -r` and strips the surrounding whitespace from each
    output line (e.g. '  origin/develop' -> 'origin/develop').
    """
    listing = subprocess.run(['git', 'branch', '-r'],
                             capture_output=True, text=True)
    names = []
    for raw_line in listing.stdout.splitlines():
        names.append(raw_line.strip())
    return names
def parse_version(version_str):
    """Parse 'v<major>.<minor>.<patch>' into a tuple of ints.

    A leading 'v' is stripped before splitting on '.'. Strings that are
    not dot-separated integers yield (0, 0, 0) so they sort below any
    real version.
    """
    try:
        return tuple(map(int, version_str.lstrip('v').split('.')))
    except ValueError:
        return (0, 0, 0)  # Default for non-parsable versions

def get_max_version_branch(branches):
    """Find the hotfix/release branch with the highest version.

    `branches` holds remote branch names such as 'origin/release/v2.3.1'.
    Returns the '/'-split parts of the highest-versioned hotfix or
    release branch (e.g. ['origin', 'release', 'v2.3.1']), or None when
    no such branch exists.
    """
    max_branch = None
    max_version = (0, 0, 0)
    for branch in branches:
        parts = branch.split('/')
        # Require all three components (remote/kind/version). The previous
        # check of len(parts) >= 2 crashed with IndexError on parts[2] for
        # branches like 'origin/release' that lack a version segment.
        if len(parts) >= 3 and (parts[1] == 'hotfix' or parts[1] == 'release'):
            version = parse_version(parts[2])
            if version > max_version:
                max_version = version
                max_branch = parts
    return max_branch
if __name__ == "__main__":
    # Refresh remote refs (no tags), then report the newest hotfix/release
    # branch found among them.
    fetch_branches()
    newest = get_max_version_branch(get_branches())
    if newest:
        # Drop the leading remote name ('origin') before printing.
        print('/'.join(newest[1:]))

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/slide/apiBuilder.js", "../../../../../sdkjs/cell/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,221 +0,0 @@
// JSDoc plugin: after parsing completes, rewrites e.doclets in place so the
// output contains only the methods, classes and typedefs relevant to the
// editor named in the EDITOR environment variable (matched against
// @typeofeditors tags), with each doclet trimmed to the fields consumed by
// the downstream documentation generators.
exports.handlers = {
    processingComplete: function(e) {
        // array for filtered doclets
        let filteredDoclets = [];
        // Normalize a JSDoc name: drop the '<anonymous>~' prefix and quotes.
        const cleanName = name => name ? name.replace('<anonymous>~', '').replaceAll('"', '') : name;
        const classesDocletsMap = {}; // doclets for classes, written at the end
        let passedClasses = []; // classes available in the current editor
        // Remove duplicate doclets: keyed by longname, the last definition
        // wins. Only methods visible in this editor plus all typedefs and
        // classes are retained at this stage.
        const latestDoclets = {};
        e.doclets.forEach(doclet => {
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            // A method qualifies only when tagged @typeofeditors with the
            // current editor code (process.env.EDITOR).
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;
            if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') {
                latestDoclets[doclet.longname] = doclet;
            }
        });
        e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets));
        // Determine which classes are available in the current editor.
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAdd =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' &&
                (!isMethod || hasTypeofEditorsTag);
            if (shouldAdd) {
                // Remember each class that still owns at least one member.
                if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) {
                    passedClasses.push(cleanName(doclet.memberof));
                }
            }
            else if (doclet.kind == 'class') {
                classesDocletsMap[cleanName(doclet.name)] = doclet;
            }
        }
        // Remove classes unavailable in the current editor.
        passedClasses = passedClasses.filter(className => {
            const doclet = classesDocletsMap[className];
            if (!doclet) {
                return true;
            }
            const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors'));
            // A class passes when it has no editor tag at all, or when the
            // current editor is among the tagged editors.
            const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR));
            return isPassed;
        });
        // Build the filtered output: methods, then classes, then typedefs.
        for (let i = 0; i < e.doclets.length; i++) {
            const doclet = e.doclets[i];
            const isMethod = doclet.kind === 'function' || doclet.kind === 'method';
            const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR));
            const shouldAddMethod =
                doclet.kind !== 'member' &&
                (!doclet.longname || doclet.longname.search('private') === -1) &&
                doclet.scope !== 'inner' && hasTypeofEditorsTag;
            if (shouldAddMethod) {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.memberof))) {
                    continue;
                }
                // We leave only the necessary fields
                doclet.memberof = cleanName(doclet.memberof);
                doclet.longname = cleanName(doclet.longname);
                doclet.name = cleanName(doclet.name);
                // skip inherited methods if overridden in a child class
                if (doclet.inherited && filteredDoclets.find((addedDoclet) => addedDoclet['name'] == doclet['name'] && addedDoclet['memberof'] == doclet['memberof'])) {
                    continue;
                }
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    memberof: cleanName(doclet.memberof),
                    params: doclet.params ? doclet.params.map(param => ({
                        type: param.type ? {
                            names: param.type.names,
                            parsedType: param.type.parsedType
                        } : param.type,
                        name: param.name,
                        description: param.description,
                        optional: param.optional,
                        defaultvalue: param.defaultvalue
                    })) : doclet.params,
                    returns: doclet.returns ? doclet.returns.map(returnObj => ({
                        type: {
                            names: returnObj.type.names,
                            parsedType: returnObj.type.parsedType
                        }
                    })) : doclet.returns,
                    name: doclet.name,
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: doclet.scope,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    see: doclet.see
                };
                // Add the filtered doclet to the array
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'class') {
                // if the class is not in our map, then we deleted it ourselves -> not available in the editor
                if (false == passedClasses.includes(cleanName(doclet.name))) {
                    continue;
                }
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    augments: doclet.augments || undefined,
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see || undefined
                };
                filteredDoclets.push(filteredDoclet);
            }
            else if (doclet.kind == 'typedef') {
                const filteredDoclet = {
                    comment: doclet.comment,
                    description: doclet.description,
                    name: cleanName(doclet.name),
                    longname: cleanName(doclet.longname),
                    kind: doclet.kind,
                    scope: "global",
                    meta: doclet.meta ? {
                        lineno: doclet.meta.lineno,
                        columnno: doclet.meta.columnno
                    } : doclet.meta,
                    properties: doclet.properties ? doclet.properties.map(property => ({
                        type: property.type ? {
                            names: property.type.names,
                            parsedType: property.type.parsedType
                        } : property.type,
                        name: property.name,
                        description: property.description,
                        optional: property.optional,
                        defaultvalue: property.defaultvalue
                    })) : doclet.properties,
                    see: doclet.see,
                    type: doclet.type ? {
                        names: doclet.type.names,
                        parsedType: doclet.type.parsedType
                    } : doclet.type
                };
                filteredDoclets.push(filteredDoclet);
            }
        }
        // Replace doclets with the filtered array
        e.doclets.splice(0, e.doclets.length, ...filteredDoclets);
    }
};

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs-forms/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../../sdkjs/pdf/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../../sdkjs/word/apiBuilder.js", "../../../../../sdkjs/slide/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,16 +0,0 @@
{
"source": {
"include": ["../../../../../sdkjs/word/apiBuilder.js"]
},
"plugins": ["./correct_doclets.js"],
"opts": {
"destination": "./out",
"recurse": true,
"encoding": "utf8"
},
"templates": {
"json": {
"pretty": true
}
}
}

View File

@ -1,113 +0,0 @@
import os
import subprocess
import json
import argparse
import re
import platform
# Absolute location of this script; `root` points five directory levels up,
# at the directory expected to contain build_tools and its sibling
# repositories (sdkjs, office-js-api-declarations, ...).
script_path = os.path.abspath(__file__)
root = os.path.abspath(os.path.join(os.path.dirname(script_path), '../../../../..'))
# Configuration files
# One JSDoc config per editor; the file stem (word/cell/slide/forms) is used
# as the editor name throughout this script.
configs = [
    "./config/word.json",
    "./config/cell.json",
    "./config/slide.json",
    "./config/forms.json"
]
# Maps the editor name to the EDITOR environment value that the JSDoc plugin
# matches against @typeofeditors tags (presumably Document/Spreadsheet/
# Presentation/Form editor codes — TODO confirm against sdkjs tag usage).
editors_maps = {
    "word": "CDE",
    "cell": "CSE",
    "slide": "CPE",
    "forms": "CFE"
}
def generate(output_dir, md=False):
    """Run JSDoc for every editor config and post-process the JSON output.

    For each config in `configs` this shells out to `npx jsdoc -c <config> -X`
    with the EDITOR environment variable set (so the correct_doclets plugin
    keeps only that editor's doclets) and redirects the output to
    <output_dir>/<editor>.json. It then re-reads each JSON file and, for every
    doclet whose 'see' entry points to an existing example file under `root`,
    inlines that example:
      - md is True:  stores the example under doclet['example'] as a fenced
        ```js block preceded by the example's leading '//' comment text.
      - md is False: appends a '## Try it' document-builder snippet to
        doclet['description'] (the 'forms' editor is mapped to 'pdf').

    :param output_dir: directory the per-editor JSON files are written to
        (created if it does not exist).
    :param md: selects the example-embedding style described above.
    """
    # All config paths are relative to this script's directory.
    os.chdir(os.path.dirname(script_path))

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Generate JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")
        # Use the platform's syntax for setting an environment variable in
        # the shell command.
        command_set_env = "export"
        if (platform.system().lower() == "windows"):
            command_set_env = "set"
        # NOTE(review): with cmd.exe, `set EDITOR=CDE && ...` stores a value
        # with a trailing space, which may not match the plugin's
        # includes(process.env.EDITOR) check — confirm on Windows.
        command = f"{command_set_env} EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}"
        print(f"Generating {editor_name}.json: {command}")
        subprocess.run(command, shell=True)

    # Append examples to JSON documentation
    for config in configs:
        editor_name = config.split('/')[-1].replace('.json', '')
        output_file = os.path.join(output_dir, editor_name + ".json")

        # Read the JSON file
        with open(output_file, 'r', encoding='utf-8') as f:
            data = json.load(f)

        # Modify JSON data
        for doclet in data:
            if 'see' in doclet:
                if doclet['see'] is not None:
                    # Resolve the '{Editor}' placeholder in the example path;
                    # forms examples live under the Word examples tree.
                    # NOTE(review): assumes doclet['see'] is non-empty when
                    # not None — an empty list would raise IndexError here.
                    if editor_name == 'forms':
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
                    else:
                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())
                    file_path = f'{root}/' + doclet['see'][0]
                    if os.path.exists(file_path):
                        with open(file_path, 'r', encoding='utf-8') as see_file:
                            example_content = see_file.read()
                        # Extract the first line as a comment if it exists
                        lines = example_content.split('\n')
                        if lines[0].startswith('//'):
                            comment = lines[0] + '\n'
                            code_content = '\n'.join(lines[1:])
                        else:
                            comment = ''
                            code_content = example_content
                        if md == True:
                            doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```"
                        if md == False:
                            # The live 'Try it' widget uses 'pdf' for forms.
                            document_type = editor_name
                            if "forms" == document_type:
                                document_type = "pdf"
                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'

        # Write the modified JSON file back
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)

    print("Documentation generation for builder completed.")
def remove_builder_lines(text):
    """Return `text` with every line dropped whose (stripped) content
    starts with 'builder.'; remaining lines are rejoined with '\\n'."""
    kept = []
    for candidate in text.splitlines():
        if candidate.strip().startswith("builder."):
            continue  # drop builder-API calls from example snippets
        kept.append(candidate)
    return "\n".join(kept)
def remove_js_comments(text):
    """Strip JavaScript comment delimiters from `text`, keeping the
    comment text itself, and trim surrounding whitespace."""
    # Drop each line's leading '//' marker plus at most one space after it.
    without_line_markers = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
    # Drop '/*' and '*/' delimiters together with adjacent whitespace.
    without_block_markers = re.sub(r'/\*\s*|\s*\*/', '', without_line_markers,
                                   flags=re.DOTALL)
    return without_block_markers.strip()
if __name__ == "__main__":
    # CLI entry point: one optional positional argument selecting where the
    # generated JSON documentation is written.
    arg_parser = argparse.ArgumentParser(description="Generate documentation")
    arg_parser.add_argument(
        "destination",
        type=str,
        nargs='?',  # Indicates the argument is optional
        default=f"{root}/office-js-api-declarations/office-js-api",
        help="Destination directory for the generated documentation",
    )
    cli_args = arg_parser.parse_args()
    generate(cli_args.destination)

Some files were not shown because too many files have changed in this diff Show More