diff --git a/make_package.py b/make_package.py index c5491c9..1577255 100755 --- a/make_package.py +++ b/make_package.py @@ -31,10 +31,11 @@ common.sign = "sign" in args.targets common.deploy = "deploy" in args.targets common.version = args.version if (args.version is not None) else utils.get_env("PRODUCT_VERSION", "1.0.0") common.build = args.build if (args.build is not None) else utils.get_env("BUILD_NUMBER", "1") +common.release_branch = utils.get_env("RELEASE_BRANCH", "experimental") common.branding = args.branding common.timestamp = utils.get_timestamp() -common.summary = {} -common.deploy_list = [] +common.summary = [] +common.deploy_data = [] utils.log("workspace_dir: " + common.workspace_dir) utils.log("os_family: " + common.os_family) utils.log("platform: " + str(common.platform)) @@ -59,17 +60,18 @@ import package_builder # build utils.set_cwd(common.workspace_dir, verbose=True) +utils.delete_file("deploy.json") if "core" in common.targets: package_core.make() if "desktop" in common.targets: package_desktop.make() if "builder" in common.targets: package_builder.make() -if "server-ce" in common.targets: +if "server-community" in common.targets: package_server.make("community") -if "server-ee" in common.targets: +if "server-enterprise" in common.targets: package_server.make("enterprise") -if "server-de" in common.targets: +if "server-developer" in common.targets: package_server.make("developer") # if "mobile" in common.targets: # package_mobile.make() @@ -77,11 +79,11 @@ if "server-de" in common.targets: # summary utils.log_h1("Build summary") exitcode = 0 -for task, rc in common.summary.items(): - if rc == 0: - utils.log("[ OK ] " + task) +for i in common.summary: + if list(i.values())[0]: + utils.log("[ OK ] " + list(i.keys())[0]) else: - utils.log("[FAILED] " + task) + utils.log("[FAILED] " + list(i.keys())[0]) exitcode = 1 exit(exitcode) diff --git a/scripts/package_builder.py b/scripts/package_builder.py index 4201188..46c32ce 100644 --- 
a/scripts/package_builder.py +++ b/scripts/package_builder.py @@ -15,8 +15,22 @@ def make(): utils.log("Unsupported host OS") return +def aws_s3_upload(local, key, ptype = None): + if common.os_family == "windows": + rc = utils.cmd( + "aws", "s3", "cp", "--acl", "public-read", "--no-progress", + local, "s3://" + common.s3_bucket + "/" + key, + verbose=True + ) + else: + rc = utils.sh("aws s3 cp --acl public-read --no-progress " \ + + local + " s3://" + common.s3_bucket + "/" + key, verbose=True) + if rc == 0 and ptype is not None: + utils.add_deploy_data("builder", ptype, local, key) + return rc + def make_windows(): - global inno_file, zip_file + global inno_file, zip_file, key_prefix utils.set_cwd("document-builder-package") prefix = common.platforms[common.platform]["prefix"] @@ -34,9 +48,11 @@ def make_windows(): suffix = suffixes[common.platform] zip_file = "%s_%s_%s.zip" % (package_name, package_version, suffix) inno_file = "%s_%s_%s.exe" % (package_name, package_version, suffix) + key_prefix = "%s/%s/windows/builder/%s/%s" % (branding.company_name_l, \ + common.release_branch, common.version, common.build) if common.clean: - utils.log_h1("clean") + utils.log_h2("builder clean") utils.delete_dir("build") utils.log_h1("copy arifacts") @@ -52,31 +68,22 @@ def make_windows(): return def make_zip(): - common.summary["builder zip build"] = 1 - utils.log_h1("zip build " + zip_file) + utils.log_h2("builder zip build") + utils.log_h2(zip_file) rc = utils.cmd("7z", "a", "-y", zip_file, ".\\app\\*", chdir="build", creates="build\\" + zip_file, verbose=True) - common.summary["builder zip build"] = rc + utils.set_summary("builder zip build", rc == 0) - # common.summary["zip deploy"] = 1 - # if rc == 0: - # utils.log_h1("zip deploy " + zip_file) - # dest = "s3://" + common.s3_bucket + "/onlyoffice/experimental/windows/builder/" \ - # + common.version + "/" + common.build + "/" - # rc = utils.cmd( - # "aws", "s3", "cp", "--acl", "public-read", "--no-progress", - # 
"build\\" + zip_file, dest, - # verbose=True - # ) - # common.summary["zip deploy"] = rc + if rc == 0: + utils.log_h2("builder zip deploy") + zip_key = key_prefix + "/" + zip_file + rc = aws_s3_upload("build\\" + zip_file, zip_key, "Portable") + utils.set_summary("builder zip deploy", rc == 0) return def make_inno(): - common.summary["builder inno build"] = 1 - utils.log_h1("inno build " + inno_file) - # if utils.is_file(inno_file): - # utils.log("! file exist, skip") - # return + utils.log_h2("builder inno build") + utils.log_h2(inno_file) args = ["-Version " + common.version, "-Build " + common.build] if not branding.onlyoffice: args.append("-Branding '..\\..\\%s\\document-builder-package\\exe'" % common.branding) @@ -85,34 +92,55 @@ def make_inno(): args.append("-CertName '%s'" % branding.cert_name) rc = utils.ps1(".\\make_inno.ps1", args, creates="build\\" + inno_file, verbose=True) - common.summary["builder inno build"] = rc + utils.set_summary("builder inno build", rc == 0) - # common.summary["inno deploy"] = 1 - # if rc == 0: - # utils.log_h1("inno deploy " + inno_file) - # dest = "s3://" + common.s3_bucket + "/onlyoffice/experimental/windows/builder/" \ - # + common.version + "/" + common.build + "/" - # rc = utils.cmd( - # "aws", "s3", "cp", "--acl", "public-read", "--no-progress", - # "build\\" + inno_file, dest, - # verbose=True - # ) - # common.summary["inno deploy"] = rc + if rc == 0: + utils.log_h2("builder inno deploy") + inno_key = key_prefix + "/" + inno_file + rc = aws_s3_upload("build\\" + inno_file, inno_key, "Installer") + utils.set_summary("builder inno deploy", rc == 0) return def make_linux(): utils.set_cwd("document-builder-package") + utils.log_h2("builder clean") rc = utils.sh("make clean", verbose=True) - common.summary["builder clean"] = rc + utils.set_summary("builder clean", rc == 0) + utils.log_h2("builder build") args = [] if common.platform == "linux_aarch64": args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: args += 
["-e", "BRANDING_DIR=../" + common.branding + "/document-builder-package"] rc = utils.sh("make packages " + " ".join(args), verbose=True) - common.summary["builder build"] = rc + utils.set_summary("builder build", rc == 0) + + key_prefix = branding.company_name_l + "/" + common.release_branch + if rc == 0: + utils.log_h2("builder tar deploy") + tar_file = utils.glob_file("tar/*.tar.gz") + tar_key = key_prefix + "/linux/" + utils.get_basename(tar_file) + rc = aws_s3_upload(tar_file, tar_key, "Portable") + utils.set_summary("builder tar deploy", rc == 0) + + utils.log_h2("builder deb deploy") + deb_file = utils.glob_file("deb/*.deb") + deb_key = key_prefix + "/ubuntu/" + utils.get_basename(deb_file) + rc = aws_s3_upload(deb_file, deb_key, "Ubuntu") + utils.set_summary("builder deb deploy", rc == 0) + + utils.log_h2("builder rpm deploy") + rpm_file = utils.glob_file("rpm/**/*.rpm") + rpm_key = key_prefix + "/centos/" + utils.get_basename(rpm_file) + rc = aws_s3_upload(rpm_file, rpm_key, "CentOS") + utils.set_summary("builder rpm deploy", rc == 0) + + else: + utils.set_summary("builder tar deploy", False) + utils.set_summary("builder deb deploy", False) + utils.set_summary("builder rpm deploy", False) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_common.py b/scripts/package_common.py index cdf0d0f..3ddb68c 100644 --- a/scripts/package_common.py +++ b/scripts/package_common.py @@ -1,21 +1,20 @@ #!/usr/bin/env python platforms = { - "windows_x64": { "title": "Windows x64", "prefix": "win_64", "arch": "x64" }, - "windows_x64_xp": { "title": "Windows x64 XP", "prefix": "win_64_xp", "arch": None }, - "windows_x86": { "title": "Windows x86", "prefix": "win_32", "arch": "x86" }, - "windows_x86_xp": { "title": "Windows x86 XP", "prefix": "win_32_xp", "arch": None }, - "darwin_x86_64": { "title": "macOS x86_64", "prefix": "mac_64", "arch": "x64" }, - "darwin_x86_64_v8": { "title": "macOS x86_64 V8", "prefix": "mac_64", "arch": None }, - "darwin_arm64": 
{ "title": "macOS arm64", "prefix": "mac_arm64", "arch": None }, - "linux_x86_64": { "title": "Linux x86_64", "prefix": "linux_64", "arch": "x64" }, - "linux_aarch64": { "title": "Linux aarch64", "prefix": "linux_arm64", "arch": None }, + "windows_x64": { "title": "Windows x64", "prefix": "win_64" }, + "windows_x64_xp": { "title": "Windows x64 XP", "prefix": "win_64_xp" }, + "windows_x86": { "title": "Windows x86", "prefix": "win_32" }, + "windows_x86_xp": { "title": "Windows x86 XP", "prefix": "win_32_xp" }, + "darwin_x86_64": { "title": "macOS x86_64", "prefix": "mac_64" }, + "darwin_x86_64_v8": { "title": "macOS x86_64 V8", "prefix": "mac_64" }, + "darwin_arm64": { "title": "macOS arm64", "prefix": "mac_arm64" }, + "linux_x86_64": { "title": "Linux x86_64", "prefix": "linux_64" }, + "linux_aarch64": { "title": "Linux aarch64", "prefix": "linux_arm64" }, "android": { "title": "Android" } } out_dir = "build_tools/out" -# s3_bucket = "repo-doc-onlyoffice-com" -s3_bucket = "deploytest-static.teamlab.com" +s3_bucket = "repo-doc-onlyoffice-com" s3_region = "eu-west-1" tsa_server = "http://timestamp.digicert.com" vcredist_links = { diff --git a/scripts/package_core.py b/scripts/package_core.py index 5268cfd..c02294f 100644 --- a/scripts/package_core.py +++ b/scripts/package_core.py @@ -17,44 +17,42 @@ def make_core(): prefix = common.platforms[common.platform]["prefix"] company = branding.company_name.lower() repos = { - "windows": "windows", - "darwin": "mac", - "linux": "linux" + "windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." + common.build }, + "windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." 
+ common.build }, + "darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build }, + "linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build }, } + repo = repos[common.platform] branch = utils.get_env("BRANCH_NAME") - if branch is None: - utils.log("BRANCH_NAME variable is undefined") - return - arch = common.platforms[common.platform]["arch"] - if utils.is_windows(): - version = common.version + "." + common.build - else: - version = common.version + "-" + common.build - src = "build_tools/out/%s/%s/core/core.7z" % (prefix, company) - dest = common.s3_bucket + "/" + repos[common.os_family] + "/core/" \ - + branch + "/%s/" + arch + "/" + core_7z = utils.get_path("build_tools/out/%s/%s/core/core.7z" % (prefix, company)) + dest_version = "%s/core/%s/%s/%s/" % (repo["repo"], branch, repo["version"], repo["arch"]) + dest_latest = "%s/core/%s/%s/%s/" % (repo["repo"], branch, "latest", repo["arch"]) - utils.log_h1("core deploy") - common.summary["core deploy"] = 1 - ret = utils.cmd( + if branch is None: + utils.log_err("BRANCH_NAME variable is undefined") + utils.set_summary("core deploy", False) + return + if not utils.is_file(core_7z): + utils.log_err("core.7z does not exist") + utils.set_summary("core deploy", False) + return + + utils.log_h2("core deploy") + rc = utils.cmd( "aws", "s3", "cp", "--acl", "public-read", "--no-progress", - utils.get_path(src), "s3://" + dest % version, + core_7z, + "s3://" + common.s3_bucket + "/" + dest_version + "core.7z", verbose=True ) - if ret == 0: - common.deploy_list.append({ - "product": "core", - "platform": common.platform, - "section": "Archive", - "path": dest % version + "core.7z", - "size": utils.get_file_size(utils.get_path(src)) - }) - ret = utils.cmd( + if rc == 0: + utils.add_deploy_data("core", "Archive", core_7z, dest_version + "core.7z") + rc = utils.cmd( "aws", "s3", "sync", "--delete", "--acl", "public-read", "--no-progress", - "s3://" + 
dest % version, "s3://" + dest % "latest", + "s3://" + common.s3_bucket + "/" + dest_version, + "s3://" + common.s3_bucket + "/" + dest_latest, verbose=True ) - common.summary["core deploy"] = ret + utils.set_summary("core deploy", rc == 0) return diff --git a/scripts/package_desktop.py b/scripts/package_desktop.py index 7feda75..a47af82 100644 --- a/scripts/package_desktop.py +++ b/scripts/package_desktop.py @@ -18,13 +18,27 @@ def make(): utils.log("Unsupported host OS") return +def aws_s3_upload(local, key, ptype = None): + if common.os_family == "windows": + rc = utils.cmd( + "aws", "s3", "cp", "--acl", "public-read", "--no-progress", + local, "s3://" + common.s3_bucket + "/" + key, + verbose=True + ) + else: + rc = utils.sh("aws s3 cp --acl public-read --no-progress " \ + + local + " s3://" + common.s3_bucket + "/" + key, verbose=True) + if rc == 0 and ptype is not None: + utils.add_deploy_data("desktop", ptype, local, key) + return rc + # # Windows # def make_windows(): global package_version, iscc_args, source_dir, arch_list, inno_arch_list, \ - inno_file, inno_update_file, msi_file, zip_file + inno_file, inno_update_file, msi_file, zip_file, key_prefix utils.set_cwd("desktop-apps\\win-linux\\package\\windows") prefix = common.platforms[common.platform]["prefix"] @@ -51,6 +65,8 @@ def make_windows(): inno_file = "%s_%s_%s.exe" % (package_name, package_version, suffix) inno_update_file = "update\\editors_update_%s.exe" % suffix msi_file = "%s_%s_%s.msi" % (package_name, package_version, suffix) + key_prefix = "%s/%s/windows/desktop/%s/%s" % (branding.company_name_l, \ + common.release_branch, common.version, common.build) if common.clean: utils.log_h2("desktop clean") @@ -73,9 +89,9 @@ def make_windows(): vc_total = False if not vc_total: - common.summary["desktop inno build"] = 1 - common.summary["desktop inno update build"] = 1 - common.summary["desktop advinst build"] = 1 + utils.set_summary("desktop inno build", False) + utils.set_summary("desktop inno 
update build", False) + utils.set_summary("desktop advinst build", False) utils.set_cwd(common.workspace_dir) return @@ -92,15 +108,23 @@ def make_windows(): return def make_zip(): - utils.log_h1("desktop zip build") - utils.log_h2(zip_file) - rc = utils.cmd("7z", "a", "-y", zip_file, source_dir + "\\*", - creates=zip_file, verbose=True) - common.summary["desktop zip build"] = rc + utils.log_h2("desktop zip build") + rc = utils.cmd( + "7z", "a", "-y", zip_file, source_dir + "\\*", + creates=zip_file, + verbose=True + ) + utils.set_summary("desktop zip build", rc == 0) + + if rc == 0: + utils.log_h2("desktop zip deploy") + zip_key = key_prefix + "/" + utils.get_basename(zip_file) + rc = aws_s3_upload(zip_file, zip_key, "Portable") + utils.set_summary("desktop zip deploy", rc == 0) return def download_vcredist(year): - utils.log_h1("download vcredist " + year) + utils.log_h2("vcredist " + year + " download") arch = arch_list[common.platform] link = common.vcredist_links[year][arch]["url"] @@ -110,12 +134,12 @@ def download_vcredist(year): utils.log_h2(vcredist_file) utils.create_dir(utils.get_dirname(vcredist_file)) rc = utils.download_file(link, vcredist_file, md5, verbose=True) - common.summary["desktop vcredist download"] = rc + utils.set_summary("vcredist " + year + " download", rc == 0) return rc def make_inno(): global iscc_args - utils.log_h1("innosetup project build") + utils.log_h2("desktop inno build") utils.log_h2(inno_file) iscc_args = [ @@ -137,20 +161,32 @@ def make_inno(): branding.cert_name + "$q /t " + common.tsa_server + " $f") args = ["iscc"] + iscc_args + ["common.iss"] rc = utils.cmd(*args, creates=inno_file, verbose=True) - common.summary["desktop inno build"] = rc + utils.set_summary("desktop inno build", rc == 0) + + if rc == 0: + utils.log_h2("desktop inno deploy") + inno_key = key_prefix + "/" + utils.get_basename(inno_file) + rc = aws_s3_upload(inno_file, inno_key, "Installer") + utils.set_summary("desktop inno deploy", rc == 0) return def 
make_inno_update(): - utils.log_h1("build innosetup update project") + utils.log_h2("desktop inno update build") utils.log_h2(inno_update_file) args = ["iscc"] + iscc_args + ["/DTARGET_NAME=" + inno_file, "update_common.iss"] rc = utils.cmd(*args, creates=inno_update_file, verbose=True) - common.summary["desktop inno update build"] = rc + utils.set_summary("desktop inno update build", rc == 0) + + if rc == 0: + utils.log_h2("desktop inno update deploy") + inno_update_key = key_prefix + "/" + utils.get_basename(inno_update_file) + rc = aws_s3_upload(inno_update_file, inno_update_key, "WinSparkle") + utils.set_summary("desktop inno update deploy", rc == 0) return def make_winsparkle_files(): - utils.log_h1("winsparkle files build") + utils.log_h2("desktop winsparkle files build") if branding.onlyoffice: awk_branding = "update/branding.awk" @@ -160,6 +196,7 @@ def make_winsparkle_files(): awk_args = [ "-v", "Version=" + common.version, "-v", "Build=" + common.build, + "-v", "Branch=" + common.release_branch, "-v", "Timestamp=" + common.timestamp, "-i", awk_branding ] @@ -176,7 +213,7 @@ def make_winsparkle_files(): args = ["env", "LANG=en_US.UTF-8", "awk", "-v", "Prod=1"] + \ awk_args + ["-f", "update/appcast.xml.awk"] appcast_result = utils.cmd_output(*args, verbose=True) - utils.write_file(appcast, appcast_result) + utils.write_file(appcast_prod, appcast_result) if branding.onlyoffice: changes_dir = "update/changes/" + common.version @@ -198,28 +235,87 @@ def make_winsparkle_files(): utils.write_file(changes, changes_result) else: utils.log("! 
file not exist: " + changes_file) + + utils.log_h2("desktop winsparkle files deploy") + rc = 0 + + appcast_key = key_prefix + "/" + utils.get_basename(appcast) + rc_appcast = aws_s3_upload(appcast, appcast_key, "WinSparkle") + rc += rc_appcast + + appcast_prod_key = key_prefix + "/" + utils.get_basename(appcast_prod) + rc_appcast_prod = aws_s3_upload(appcast_prod, appcast_prod_key, "WinSparkle") + rc += rc_appcast_prod + + for lang, base in branding.desktop_update_changes_list.items(): + changes_file = "update/%s.html" % base + changes_key = key_prefix + "/" + utils.get_basename(changes_file) + if utils.is_exist(changes_file): + rc_changes = aws_s3_upload(changes_file, changes_key, "WinSparkle") + rc += rc_changes + + utils.set_summary("desktop winsparkle files deploy", rc == 0) return def make_msi(): - utils.log_h1("advanced installer project build") + utils.log_h2("desktop msi build") utils.log_h2(msi_file) arch = arch_list[common.platform] - aic_content = [";aic"] if not branding.onlyoffice: - utils.copy_dir_content("..\\..\\..\\..\\" + common.branding + \ - "\\desktop-apps\\win-linux\\package\\windows\\data", "data", ".bmp") - utils.copy_dir_content("..\\..\\..\\..\\" + common.branding + \ - "\\desktop-apps\\win-linux\\package\\windows\\data", "data", ".png") + branding_path = common.workspace_dir + "\\" + common.branding + utils.copy_dir_content( + branding_path + "\\desktop-apps\\win-linux\\package\\windows\\data", "data", ".bmp") + utils.copy_dir_content( + branding_path + "\\desktop-apps\\win-linux\\package\\windows\\data", "data", ".png") + utils.copy_dir_content( + branding_path + "\\desktop-apps\\win-linux\\extras\\projicons\\res", + "..\\..\\extras\\projicons\\res", ".ico") + utils.copy_file( + branding_path + "\\desktop-apps\\win-linux\\package\\windows\\dictionary.ail", + "dictionary.ail") + utils.copy_file( + branding_path + "\\desktop-apps\\common\\package\\license\\eula_" + common.branding + ".rtf", + 
"..\\..\\..\\common\\package\\license\\agpl-3.0.rtf") + utils.copy_file( + branding_path + "\\multimedia\\videoplayer\\icons\\" + common.branding + ".ico", + "..\\..\\extras\\projicons\\res\\media.ico") + utils.copy_file( + branding_path + "\\multimedia\\imageviewer\\icons\\ico\\" + common.branding + ".ico", + "..\\..\\extras\\projicons\\res\\gallery.ico") + + aic_content = [";aic"] + if not common.sign: + aic_content += [ + "ResetSig" + ] + if arch == "x86": + aic_content += [ + "SetPackageType x86", + "SetAppdir -buildname DefaultBuild -path [ProgramFilesFolder][MANUFACTURER_INSTALL_FOLDER]\\[PRODUCT_INSTALL_FOLDER]", + 'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x64)"' + ] + if not branding.onlyoffice: + aic_content += [ + 'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x64)"' + ] + if arch == "x64": + aic_content += [ + 'DelPrerequisite "Microsoft Visual C++ 2015-2022 Redistributable (x86)"' + ] + if not branding.onlyoffice: + aic_content += [ + 'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x86)"' + ] + if branding.onlyoffice: + aic_content += [ + 'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x86)"', + 'DelPrerequisite "Microsoft Visual C++ 2013 Redistributable (x64)"', + "DelFolder CUSTOM_PATH" + ] + else: aic_content += [ - "SetProperty ProductName=\"%s\"" % branding.desktop_product_name_full, - "SetProperty Manufacturer=\"%s\"" % branding.publisher_name.replace('"', '""'), - "SetProperty ARPURLINFOABOUT=\"%s\"" % branding.info_about_url, - "SetProperty ARPURLUPDATEINFO=\"%s\"" % branding.update_info_url, - "SetProperty ARPHELPLINK=\"%s\"" % branding.help_url, - "SetProperty ARPHELPTELEPHONE=\"%s\"" % branding.help_phone, - "SetProperty ARPCONTACT=\"%s\"" % branding.publisher_address, "DelLanguage 1029 -buildname DefaultBuild", "DelLanguage 1031 -buildname DefaultBuild", "DelLanguage 1041 -buildname DefaultBuild", @@ -228,13 +324,14 @@ def make_msi(): "DelLanguage 1060 -buildname DefaultBuild", 
"DelLanguage 1036 -buildname DefaultBuild", "DelLanguage 3082 -buildname DefaultBuild", - "DelLanguage 1033 -buildname DefaultBuild" + "DelLanguage 1033 -buildname DefaultBuild", + "NewSync CUSTOM_PATH " + source_dir + "\\..\\MediaViewer", + "UpdateFile CUSTOM_PATH\\ImageViewer.exe " + source_dir + "\\..\\MediaViewer\\ImageViewer.exe", + "UpdateFile CUSTOM_PATH\\VideoPlayer.exe " + source_dir + "\\..\\MediaViewer\\VideoPlayer.exe" ] - if not common.sign: aic_content.append("ResetSig") - if arch == "x86": aic_content.append("SetPackageType x86") aic_content += [ "AddOsLc -buildname DefaultBuild -arch " + arch, - "NewSync APPDIR " + source_dir + " -existingfiles delete", + "NewSync APPDIR " + source_dir, "UpdateFile APPDIR\\DesktopEditors.exe " + source_dir + "\\DesktopEditors.exe", "SetVersion " + package_version, "SetPackageName " + msi_file + " -buildname DefaultBuild", @@ -242,8 +339,14 @@ def make_msi(): ] utils.write_file("DesktopEditors.aic", "\r\n".join(aic_content), "utf-8-sig") rc = utils.cmd("AdvancedInstaller.com", "/execute", \ - "DesktopEditors.aip", "DesktopEditors.aic", "-nofail", verbose=True) - common.summary["desktop advinst build"] = rc + "DesktopEditors.aip", "DesktopEditors.aic", verbose=True) + utils.set_summary("desktop msi build", rc == 0) + + if rc == 0: + utils.log_h2("desktop msi deploy") + msi_key = key_prefix + "/" + utils.get_basename(msi_file) + rc = aws_s3_upload(msi_file, msi_key, "Installer") + utils.set_summary("desktop msi deploy", rc == 0) return # @@ -252,7 +355,7 @@ def make_msi(): def make_macos(): global package_name, build_dir, branding_dir, updates_dir, changes_dir, \ - update_changes_list, suffix, lane, scheme + update_changes_list, suffix, lane, scheme, key_prefix package_name = branding.desktop_package_name build_dir = branding.desktop_build_dir branding_dir = branding.desktop_branding_dir @@ -260,30 +363,37 @@ def make_macos(): changes_dir = branding.desktop_changes_dir update_changes_list = 
branding.desktop_update_changes_list suffixes = { - "macos_x86_64": "x86_64", + "macos_x86_64": "x86_64", "macos_x86_64_v8": "v8", - "macos_aarch64": "aarch64" + "macos_arm64": "arm64" } suffix = suffixes[common.platform] lane = "release_" + suffix scheme = package_name + "-" + suffix + key_prefix = "%s/%s/macos/%s/%s/%s" % (branding.company_name_l, \ + common.release_branch, suffix, common.version, common.build) utils.set_cwd(build_dir) - # utils.sh_output(" \ - # url=" + branding.sparkle_base_url + "/" + suffix + "/onlyoffice.xml; \ - # appcast=$(curl -s $url 2> /dev/null); \ - # path=desktop-apps/macos/ONLYOFFICE/Resources/ONLYOFFICE-" + suffix + "/Info.plist; \ - # echo -n \"RELEASE_MACOS_VERSION=\"; \ - # echo $appcast | xmllint --xpath \"/rss/channel/item[1]/enclosure/@*[name()='sparkle:shortVersionString']\" - | cut -f 2 -d \\\\\"; \ - # echo -n \"RELEASE_MACOS_BUILD=\"; \ - # echo $appcast | xmllint --xpath \"/rss/channel/item[1]/enclosure/@*[name()='sparkle:version']\" - | cut -f 2 -d \\\\\"; \ - # echo -n \"CURRENT_MACOS_VERSION=\"; \ - # /usr/libexec/PlistBuddy -c 'print :CFBundleShortVersionString' $path; \ - # echo -n \"CURRENT_MACOS_BUILD=\"; \ - # /usr/libexec/PlistBuddy -c 'print :CFBundleVersion' $path", - # verbose=True - # ) + script = ''' + appcast=$(curl -s ''' + branding.sparkle_base_url + '''/''' + suffix + '''/onlyoffice.xml 2> /dev/null) + echo -n \"RELEASE_MACOS_VERSION=\" + echo $appcast \ + | xmllint --xpath \"/rss/channel/item[1]/enclosure/@*[name()='sparkle:shortVersionString']\" - \ + | cut -f 2 -d \\\\\" + echo -n \"RELEASE_MACOS_BUILD=\" + echo $appcast \ + | xmllint --xpath \"/rss/channel/item[1]/enclosure/@*[name()='sparkle:version']\" - \ + | cut -f 2 -d \\\\\" + + path=desktop-apps/macos/ONLYOFFICE/Resources/ONLYOFFICE-''' + suffix + '''/Info.plist + echo -n \"CURRENT_MACOS_VERSION=\" + /usr/libexec/PlistBuddy -c 'print :CFBundleShortVersionString' $path + echo -n \"CURRENT_MACOS_BUILD=\" + /usr/libexec/PlistBuddy -c 'print 
:CFBundleVersion' $path + ''' + utils.sh_output(script, verbose=True) + make_dmg() # if : make_sparkle_updates() @@ -292,15 +402,25 @@ return def make_dmg(): - utils.log_h1(scheme + " build") + utils.log_h2("desktop dmg build") + utils.log_h2(scheme) utils.log_h2("build/" + package_name + ".app") - rc = utils.sh("bundler exec fastlane " + lane + \ - " git_bump:false notarization:false", verbose=True) - common.summary["desktop build"] = rc + rc = utils.sh( + "bundler exec fastlane " + lane + " git_bump:false", + verbose=True + ) + utils.set_summary("desktop dmg build", rc == 0) + + if rc == 0: + utils.log_h2("desktop dmg deploy") + dmg_file = utils.glob_file("build/*.dmg") + dmg_key = key_prefix + "/" + utils.get_basename(dmg_file) + rc = aws_s3_upload(dmg_file, dmg_key, "Disk Image") + utils.set_summary("desktop dmg deploy", rc == 0) return def make_sparkle_updates(): - utils.log_h1("sparkle updates build") + utils.log_h2("desktop sparkle files build") app_version = utils.sh_output("/usr/libexec/PlistBuddy \ -c 'print :CFBundleShortVersionString' \ @@ -332,7 +452,7 @@ utils.sh(common.workspace_dir + \ "/desktop-apps/macos/Vendor/Sparkle/bin/generate_appcast " + updates_dir) - utils.log_h1("edit sparkle appcast links") + utils.log_h3("edit sparkle appcast links") appcast_url = sparkle_base_url + "/" + suffix appcast = "%s/%s.xml" % (updates_dir, package_name.lower()) @@ -351,11 +471,45 @@ r"(url=\")(?:.+/)(" + package_name + ".+\")", "\\1" + appcast_url + "/updates/\\2") - utils.log_h1("delete unnecessary files") + utils.log_h3("delete unnecessary files") for file in os.listdir(updates_dir): if (-1 == file.find(app_version)) and (file.endswith(".zip") or file.endswith(".html")): utils.delete_file(updates_dir + '/' + file) + + utils.log_h3("generate checksums") + utils.sh( + "md5 *.zip *.delta > md5sums.txt && " \ + + "shasum -a 256 *.zip *.delta > sha256sums.txt", + chdir="build/update", 
verbose=True + ) + + utils.log_h2("desktop sparkle files deploy") + rc = 0 + + zip_key = key_prefix + "/" + utils.get_basename(macos_zip) + rc_zip = aws_s3_upload(macos_zip, zip_key, "Sparkle") + rc += rc_zip + + for path in utils.glob_files("build/update/*.delta") \ + + utils.glob_files("build/update/*.xml") \ + + utils.glob_files("build/update/*.html"): + sparkle_key = key_prefix + "/" + utils.get_basename(path) + rc_sparkle = aws_s3_upload(path, sparkle_key, "Sparkle") + rc += rc_sparkle + + utils.set_summary("desktop sparkle files deploy", rc == 0) + + utils.log_h2("desktop checksums deploy") + rc = 0 + + for path in utils.glob_files("build/update/*.txt"): + checksums_key = key_prefix + "/" + utils.get_basename(path) + rc_checksums = aws_s3_upload(path, checksums_key, "Checksums") + rc += rc_checksums + + utils.set_summary("desktop checksums deploy", rc == 0) return # @@ -366,7 +520,7 @@ def make_linux(): utils.set_cwd("desktop-apps/win-linux/package/linux") rc = utils.sh("make clean", verbose=True) - common.summary["desktop clean"] = rc + utils.set_summary("desktop clean", rc == 0) args = [] if common.platform == "linux_aarch64": @@ -374,7 +528,62 @@ def make_linux(): if not branding.onlyoffice: args += ["-e", "BRANDING_DIR=../../../../" + common.branding + "/desktop-apps/win-linux/package/linux"] rc = utils.sh("make packages " + " ".join(args), verbose=True) - common.summary["desktop build"] = rc + utils.set_summary("desktop build", rc == 0) + + key_prefix = branding.company_name_l + "/" + common.release_branch + if rc == 0: + utils.log_h2("desktop tar deploy") + tar_file = utils.glob_file("tar/**/*.tar.gz") + tar_key = key_prefix + "/linux/" + utils.get_basename(tar_file) + rc = aws_s3_upload(tar_file, tar_key, "Portable") + utils.set_summary("desktop tar deploy", rc == 0) + + utils.log_h2("desktop deb deploy") + deb_file = utils.glob_file("deb/*.deb") + deb_key = key_prefix + "/ubuntu/" + utils.get_basename(deb_file) + rc = aws_s3_upload(deb_file, deb_key, 
"Ubuntu") + utils.set_summary("desktop deb deploy", rc == 0) + + utils.log_h2("desktop rpm deploy") + rpm_file = utils.glob_file("rpm/**/*.rpm") + rpm_key = key_prefix + "/centos/" + utils.get_basename(rpm_file) + rc = aws_s3_upload(rpm_file, rpm_key, "CentOS") + utils.set_summary("desktop rpm deploy", rc == 0) + + utils.log_h2("desktop apt-rpm deploy") + apt_rpm_file = utils.glob_file("apt-rpm/**/*.rpm") + apt_rpm_key = key_prefix + "/altlinux/" + utils.get_basename(apt_rpm_file) + rc = aws_s3_upload(apt_rpm_file, apt_rpm_key, "AltLinux") + utils.set_summary("desktop apt-rpm deploy", rc == 0) + + utils.log_h2("desktop urpmi deploy") + urpmi_file = utils.glob_file("urpmi/**/*.rpm") + urpmi_key = key_prefix + "/rosa/" + utils.get_basename(urpmi_file) + rc = aws_s3_upload(urpmi_file, urpmi_key, "Rosa") + utils.set_summary("desktop urpmi deploy", rc == 0) + + utils.log_h2("desktop suse-rpm deploy") + suse_rpm_file = utils.glob_file("suse-rpm/**/*.rpm") + suse_rpm_key = key_prefix + "/suse/" + utils.get_basename(suse_rpm_file) + rc = aws_s3_upload(suse_rpm_file, suse_rpm_key, "SUSE Linux") + utils.set_summary("desktop suse-rpm deploy", rc == 0) + + if not branding.onlyoffice: + utils.log_h2("desktop deb-astra deploy") + deb_astra_file = utils.glob_file("deb-astra/*.deb") + deb_astra_key = key_prefix + "/" + utils.get_basename(deb_astra_file) + rc = aws_s3_upload(deb_astra_file, deb_astra_key, "AstraLinux Signed") + utils.set_summary("desktop deb-astra deploy", rc == 0) + + else: + utils.set_summary("desktop tar deploy", False) + utils.set_summary("desktop deb deploy", False) + utils.set_summary("desktop rpm deploy", False) + utils.set_summary("desktop apt-rpm deploy", False) + utils.set_summary("desktop urpmi deploy", False) + utils.set_summary("desktop suse-rpm deploy", False) + if not branding.onlyoffice: + utils.set_summary("desktop deb-astra deploy", False) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_server.py b/scripts/package_server.py 
index e3c164b..55182d2 100644 --- a/scripts/package_server.py +++ b/scripts/package_server.py @@ -14,6 +14,20 @@ def make(edition): utils.log("Unsupported host OS") return +def aws_s3_upload(local, key, edition, ptype = None): + if common.os_family == "windows": + rc = utils.cmd( + "aws", "s3", "cp", "--acl", "public-read", "--no-progress", + local, "s3://" + common.s3_bucket + "/" + key, + verbose=True + ) + else: + rc = utils.sh("aws s3 cp --acl public-read --no-progress " \ + + local + " s3://" + common.s3_bucket + "/" + key, verbose=True) + if rc == 0 and ptype is not None: + utils.add_deploy_data("server_" + edition, ptype, local, key) + return rc + def make_windows(edition): if edition == "enterprise": product_name = "DocumentServer-EE" @@ -23,14 +37,25 @@ def make_windows(edition): product_name = "DocumentServer" utils.set_cwd("document-server-package") + utils.log_h2("server " + edition + " clean") rc = utils.cmd("make", "clean", verbose=True) - common.summary["server " + edition + " clean"] = rc + utils.set_summary("server " + edition + " clean", rc == 0) + utils.log_h2("server " + edition + " build") args = ["-e", "PRODUCT_NAME=" + product_name] if not branding.onlyoffice: args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"] rc = utils.cmd("make", "packages", *args, verbose=True) - common.summary["server " + edition + " build"] = rc + utils.set_summary("server " + edition + " build", rc == 0) + + key_prefix = "%s/%s/windows/server/%s/%s" % (branding.company_name_l, \ + common.release_branch, common.version, common.build) + if rc == 0: + utils.log_h2("server " + edition + " inno deploy") + inno_file = utils.glob_file("exe/*.exe") + inno_key = key_prefix + "/" + utils.get_basename(inno_file) + rc = aws_s3_upload(inno_file, inno_key, edition, "Installer") + utils.set_summary("server " + edition + " inno deploy", rc == 0) utils.set_cwd(common.workspace_dir) return @@ -44,16 +69,50 @@ def make_linux(edition): product_name = 
"documentserver" utils.set_cwd("document-server-package") + utils.log_h2("server " + edition + " clean") rc = utils.sh("make clean", verbose=True) - common.summary["server " + edition + " clean"] = rc + utils.set_summary("server " + edition + " clean", rc == 0) + utils.log_h2("server " + edition + " build") args = ["-e", "PRODUCT_NAME=" + product_name] if common.platform == "linux_aarch64": args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: args += ["-e", "BRANDING_DIR=../" + common.branding + "/document-server-package"] rc = utils.sh("make packages " + " ".join(args), verbose=True) - common.summary["server " + edition + " build"] = rc + utils.set_summary("server " + edition + " build", rc == 0) + + key_prefix = branding.company_name_l + "/" + common.release_branch + if rc == 0: + utils.log_h2("server " + edition + " tar deploy") + tar_file = utils.glob_file("*.tar.gz") + tar_key = key_prefix + "/linux/" + utils.get_basename(tar_file) + rc = aws_s3_upload(tar_file, tar_key, edition, "Portable") + utils.set_summary("server " + edition + " tar deploy", rc == 0) + + utils.log_h2("server " + edition + " deb deploy") + deb_file = utils.glob_file("deb/*.deb") + deb_key = key_prefix + "/ubuntu/" + utils.get_basename(deb_file) + rc = aws_s3_upload(deb_file, deb_key, edition, "Ubuntu") + utils.set_summary("server " + edition + " deb deploy", rc == 0) + + utils.log_h2("server " + edition + " rpm deploy") + rpm_file = utils.glob_file("rpm/**/*.rpm") + rpm_key = key_prefix + "/centos/" + utils.get_basename(rpm_file) + rc = aws_s3_upload(rpm_file, rpm_key, edition, "CentOS") + utils.set_summary("server " + edition + " rpm deploy", rc == 0) + + utils.log_h2("server " + edition + " apt-rpm deploy") + alt_rpm_file = utils.glob_file("apt-rpm/**/*.rpm") + alt_rpm_key = key_prefix + "/altlinux/" + utils.get_basename(alt_rpm_file) + rc = aws_s3_upload(alt_rpm_file, alt_rpm_key, edition, "AltLinux") + utils.set_summary("server " + edition + " apt-rpm deploy", rc == 0) + + 
else: + utils.set_summary("server " + edition + " tar deploy", False) + utils.set_summary("server " + edition + " deb deploy", False) + utils.set_summary("server " + edition + " rpm deploy", False) + utils.set_summary("server " + edition + " apt-rpm deploy", False) utils.set_cwd(common.workspace_dir) return \ No newline at end of file diff --git a/scripts/package_utils.py b/scripts/package_utils.py index 58a6df2..2ce5da0 100644 --- a/scripts/package_utils.py +++ b/scripts/package_utils.py @@ -4,6 +4,7 @@ import codecs import glob import hashlib +import json import os import platform import re @@ -11,6 +12,7 @@ import shutil import subprocess import sys import time +import package_common as common def host_platform(): return platform.system().lower() @@ -30,12 +32,20 @@ def log(string, end='\n'): return def log_h1(string): - line = "-" * (len(string) + 8) - log("\n" + line + "\n--- " + string + " ---\n" + line + "\n") + line = "#" * (len(string) + 8) + log("\n" + line + "\n### " + string + " ###\n" + line + "\n") return def log_h2(string): - log("--- " + string) + log("\n### " + string + "\n") + return + +def log_h3(string): + log("# " + string) + return + +def log_err(string): + log("!!! 
" + string) return def get_timestamp(): @@ -53,7 +63,8 @@ def get_cwd(): def set_cwd(path, verbose=True): if verbose: - log_h2("change working dir: " + path) + log_h3("change working dir:") + log_h3(" path: " + path) os.chdir(path) return @@ -65,6 +76,9 @@ def get_path(path): def get_abspath(path): return os.path.abspath(get_path(path)) +def get_basename(path): + return os.path.basename(path) + def get_dirname(path): return os.path.dirname(path) @@ -85,6 +99,14 @@ def is_exist(path): return True return False +def glob_path(path): + return glob.glob(path) + +def glob_file(path): + if glob.glob(path) and is_file(glob.glob(path)[0]): + return glob.glob(path)[0] + return + def get_md5(path): if os.path.exists(path): md5_hash = hashlib.md5() @@ -92,122 +114,173 @@ def get_md5(path): return md5_hash.hexdigest() return -def create_dir(path): - log("- create dir: " + path) +def create_dir(path, verbose=True): + if verbose: + log_h3("create_dir:") + log_h3(" path:" + path) if not is_exist(path): os.makedirs(path) else: - log("! 
dir exist") + log_err("dir exist") return -def write_file(path, data, encoding='utf-8'): +def write_file(path, data, encoding='utf-8', verbose=True): if is_file(path): delete_file(path) - log("- write file: " + path) + if verbose: + log_h3("write_file:") + log_h3(" path: " + path) + log_h3(" encoding: " + encoding) + log_h3(" data: |\n" + data) with codecs.open(path, 'w', encoding) as file: file.write(data) return -def write_template(src, dst, encoding='utf-8', **kwargs): - template = Template(open(src).read()) - if is_file(dst): - os.remove(dst) - log("- write template: " + dst + " < " + src) - with codecs.open(dst, 'w', encoding) as file: - file.write(template.render(**kwargs)) - return - -def replace_in_file(path, pattern, textReplace, encoding='utf-8'): - log("- replace in file: " + path + \ - "\n pattern: " + pattern + \ - "\n replace: " + textReplace) - filedata = "" +def replace_in_file(path, pattern, text_replace, encoding='utf-8', verbose=True): + if verbose: + log_h3("replace_in_file:") + log_h3(" path: " + path) + log_h3(" pattern: " + pattern) + log_h3(" replace: " + text_replace) + log_h3(" encoding: " + encoding) + file_data = "" with codecs.open(get_path(path), "r", encoding) as file: - filedata = file.read() - filedata = re.sub(pattern, textReplace, filedata) + file_data = file.read() + file_data = re.sub(pattern, text_replace, file_data) delete_file(path) with codecs.open(get_path(path), "w", encoding) as file: - file.write(filedata) + file.write(file_data) return -def copy_file(src, dst): - log("- copy file: " + dst + " < " + src) +def copy_file(src, dst, verbose=True): + if verbose: + log_h3("copy_file:") + log_h3(" src: " + src) + log_h3(" dst: " + dst) if is_file(dst): delete_file(dst) if not is_file(src): - log("! 
file not exist: " + src) + log_err("file not exist: " + src) return return shutil.copy2(get_path(src), get_path(dst)) -def copy_files(src, dst, override=True): - log("- copy files: " + dst + " < " + src) +def copy_files(src, dst, override=True, verbose=True): + if verbose: + log_h3("copy_files:") + log_h3(" src: " + src) + log_h3(" dst: " + dst) + log_h3(" override: " + str(override)) for file in glob.glob(src): file_name = os.path.basename(file) if is_file(file): if override and is_file(dst + "/" + file_name): delete_file(dst + "/" + file_name) if not is_file(dst + "/" + file_name): - copy_file(file, dst) + if verbose: + log(file + " : " + get_path(dst)) + shutil.copy2(file, get_path(dst)) elif is_dir(file): if not is_dir(dst + "/" + file_name): create_dir(dst + "/" + file_name) copy_files(file + "/*", dst + "/" + file_name, override) return -def copy_dir(src, dst): +def copy_dir(src, dst, override=True, verbose=True): + if verbose: + log_h3("copy_dir:") + log_h3(" src: " + src) + log_h3(" dst: " + dst) + log_h3(" override: " + str(override)) if is_dir(dst): delete_dir(dst) try: shutil.copytree(get_path(src), get_path(dst)) except OSError as e: - log('! Directory not copied. Error: %s' % e) + log_err('directory not copied. 
Error: %s' % e) return -def copy_dir_content(src, dst, filterInclude = "", filterExclude = ""): - log("- copy dir content: " + src + " " + dst + " " + filterInclude + " " + filterExclude) +def copy_dir_content(src, dst, filter_include = "", filter_exclude = "", verbose=True): + if verbose: + log_h3("copy_dir_content:") + log_h3(" src: " + src) + log_h3(" dst: " + dst) + log_h3(" include: " + filter_include) + log_h3(" exclude: " + filter_exclude) src_folder = src if ("/" != src[-1:]): src_folder += "/" src_folder += "*" for file in glob.glob(src_folder): basename = os.path.basename(file) - if ("" != filterInclude) and (-1 == basename.find(filterInclude)): + if ("" != filter_include) and (-1 == basename.find(filter_include)): continue - if ("" != filterExclude) and (-1 != basename.find(filterExclude)): + if ("" != filter_exclude) and (-1 != basename.find(filter_exclude)): continue if is_file(file): - copy_file(file, dst) + copy_file(file, dst, verbose=False) elif is_dir(file): copy_dir(file, dst + "/" + basename) return -def delete_file(path): - log("- delete file: " + path) +def delete_file(path, verbose=True): + if verbose: + log_h3("delete_file:") + log_h3(" path: " + path) if not is_file(path): - log("! file not exist") + log_err("file not exist") return return os.remove(path) -def delete_dir(path): - log("- delete dir: " + path) +def delete_dir(path, verbose=True): + if verbose: + log_h3("delete_dir:") + log_h3(" path: " + path) if not is_dir(path): - log("! 
dir not exist") + log_err("dir not exist") return shutil.rmtree(path, ignore_errors=True) return -def delete_files(src): +def delete_files(src, verbose=True): + if verbose: + log_h3("delete_files:") + log_h3(" pattern: " + src) for path in glob.glob(src): + if verbose: + log(path) if is_file(path): - delete_file(path) + os.remove(path) elif is_dir(path): - delete_dir(path) + shutil.rmtree(path, ignore_errors=True) + return + +def set_summary(target, status): + common.summary.append({target: status}) + return + +def add_deploy_data(product, ptype, src, dst): + common.deploy_data.append({ + "platform": common.platforms[common.platform]["title"], + "product": product, + "type": ptype, + # "local": get_path(src), + "size": get_file_size(get_path(src)), + "remote": dst + }) + f = open(get_path(common.workspace_dir + "/deploy.json"), "w") + f.write(json.dumps(common.deploy_data, sort_keys=True, indent=4)) return def cmd(*args, **kwargs): if kwargs.get("verbose"): - log_h2("cmd: " + " ".join(args)) + log_h3("cmd:") + log_h3(" command: " + " ".join(args)) + if kwargs.get("chdir"): + log_h3(" chdir: " + kwargs["chdir"]) + if kwargs.get("creates"): + log_h3(" creates: " + kwargs["creates"]) if kwargs.get("creates") and is_exist(kwargs["creates"]): + log_err("creates exist") return 0 if kwargs.get("chdir") and is_dir(kwargs["chdir"]): oldcwd = get_cwd() @@ -221,14 +294,20 @@ def cmd_output(*args, **kwargs): if kwargs.get("verbose"): - log_h2("cmd output: " + " ".join(args)) + log_h3("cmd_output:") + log_h3(" command: " + " ".join(args)) return subprocess.check_output( [i for i in args], stderr=subprocess.STDOUT, shell=True ).decode("utf-8") def powershell(*args, **kwargs): if kwargs.get("verbose"): - log_h2("powershell: " + " ".join(args)) + log_h3("powershell:") + log_h3(" command: " + " ".join(args)) + if kwargs.get("chdir"): + log_h3(" chdir: " + kwargs["chdir"]) + if kwargs.get("creates"): + log_h3(" creates: " + kwargs["creates"]) if 
kwargs.get("creates") and is_exist(kwargs["creates"]): return 0 args = ["powershell", "-Command"] + [i for i in args] @@ -249,29 +328,38 @@ def ps1(file, args=[], **kwargs): def download_file(url, path, md5, verbose=False): if verbose: - log("download file: %s < %s (%s)" % (path, url, md5)) + log_h3("download_file:") + log_h3(" url: " + url) + log_h3(" path: " + path) + log_h3(" md5: " + md5) if is_file(path): if get_md5(path) == md5: - log("! file already exist (match checksum)") + log_err("file already exist (match checksum)") return 0 else: - log("! wrong checksum (%s), delete" % md5) + log_err("wrong checksum (%s), delete" % md5) os.remove(path) ret = powershell("(New-Object System.Net.WebClient).DownloadFile('%s','%s')" % (url, path), verbose=True) md5_new = get_md5(path) if md5 != md5_new: - log("! checksum didn't match (%s != %s)" % (md5, md5_new)) + log_err("checksum didn't match (%s != %s)" % (md5, md5_new)) return 1 return ret def sh(command, **kwargs): if kwargs.get("verbose"): - log_h2("sh: " + command) + log_h3("sh:") + log_h3(" command: " + command) + if kwargs.get("chdir"): + log_h3(" chdir: " + kwargs["chdir"]) + if kwargs.get("creates"): + log_h3(" creates: " + kwargs["creates"]) return subprocess.call(command, stderr=subprocess.STDOUT, shell=True) def sh_output(command, **kwargs): if kwargs.get("verbose"): - log_h2("sh output: " + command) + log_h3("sh_output:") + log_h3(" command: " + command) return subprocess.check_output( command, stderr=subprocess.STDOUT, shell=True ).decode("utf-8")