diff --git a/.gitignore b/.gitignore index 37983f69..fa6a77f0 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,5 @@ tests/puppeteer/node_modules tests/puppeteer/work_directory tests/puppeteer/package.json tests/puppeteer/package-lock.json +scripts/sdkjs_common/jsdoc/node_modules +scripts/sdkjs_common/jsdoc/package-lock.json diff --git a/make_package.py b/make_package.py index c65cc864..238b0b50 100755 --- a/make_package.py +++ b/make_package.py @@ -10,15 +10,17 @@ # parse parser = argparse.ArgumentParser(description="Build packages.") parser.add_argument("-P", "--platform", dest="platform", type=str, - action="store", help="Defines platform", required=True) -parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+", - action="store", help="Defines targets", required=True) -parser.add_argument("-R", "--branding", dest="branding", type=str, - action="store", help="Provides branding path") + action="store", help="Defines platform", required=True) +parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+", + action="store", help="Defines targets", required=True) parser.add_argument("-V", "--version", dest="version", type=str, - action="store", help="Defines version") + action="store", help="Defines version") parser.add_argument("-B", "--build", dest="build", type=str, - action="store", help="Defines build") + action="store", help="Defines build") +parser.add_argument("-H", "--branch", dest="branch", type=str, + action="store", help="Defines branch") +parser.add_argument("-R", "--branding", dest="branding", type=str, + action="store", help="Provides branding path") args = parser.parse_args() # vars @@ -29,8 +31,16 @@ common.clean = "clean" in args.targets common.sign = "sign" in args.targets common.deploy = "deploy" in args.targets -common.version = args.version if args.version else utils.get_env("BUILD_VERSION", "0.0.0") -common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0") +if args.version: common.version = args.version +else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0") +utils.set_env("PRODUCT_VERSION", common.version) +utils.set_env("BUILD_VERSION", common.version) +if args.build: common.build = args.build +else: common.build = utils.get_env("BUILD_NUMBER", "0") +utils.set_env("BUILD_NUMBER", common.build) +if args.branch: common.branch = args.branch +else: common.branch = utils.get_env("BRANCH_NAME", "null") +utils.set_env("BRANCH_NAME", common.branch) common.branding = args.branding common.timestamp = utils.get_timestamp() common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..") diff --git a/scripts/base.py b/scripts/base.py index afcc7789..86df1472 100644 --- a/scripts/base.py +++ b/scripts/base.py @@ -384,7 +384,7 @@ def cmd2(prog, args=[], is_no_errors=False): sys.exit("Error (" + prog + "): " + str(ret)) return ret -def cmd_exe(prog, args): +def cmd_exe(prog, args, is_no_errors=False): prog_dir = os.path.dirname(prog) env_dir = os.environ if ("linux" == host_platform()): @@ -406,7 +406,7 @@ def cmd_exe(prog, args): command += (" \"" + arg + "\"") process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir) ret = process.wait() - if ret != 0: + if ret != 0 and True != is_no_errors: sys.exit("Error (" + prog + "): " + str(ret)) return ret @@ -426,12 +426,13 @@ def cmd_and_return_cwd(prog, args=[], is_no_errors=False): def run_command(sCommand): popen = subprocess.Popen(sCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - result = {'stdout' 
: '', 'stderr' : ''} + result = {'stdout' : '', 'stderr' : '', 'returncode' : 0} try: stdout, stderr = popen.communicate() popen.wait() result['stdout'] = stdout.strip().decode('utf-8', errors='ignore') result['stderr'] = stderr.strip().decode('utf-8', errors='ignore') + result['returncode'] = popen.returncode finally: popen.stdout.close() popen.stderr.close() @@ -1046,15 +1047,15 @@ def web_apps_addons_param(): def download(url, dst): return cmd_exe("curl", ["-L", "-o", dst, url]) -def extract(src, dst): +def extract(src, dst, is_no_errors=False): app = "7za" if ("mac" == host_platform()) else "7z" - return cmd_exe(app, ["x", "-y", src, "-o" + dst]) + return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors) -def extract_unicode(src, dst): +def extract_unicode(src, dst, is_no_errors=False): if "windows" == host_platform(): run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"]) return - return extract(src, dst) + return extract(src, dst, is_no_errors) def archive_folder(src, dst): app = "7za" if ("mac" == host_platform()) else "7z" diff --git a/scripts/build_js.py b/scripts/build_js.py index 0b24fa3d..25a2c729 100644 --- a/scripts/build_js.py +++ b/scripts/build_js.py @@ -146,15 +146,25 @@ def build_sdk_native(directory, minimize=True): _run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + addons) return -def build_js_develop(root_dir): - #_run_npm_cli(root_dir + "/sdkjs/build") + +def build_sdkjs_develop(root_dir): external_folder = config.option("--external-folder") if (external_folder != ""): external_folder = "/" + external_folder - + _run_npm_ci(root_dir + external_folder + "/sdkjs/build") _run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param()) _run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param()) + + +def build_js_develop(root_dir): + #_run_npm_cli(root_dir + "/sdkjs/build") + external_folder = config.option("--external-folder") + if (external_folder != ""): + external_folder = "/" + external_folder + + build_sdkjs_develop(root_dir) + _run_npm(root_dir + external_folder + "/web-apps/build") _run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites") _run_grunt(root_dir + external_folder + "/web-apps/build/sprites", []) diff --git a/scripts/deploy_core.py b/scripts/deploy_core.py index 7a3d974b..9f7ad71c 100644 --- a/scripts/deploy_core.py +++ b/scripts/deploy_core.py @@ -39,6 +39,7 @@ def make(): base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File") base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile") base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer") + base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell") base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin") base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t") @@ -61,13 +62,9 @@ def make(): base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt") base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester") base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester") + base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester") # dictionaries base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False) - - if 
base.is_file(archive_dir + ".7z"): - base.delete_file(archive_dir + ".7z") - base.archive_folder(archive_dir + "/*", archive_dir + ".7z") - return diff --git a/scripts/develop/config_server.py b/scripts/develop/config_server.py index 425b7c6a..f2eb67e6 100644 --- a/scripts/develop/config_server.py +++ b/scripts/develop/config_server.py @@ -5,8 +5,9 @@ import os import json -def get_core_url(arch, branch): - return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z" +def get_core_url(platform, branch): + return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/" \ + + branch + "/latest/core-" + platform.replace("_", "-") + ".7z" def make(): git_dir = base.get_script_dir() + "/../.." @@ -20,14 +21,21 @@ def make(): arch = "x64" arch2 = "_64" - if ("windows" == base.host_platform()) and not base.host_platform_is64(): + if base.is_windows() and not base.host_platform_is64(): arch = "x86" arch2 = "_32" + if base.is_os_arm(): + arch2 = "_arm64" + platform = "" + if base.is_windows(): + platform = "win" + arch2 + else: + platform = base.host_platform() + arch2 - url = get_core_url(arch, config.option("branch")) + url = get_core_url(platform, config.option("branch")) data_url = base.get_file_last_modified_url(url) if (data_url == "" and config.option("branch") != "develop"): - url = get_core_url(arch, "develop") + url = get_core_url(platform, "develop") data_url = base.get_file_last_modified_url(url) old_data_url = base.readFile("./core.7z.data") @@ -49,12 +57,6 @@ def make(): base.extract("./core.7z", "./") base.writeFile("./core.7z.data", data_url) - platform = "" - if ("windows" == base.host_platform()): - platform = "win" + arch2 - else: - platform = base.host_platform() + arch2 - base.copy_files("./core/*", "./") else: print("-----------------------------------------------------------") diff --git a/scripts/develop/run_server.py b/scripts/develop/run_server.py index f841f4d4..c7428880 100644 --- a/scripts/develop/run_server.py +++ b/scripts/develop/run_server.py @@ -49,7 +49,18 @@ def run_integration_example(): def start_linux_services(): base.print_info('Restart MySQL Server') - + + +def update_config(args): + platform = base.host_platform() + branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout'] + + if ("linux" == platform): + base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args) + else: + base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args) + + def make_start(): base.configure_common_apps() @@ -64,15 +75,8 @@ def make_start(): start_linux_services() def make_configure(args): - platform = base.host_platform() - branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout'] - base.print_info('Build modules') - if ("linux" == platform): - base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args) - else: - base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 
'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-name', 'onlyoffice', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args) - + update_config(args) base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py']) def make_install(): platform = base.host_platform() diff --git a/scripts/package_branding.py b/scripts/package_branding.py index ec902e42..5a838955 100644 --- a/scripts/package_branding.py +++ b/scripts/package_branding.py @@ -30,6 +30,59 @@ builder_product_name = "Document Builder" if utils.is_linux(): - desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"] - builder_make_targets = ["deb", "rpm"] # tar - server_make_targets = ["deb", "rpm", "tar"] + builder_make_targets = [ + { + "make": "tar", + "src": "tar/*.tar*", + "dst": "builder/linux/generic/" + }, + { + "make": "deb", + "src": "deb/*.deb", + "dst": "builder/linux/debian/" + }, + { + "make": "rpm", + "src": "rpm/build/RPMS/*/*.rpm", + "dst": "builder/linux/rhel/" + } + ] + desktop_make_targets = [ + { + "make": "tar", + "src": "tar/*.tar*", + "dst": "desktop/linux/generic/" + }, + { + "make": "deb", + "src": "deb/*.deb", + "dst": "desktop/linux/debian/" + }, + { + "make": "rpm", + "src": "rpm/build/RPMS/*/*.rpm", + "dst": "desktop/linux/rhel/" + }, + { + "make": "rpm-suse", + "src": "rpm-suse/build/RPMS/*/*.rpm", + "dst": "desktop/linux/suse/" + } + ] + server_make_targets = [ + { + "make": "deb", + "src": "deb/*.deb", + "dst": "server/linux/debian/" + }, + { + "make": "rpm", + "src": "rpm/builddir/RPMS/*/*.rpm", + "dst": "server/linux/rhel/" + }, + { + "make": "tar", + "src": "*.tar*", + "dst": "server/linux/snap/" + } + ] diff --git a/scripts/package_builder.py b/scripts/package_builder.py index d7beeeea..b8d7dffe 100644 --- a/scripts/package_builder.py +++ b/scripts/package_builder.py @@ -7,14 +7,17 @@ def make(): utils.log_h1("BUILDER") + if not (utils.is_windows() or utils.is_macos() or utils.is_linux()): + utils.log("Unsupported host OS") + return + if common.deploy: + make_archive() if utils.is_windows(): make_windows() elif utils.is_macos(): make_macos() elif utils.is_linux(): make_linux() - else: - utils.log("Unsupported host OS") return def s3_upload(files, dst): @@ -29,6 +32,37 @@ def s3_upload(files, dst): ret &= upload return ret +def make_archive(): + utils.set_cwd(utils.get_path( + "build_tools/out/" + common.prefix + "/" + branding.company_name.lower())) + + utils.log_h2("builder archive build") + utils.delete_file("builder.7z") + args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"] + if utils.is_windows(): + ret = utils.cmd(*args, verbose=True) + else: + ret = utils.sh(" ".join(args), verbose=True) + utils.set_summary("builder archive build", ret) + + utils.log_h2("builder archive deploy") + dest = "builder-" + common.prefix.replace("_","-") + ".7z" + dest_latest = "archive/%s/latest/%s" % (common.branch, dest) + dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest) + ret = utils.s3_upload( + "builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version) + utils.set_summary("builder archive deploy", ret) + if ret: + utils.log("URL: " + branding.s3_base_url + "/" + dest_version) + utils.add_deploy_data(dest_version) + utils.s3_copy( + "s3://" + branding.s3_bucket + "/" + dest_version, + "s3://" + branding.s3_bucket + "/" + dest_latest) + utils.log("URL: " + branding.s3_base_url + "/" + dest_latest) + + utils.set_cwd(common.workspace_dir) + return + def make_windows(): global inno_file, zip_file, suffix, 
key_prefix utils.set_cwd("document-builder-package") @@ -137,7 +171,7 @@ def make_linux(): utils.set_cwd("document-builder-package") utils.log_h2("builder build") - make_args = branding.builder_make_targets + make_args = [t["make"] for t in branding.builder_make_targets] if common.platform == "linux_aarch64": make_args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: @@ -146,32 +180,10 @@ def make_linux(): utils.set_summary("builder build", ret) if common.deploy: - if ret: - if "tar" in branding.builder_make_targets: - utils.log_h2("builder tar deploy") - ret = s3_upload( - utils.glob_path("tar/*.tar.gz"), - "builder/linux/generic/") - utils.set_summary("builder tar deploy", ret) - if "deb" in branding.builder_make_targets: - utils.log_h2("builder deb deploy") - ret = s3_upload( - utils.glob_path("deb/*.deb"), - "builder/linux/debian/") - utils.set_summary("builder deb deploy", ret) - if "rpm" in branding.builder_make_targets: - utils.log_h2("builder rpm deploy") - ret = s3_upload( - utils.glob_path("rpm/builddir/RPMS/*/*.rpm"), - "builder/linux/rhel/") - utils.set_summary("builder rpm deploy", ret) - else: - if "tar" in branding.builder_make_targets: - utils.set_summary("builder tar deploy", False) - if "deb" in branding.builder_make_targets: - utils.set_summary("builder deb deploy", False) - if "rpm" in branding.builder_make_targets: - utils.set_summary("builder rpm deploy", False) + for t in branding.builder_make_targets: + utils.log_h2("builder " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("builder " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_core.py b/scripts/package_core.py index 8bf2e181..1855f159 100644 --- a/scripts/package_core.py +++ b/scripts/package_core.py @@ -10,47 +10,38 @@ def make(): utils.log("Unsupported host OS") return if common.deploy: - make_core() + make_archive() return -def make_core(): - prefix = common.platformPrefixes[common.platform] - company = branding.company_name.lower() - repos = { - "windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." + common.build }, - "windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." 
+ common.build }, - "darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build }, - "darwin_arm64": { "repo": "mac", "arch": "arm", "version": common.version + "-" + common.build }, - "linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build }, - } - repo = repos[common.platform] - branch = utils.get_env("BRANCH_NAME") - core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company)) - dest_version = "%s/core/%s/%s/%s" % (repo["repo"], branch, repo["version"], repo["arch"]) - dest_latest = "%s/core/%s/%s/%s" % (repo["repo"], branch, "latest", repo["arch"]) +def make_archive(): + utils.set_cwd(utils.get_path( + "build_tools/out/" + common.prefix + "/" + branding.company_name.lower())) - if branch is None: - utils.log_err("BRANCH_NAME variable is undefined") - utils.set_summary("core deploy", False) - return - if not utils.is_file(core_7z): - utils.log_err("file not exist: " + core_7z) - utils.set_summary("core deploy", False) - return + utils.log_h2("core archive build") + utils.delete_file("core.7z") + args = ["7z", "a", "-y", "core.7z", "./core/*"] + if utils.is_windows(): + ret = utils.cmd(*args, verbose=True) + else: + ret = utils.sh(" ".join(args), verbose=True) + utils.set_summary("core archive build", ret) - utils.log_h2("core deploy") + utils.log_h2("core archive deploy") + dest = "core-" + common.prefix.replace("_","-") + ".7z" + dest_latest = "archive/%s/latest/%s" % (common.branch, dest) + dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest) ret = utils.s3_upload( - core_7z, - "s3://" + branding.s3_bucket + "/" + dest_version + "/core.7z") + "core.7z", "s3://" + branding.s3_bucket + "/" + dest_version) + utils.set_summary("core archive deploy", ret) if ret: - utils.log("URL: " + branding.s3_base_url + "/" + dest_version + "/core.7z") - utils.add_deploy_data(dest_version + "/core.7z") - ret = utils.s3_sync( - "s3://" + branding.s3_bucket + "/" + dest_version + "/", - "s3://" + branding.s3_bucket + "/" + dest_latest + "/", - delete=True) - utils.log("URL: " + branding.s3_base_url + "/" + dest_latest + "/core.7z") - utils.set_summary("core deploy", ret) + utils.log("URL: " + branding.s3_base_url + "/" + dest_version) + utils.add_deploy_data(dest_version) + utils.s3_copy( + "s3://" + branding.s3_bucket + "/" + dest_version, + "s3://" + branding.s3_bucket + "/" + dest_latest) + utils.log("URL: " + branding.s3_base_url + "/" + dest_latest) + + utils.set_cwd(common.workspace_dir) return def deploy_closuremaps_sdkjs(license): diff --git a/scripts/package_desktop.py b/scripts/package_desktop.py index 5fc62370..c16af17d 100644 --- a/scripts/package_desktop.py +++ b/scripts/package_desktop.py @@ -329,7 +329,7 @@ def make_linux(): utils.set_cwd("desktop-apps/win-linux/package/linux") utils.log_h2("desktop build") - make_args = branding.desktop_make_targets + make_args = [t["make"] for t in branding.desktop_make_targets] if common.platform == "linux_aarch64": make_args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: @@ -337,68 +337,11 @@ def make_linux(): ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True) utils.set_summary("desktop build", ret) - rpm_arch = "*" - if common.platform == "linux_aarch64": rpm_arch = "aarch64" - if common.deploy: - if ret: - utils.log_h2("desktop tar deploy") - if "tar" in branding.desktop_make_targets: - ret = s3_upload( - utils.glob_path("tar/*.tar*"), - "desktop/linux/generic/") - utils.set_summary("desktop tar deploy", 
ret) - if "deb" in branding.desktop_make_targets: - utils.log_h2("desktop deb deploy") - ret = s3_upload( - utils.glob_path("deb/*.deb"), - "desktop/linux/debian/") - utils.set_summary("desktop deb deploy", ret) - if "deb-astra" in branding.desktop_make_targets: - utils.log_h2("desktop deb-astra deploy") - ret = s3_upload( - utils.glob_path("deb-astra/*.deb"), - "desktop/linux/astra/") - utils.set_summary("desktop deb-astra deploy", ret) - if "rpm" in branding.desktop_make_targets: - utils.log_h2("desktop rpm deploy") - ret = s3_upload( - utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/rhel/") - utils.set_summary("desktop rpm deploy", ret) - if "suse-rpm" in branding.desktop_make_targets: - utils.log_h2("desktop suse-rpm deploy") - ret = s3_upload( - utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/suse/") - utils.set_summary("desktop suse-rpm deploy", ret) - if "apt-rpm" in branding.desktop_make_targets: - utils.log_h2("desktop apt-rpm deploy") - ret = s3_upload( - utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/altlinux/") - utils.set_summary("desktop apt-rpm deploy", ret) - if "urpmi" in branding.desktop_make_targets: - utils.log_h2("desktop urpmi deploy") - ret = s3_upload( - utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/rosa/") - utils.set_summary("desktop urpmi deploy", ret) - else: - if "tar" in branding.desktop_make_targets: - utils.set_summary("desktop tar deploy", False) - if "deb" in branding.desktop_make_targets: - utils.set_summary("desktop deb deploy", False) - if "deb-astra" in branding.desktop_make_targets: - utils.set_summary("desktop deb-astra deploy", False) - if "rpm" in branding.desktop_make_targets: - utils.set_summary("desktop rpm deploy", False) - if "suse-rpm" in branding.desktop_make_targets: - utils.set_summary("desktop suse-rpm deploy", False) - if "apt-rpm" in branding.desktop_make_targets: - utils.set_summary("desktop apt-rpm deploy", False) - if "urpmi" in branding.desktop_make_targets: - utils.set_summary("desktop urpmi deploy", False) + for t in branding.desktop_make_targets: + utils.log_h2("desktop " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("desktop " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_server.py b/scripts/package_server.py index 4c475fcd..a72874a9 100644 --- a/scripts/package_server.py +++ b/scripts/package_server.py @@ -61,7 +61,8 @@ def make_linux(edition): utils.set_cwd("document-server-package") utils.log_h2("server " + edition + " build") - make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name] + make_args = [t["make"] for t in branding.server_make_targets] + make_args += ["-e", "PRODUCT_NAME=" + product_name] if common.platform == "linux_aarch64": make_args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: @@ -70,40 +71,10 @@ def make_linux(edition): utils.set_summary("server " + edition + " build", ret) if common.deploy: - if ret: - if "deb" in branding.server_make_targets: - utils.log_h2("server " + edition + " deb deploy") - ret = s3_upload( - utils.glob_path("deb/*.deb"), - "server/linux/debian/") - utils.set_summary("server " + edition + " deb deploy", ret) - if "rpm" in branding.server_make_targets: - utils.log_h2("server " + edition + " rpm deploy") - ret = s3_upload( - utils.glob_path("rpm/builddir/RPMS/*/*.rpm"), - "server/linux/rhel/") - 
utils.set_summary("server " + edition + " rpm deploy", ret) - if "apt-rpm" in branding.server_make_targets: - utils.log_h2("server " + edition + " apt-rpm deploy") - ret = s3_upload( - utils.glob_path("apt-rpm/builddir/RPMS/*/*.rpm"), - "server/linux/altlinux/") - utils.set_summary("server " + edition + " apt-rpm deploy", ret) - if "tar" in branding.server_make_targets: - utils.log_h2("server " + edition + " snap deploy") - ret = s3_upload( - utils.glob_path("*.tar.gz"), - "server/linux/snap/") - utils.set_summary("server " + edition + " snap deploy", ret) - else: - if "deb" in branding.server_make_targets: - utils.set_summary("server " + edition + " deb deploy", False) - if "rpm" in branding.server_make_targets: - utils.set_summary("server " + edition + " rpm deploy", False) - if "apt-rpm" in branding.server_make_targets: - utils.set_summary("server " + edition + " apt-rpm deploy", False) - if "tar" in branding.server_make_targets: - utils.set_summary("server " + edition + " snap deploy", False) + for t in branding.server_make_targets: + utils.log_h2("server " + edition + " " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_utils.py b/scripts/package_utils.py index 75c7b6fc..1d427e54 100644 --- a/scripts/package_utils.py +++ b/scripts/package_utils.py @@ -385,15 +385,13 @@ def s3_upload(src, dst, **kwargs): ret = sh(" ".join(args), verbose=True) return ret -def s3_sync(src, dst, **kwargs): +def s3_copy(src, dst, **kwargs): args = ["aws"] if kwargs.get("endpoint_url"): args += ["--endpoint-url", kwargs["endpoint_url"]] - args += ["s3", "sync", "--no-progress"] + args += ["s3", "cp", "--no-progress"] if kwargs.get("acl"): args += ["--acl", kwargs["acl"]] - if kwargs.get("delete") and kwargs["delete"]: - args += ["--delete"] args += [src, dst] if is_windows(): ret = cmd(*args, verbose=True) diff --git a/scripts/sdkjs_common/generate_builder_intarface.py b/scripts/sdkjs_common/generate_builder_intarface.py index 2fafafd0..3bf1e42a 100644 --- a/scripts/sdkjs_common/generate_builder_intarface.py +++ b/scripts/sdkjs_common/generate_builder_intarface.py @@ -2,6 +2,8 @@ import os import shutil import re +import argparse + def readFile(path): with open(path, "r", errors='replace') as file: filedata = file.read() @@ -179,7 +181,7 @@ def append_record(self, decoration, code, init=False): def generate(self): for file in self.files: - file_content = readFile(file) + file_content = readFile(f'{sdkjs_dir}/{file}') arrRecords = file_content.split("/**") arrRecords = arrRecords[1:-1] for record in arrRecords: @@ -187,8 +189,8 @@ def generate(self): self.numfile += 1 correctContent = ''.join(self.records) correctContent += "\n" - os.mkdir('deploy/api_builder/' + self.folder) - writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent) + os.mkdir(args.destination + self.folder) + writeFile(args.destination + self.folder + "/api.js", correctContent) return def convert_to_interface(arrFiles, sEditorType): @@ -197,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType): editor.generate() return -old_cur = os.getcwd() -os.chdir("../../../sdkjs") -if True == os.path.isdir('deploy/api_builder'): - shutil.rmtree('deploy/api_builder', ignore_errors=True) -os.mkdir('deploy/api_builder') -convert_to_interface(["word/apiBuilder.js"], "word") -convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide") 
-convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell") -os.chdir(old_cur) +sdkjs_dir = "../../../sdkjs" + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + default="../../../onlyoffice.github.io\sdkjs-plugins\content\macros\libs/" # Default value + ) + args = parser.parse_args() + + old_cur = os.getcwd() + + if True == os.path.isdir(args.destination): + shutil.rmtree(args.destination, ignore_errors=True) + os.mkdir(args.destination) + convert_to_interface(["word/apiBuilder.js"], "word") + convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide") + convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell") + os.chdir(old_cur) + + diff --git a/scripts/sdkjs_common/jsdoc/README.md b/scripts/sdkjs_common/jsdoc/README.md new file mode 100644 index 00000000..01c14063 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/README.md @@ -0,0 +1,74 @@ + +# Documentation Generation Guide + +This guide explains how to generate documentation for Onlyoffice Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors. + +## Prerequisites + +1. **Node.js and npm**: Ensure you have Node.js and npm installed on your machine. You can download them from [Node.js official website](https://nodejs.org/). + +2. **jsdoc**: The scripts use `jsdoc` to generate documentation. Install it using npm: + ```bash + npm install + ``` + +## Scripts Overview + +### `generate_docs_json.py` + +This script generates JSON documentation based on the `apiBuilder.js` files. + +- **Usage**: + ```bash + python generate_docs_json.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/document-builder-declarations/document-builder`. + +### `generate_docs_plugins_json.py` + +This script generates JSON documentation based on the `api_plugins.js` files. + +- **Usage**: + ```bash + python generate_docs_plugins_json.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/document-builder-declarations/document-builder-plugin`. + +### `generate_docs_md.py` + +This script generates Markdown documentation from the `apiBuilder.js` files. + +- **Usage**: + ```bash + python generate_docs_md.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `Onlyoffice/office-js-api`. 
+ +## Examples + +To generate builder JSON documentation with a custom output path: +```bash +python generate_docs_json.py /path/to/save/json +``` + +To generate plugins JSON documentation with a custom output path: +```bash +python generate_docs_plugins_json.py /path/to/save/json +``` + +To generate Markdown documentation with a custom output path: +```bash +python generate_docs_md.py /path/to/save/markdown +``` + +## Notes + +- Make sure you have all the necessary permissions to run these scripts and write to the specified directories. +- The output directories will be created if they do not exist. + diff --git a/scripts/sdkjs_common/jsdoc/config/builder/cell.json b/scripts/sdkjs_common/jsdoc/config/builder/cell.json new file mode 100644 index 00000000..2403a820 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/builder/cell.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js new file mode 100644 index 00000000..55b87d27 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js @@ -0,0 +1,216 @@ +exports.handlers = { + processingComplete: function(e) { + // array for filtered doclets + let filteredDoclets = []; + + const cleanName = name => name ? name.replace('~', '').replaceAll('"', '') : name; + + const classesDocletsMap = {}; // class doclets are written out at the end + let passedClasses = []; // classes that passed the filter for the current editor + + // Remove duplicate doclets + const latestDoclets = {}; + e.doclets.forEach(doclet => { + const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; + const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); + + const shouldAddMethod = + doclet.kind !== 'member' && + (!doclet.longname || doclet.longname.search('private') === -1) && + doclet.scope !== 'inner' && hasTypeofEditorsTag; + + if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') { + latestDoclets[doclet.longname] = doclet; + } + }); + e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets)); + + // collect the classes available in the current editor + for (let i = 0; i < e.doclets.length; i++) { + const doclet = e.doclets[i]; + const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; + const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); + + const shouldAdd = + doclet.kind !== 'member' && + (!doclet.longname || doclet.longname.search('private') === -1) && + doclet.scope !== 'inner' && + (!isMethod || hasTypeofEditorsTag); + + if (shouldAdd) { + if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) { + passedClasses.push(cleanName(doclet.memberof)); + } + } + else if (doclet.kind == 'class') { + classesDocletsMap[cleanName(doclet.name)] = doclet; + } + } + + // remove classes that are unavailable in the current editor + passedClasses = passedClasses.filter(className => { + const doclet = classesDocletsMap[className]; + if (!doclet) { + return true; + } + + const hasTypeofEditorsTag 
= !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors')); + + // a class passes if there is no editor tag or the current editor is listed in it + const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR)); + return isPassed; + }); + + for (let i = 0; i < e.doclets.length; i++) { + const doclet = e.doclets[i]; + const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; + const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); + + const shouldAddMethod = + doclet.kind !== 'member' && + (!doclet.longname || doclet.longname.search('private') === -1) && + doclet.scope !== 'inner' && hasTypeofEditorsTag; + + if (shouldAddMethod) { + // if the class is not in our map, it was filtered out above -> not available in this editor + if (false == passedClasses.includes(cleanName(doclet.memberof))) { + continue; + } + + // Keep only the necessary fields + doclet.memberof = cleanName(doclet.memberof); + doclet.longname = cleanName(doclet.longname); + doclet.name = cleanName(doclet.name); + + const filteredDoclet = { + comment: doclet.comment, + description: doclet.description, + memberof: cleanName(doclet.memberof), + + params: doclet.params ? doclet.params.map(param => ({ + type: param.type ? { + names: param.type.names, + parsedType: param.type.parsedType + } : param.type, + + name: param.name, + description: param.description, + optional: param.optional, + defaultvalue: param.defaultvalue + })) : doclet.params, + + returns: doclet.returns ? doclet.returns.map(returnObj => ({ + type: { + names: returnObj.type.names, + parsedType: returnObj.type.parsedType + } + })) : doclet.returns, + + name: doclet.name, + longname: cleanName(doclet.longname), + kind: doclet.kind, + scope: doclet.scope, + + type: doclet.type ? { + names: doclet.type.names, + parsedType: doclet.type.parsedType + } : doclet.type, + + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + + see: doclet.see + }; + + // Add the filtered doclet to the array + filteredDoclets.push(filteredDoclet); + } + else if (doclet.kind == 'class') { + // if the class is not in our map, it was filtered out above -> not available in this editor + if (false == passedClasses.includes(cleanName(doclet.name))) { + continue; + } + + const filteredDoclet = { + comment: doclet.comment, + description: doclet.description, + name: cleanName(doclet.name), + longname: cleanName(doclet.longname), + kind: doclet.kind, + scope: "global", + augments: doclet.augments || undefined, + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? 
{ + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + see: doclet.see || undefined + }; + + filteredDoclets.push(filteredDoclet); + } + else if (doclet.kind == 'typedef') { + const filteredDoclet = { + comment: doclet.comment, + description: doclet.description, + name: cleanName(doclet.name), + longname: cleanName(doclet.longname), + kind: doclet.kind, + scope: "global", + + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + + see: doclet.see, + type: doclet.type ? { + names: doclet.type.names, + parsedType: doclet.type.parsedType + } : doclet.type + }; + + filteredDoclets.push(filteredDoclet); + } + } + + // Replace doclets with a filtered array + e.doclets.splice(0, e.doclets.length, ...filteredDoclets); + } +}; diff --git a/scripts/sdkjs_common/jsdoc/config/builder/forms.json b/scripts/sdkjs_common/jsdoc/config/builder/forms.json new file mode 100644 index 00000000..d39d5319 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/builder/forms.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/builder/slide.json b/scripts/sdkjs_common/jsdoc/config/builder/slide.json new file mode 100644 index 00000000..96b5dbf7 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/builder/slide.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/builder/word.json b/scripts/sdkjs_common/jsdoc/config/builder/word.json new file mode 100644 index 00000000..3b90c0ad --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/builder/word.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/cell.json b/scripts/sdkjs_common/jsdoc/config/plugins/cell.json new file mode 100644 index 00000000..b49b71ea --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/cell.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/cell/api_plugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git 
a/scripts/sdkjs_common/jsdoc/config/plugins/common.json b/scripts/sdkjs_common/jsdoc/config/plugins/common.json new file mode 100644 index 00000000..4bf510c2 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/common.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../sdkjs/common/apiBase_plugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js new file mode 100644 index 00000000..bea3873c --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js @@ -0,0 +1,85 @@ +exports.handlers = { + processingComplete: function(e) { + const filteredDoclets = []; + + function checkNullProps(oDoclet) { + for (let key of Object.keys(oDoclet)) { + if (oDoclet[key] == null) { + delete oDoclet[key]; + } + if (typeof(oDoclet[key]) == "object") { + checkNullProps(oDoclet[key]); + } + } + } + + for (let i = 0; i < e.doclets.length; i++) { + const doclet = e.doclets[i]; + if (true == doclet.undocumented || doclet.kind == 'package') { + continue; + } + + const filteredDoclet = { + comment: doclet.comment, + + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + + kind: doclet.kind, + since: doclet.since, + name: doclet.name, + type: doclet.type ? { + names: doclet.type.names, + parsedType: doclet.type.parsedType + } : doclet.type, + + description: doclet.description, + memberof: doclet.memberof, + + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + + longname: doclet.longname, + scope: doclet.scope, + alias: doclet.alias, + + params: doclet.params ? doclet.params.map(param => ({ + type: param.type ? { + names: param.type.names, + parsedType: param.type.parsedType + } : param.type, + + name: param.name, + description: param.description, + optional: param.optional, + defaultvalue: param.defaultvalue + })) : doclet.params, + + returns: doclet.returns ? 
doclet.returns.map(returnObj => ({ + type: { + names: returnObj.type.names, + parsedType: returnObj.type.parsedType + } + })) : doclet.returns, + see: doclet.see + }; + + checkNullProps(filteredDoclet) + + filteredDoclets.push(filteredDoclet); + } + + e.doclets.splice(0, e.doclets.length, ...filteredDoclets); + } +}; \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/forms.json b/scripts/sdkjs_common/jsdoc/config/plugins/forms.json new file mode 100644 index 00000000..65cb6a2e --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/forms.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs-forms/apiPlugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/slide.json b/scripts/sdkjs_common/jsdoc/config/plugins/slide.json new file mode 100644 index 00000000..d0151716 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/slide.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/slide/api_plugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/word.json b/scripts/sdkjs_common/jsdoc/config/plugins/word.json new file mode 100644 index 00000000..b06743ea --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/word.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py new file mode 100644 index 00000000..b920dba4 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -0,0 +1,144 @@ +import os +import subprocess +import json +import argparse +import re + +root = '../../../..' 
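+# Note: paths in the jsdoc configs and the defaults below are resolved relative to
+# this script's directory (scripts/sdkjs_common/jsdoc), so 'root' points at the
+# workspace that contains build_tools, sdkjs and the documentation repositories.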
+ +# Configuration files +configs = [ + "./config/builder/word.json", + "./config/builder/cell.json", + "./config/builder/slide.json", + "./config/builder/forms.json" +] + +editors_maps = { + "word": "CDE", + "cell": "CSE", + "slide": "CPE", + "forms": "CFE" +} + +def generate(output_dir, md=False): + missing_examples_file = f'{output_dir}/missing_examples.txt' + + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + # Recreate missing_examples.txt file + with open(missing_examples_file, 'w', encoding='utf-8') as f: + f.write('') + + # Generate JSON documentation + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + command = f"set EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}" + print(f"Generating {editor_name}.json: {command}") + subprocess.run(command, shell=True) + + # Append examples to JSON documentation + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + + # Read the JSON file + with open(output_file, 'r', encoding='utf-8') as f: + data = json.load(f) + + # Modify JSON data + for doclet in data: + if 'see' in doclet: + if doclet['see'] is not None: + if editor_name == 'forms': + doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word') + else: + doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title()) + + file_path = f'{root}/' + doclet['see'][0] + + if os.path.exists(file_path): + with open(file_path, 'r', encoding='utf-8') as see_file: + example_content = see_file.read() + + # Extract the first line as a comment if it exists + lines = example_content.split('\n') + if lines[0].startswith('//'): + comment = lines[0] + '\n' + code_content = '\n'.join(lines[1:]) + else: + comment = '' + code_content = example_content + + if md == True: + doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" + + if md == False: + document_type = editor_name + if "forms" == document_type: + document_type = "pdf" + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```' + + else: + # Record missing examples in missing_examples.txt + with open(missing_examples_file, 'a', encoding='utf-8') as missing_file: + missing_file.write(f"{file_path}\n") + + # Write the modified JSON file back + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(data, f, ensure_ascii=False, indent=4) + + print("Documentation generation for builder completed.") + +def remove_builder_lines(text): + lines = text.splitlines() # Split text into lines + filtered_lines = [line for line in lines if not line.strip().startswith("builder.")] + return "\n".join(filtered_lines) + +def remove_js_comments(text): + # Remove single-line comments, leaving text after // + text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) + # Remove multi-line comments, leaving text after /* + text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) + return text.strip() + +def get_current_branch(path): + try: + # Navigate to the specified directory and get the current branch name + result = subprocess.run( + ["git", "rev-parse", "--abbrev-ref", "HEAD"], + cwd=path, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True + ) + if result.returncode == 0: + return result.stdout.strip() + else: + print(f"Error: {result.stderr}") + return None + except 
Exception as e: + print(f"Exception: {e}") + return None + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + default=f"{root}/document-builder-declarations/document-builder" # Default value + ) + args = parser.parse_args() + + branch_name = get_current_branch(f"{root}/sdkjs") + if branch_name: + index_last_name = branch_name.rfind("/") + if -1 != index_last_name: + branch_name = branch_name[index_last_name + 1:] + args.destination = f"{args.destination}/{branch_name}" + + generate(args.destination) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md.py b/scripts/sdkjs_common/jsdoc/generate_docs_md.py new file mode 100644 index 00000000..ea4dc39d --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/generate_docs_md.py @@ -0,0 +1,266 @@ +import os +import json +import re +import shutil +import argparse +import generate_docs_json + +# Configuration files +editors = [ + "word", + "cell", + "slide", + "forms" +] + +def load_json(file_path): + with open(file_path, 'r', encoding='utf-8') as f: + return json.load(f) + +def write_markdown_file(file_path, content): + with open(file_path, 'w', encoding='utf-8') as md_file: + md_file.write(content) + +def remove_js_comments(text): + # Remove single-line comments, leaving text after // + text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) + # Remove multi-line comments, leaving text after /* + text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) + return text.strip() + +def correct_description(string): + if string is None: + return 'No description provided.' + + # Replace the opening <b> tag with ** + string = re.sub(r'<b>', '**', string) + # Replace the closing </b> tag with ** + string = re.sub(r'</b>', '**', string) + # Wrap <note>...</note> content with a 💡 marker + return re.sub(r'<note>(.*?)</note>', r'💡 \1', string, flags=re.DOTALL) + +def correct_default_value(value, enumerations, classes): + if value is None: + return '' + + if value == True: + value = "true" + elif value == False: + value = "false" + else: + value = str(value) + + return generate_data_types_markdown([value], enumerations, classes) + +def remove_line_breaks(string): + return re.sub(r'[\r\n]', '', string) + +def generate_data_types_markdown(types, enumerations, classes, root='../../'): + param_types_md = ' |'.join(types) + + for enum in enumerations: + if enum['name'] in types: + param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}]({root}Enumeration/{enum['name']}.md)") + for cls in classes: + if cls in types: + param_types_md = param_types_md.replace(cls, f"[{cls}]({root}{cls}/{cls}.md)") + + def replace_with_links(match): + element = match.group(1).strip() + base_type = element.split('.')[0] # Take only the first part before the dot, if any + if any(enum['name'] == base_type for enum in enumerations): + return f"<[{element}](../../Enumeration/{base_type}.md)>" + elif base_type in classes: + return f"<[{element}](../../{base_type}/{base_type}.md)>" + return f"<{element}>" + + return re.sub(r'<([^<>]+)>', replace_with_links, param_types_md) + +def generate_class_markdown(class_name, methods, properties, enumerations, classes): + content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n" + + content += generate_properties_markdown(properties, enumerations, classes, '../') + + content += "## Methods\n\n" + for method in methods: + method_name = method['name'] + content += f"- 
[{method_name}](./Methods/{method_name}.md)\n" + return content + +def generate_method_markdown(method, enumerations, classes): + method_name = method['name'] + description = method.get('description', 'No description provided.') + description = correct_description(description) + params = method.get('params', []) + returns = method.get('returns', []) + example = method.get('example', '') + memberof = method.get('memberof', '') + + content = f"# {method_name}\n\n{description}\n\n" + + # Syntax section + param_list = ', '.join([param['name'] for param in params]) if params else '' + content += f"## Syntax\n\nexpression.{method_name}({param_list});\n\n" + if memberof: + content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n" + + content += "## Parameters\n\n" + + if params: + content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n" + content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n" + for param in params: + param_name = param.get('name', 'Unnamed') + param_types = param.get('type', {}).get('names', []) if param.get('type') else [] + param_types_md = generate_data_types_markdown(param_types, enumerations, classes) + param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'))) + param_required = "Required" if not param.get('optional') else "Optional" + param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes) + + content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n" + else: + content += "This method doesn't have any parameters.\n" + + content += "\n## Returns\n\n" + if returns: + return_type = ', '.join(returns[0].get('type', {}).get('names', [])) if returns[0].get('type') else 'Unknown' + + # Check for enumerations and classes in return type and add links if they exist + return_type_md = generate_data_types_markdown([return_type], enumerations, classes) + content += return_type_md + else: + content += "This method doesn't return any data." 
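+    # 'example' values are assembled by generate_docs_json.py in the form
+    # "<lead-in comment>```js\n<code>\n```", so splitting on the first ```js
+    # fence recovers the prose and the code separately.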
+ + if example: + # Separate comment and code, and remove comment symbols + comment, code = example.split('```js', 1) + comment = remove_js_comments(comment) + content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n" + + return content + +def generate_properties_markdown(properties, enumerations, classes, root='../../'): + if (properties is None): + return '' + + content = "## Properties\n\n" + content += "| Name | Type | Description |\n" + content += "| ---- | ---- | ----------- |\n" + for prop in properties: + prop_name = prop['name'] + prop_description = prop.get('description', 'No description provided.') + prop_description = remove_line_breaks(correct_description(prop_description)) + param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes, root) + content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" + content += "\n" + + return content + + +def generate_enumeration_markdown(enumeration, enumerations, classes): + enum_name = enumeration['name'] + description = enumeration.get('description', 'No description provided.') + description = correct_description(description) + example = enumeration.get('example', '') + + content = f"# {enum_name}\n\n{description}\n\n" + + if 'TypeUnion' == enumeration['type']['parsedType']['type']: + content += "## Type\n\nEnumeration\n\n" + content += "## Values\n\n" + elements = enumeration['type']['parsedType']['elements'] + for element in elements: + element_name = element['name'] if element['type'] != 'NullLiteral' else 'null' + # Check if element is in enumerations or classes before adding link + if any(enum['name'] == element_name for enum in enumerations): + content += f"- [{element_name}](../../Enumeration/{element_name}.md)\n" + elif element_name in classes: + content += f"- [{element_name}](../../{element_name}/{element_name}.md)\n" + else: + content += f"- {element_name}\n" + elif enumeration['properties'] is not None: + content += "## Type\n\nObject\n\n" + content += generate_properties_markdown(enumeration['properties'], enumerations, classes) + else: + content += "## Type\n\n" + types = enumeration['type']['names'] + for t in types: + t = generate_data_types_markdown([t], enumerations, classes) + content += t + "\n\n" + + if example: + # Separate comment and code, and remove comment symbols + comment, code = example.split('```js', 1) + comment = remove_js_comments(comment) + content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n" + + return content + +def process_doclets(data, output_dir): + classes = {} + classes_props = {} + enumerations = [] + + for doclet in data: + if doclet['kind'] == 'class': + class_name = doclet['name'] + classes[class_name] = [] + classes_props[class_name] = doclet.get('properties', None) + elif doclet['kind'] == 'function': + class_name = doclet.get('memberof') + if class_name: + if class_name not in classes: + classes[class_name] = [] + classes[class_name].append(doclet) + elif doclet['kind'] == 'typedef': + enumerations.append(doclet) + + # Process classes + for class_name, methods in classes.items(): + class_dir = os.path.join(output_dir, class_name) + methods_dir = os.path.join(class_dir, 'Methods') + os.makedirs(methods_dir, exist_ok=True) + + # Write class file + class_content = generate_class_markdown(class_name, methods, classes_props[class_name], enumerations, classes) + write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content) + + # Write method files + for method in methods: + method_content = 
diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py
new file mode 100644
index 00000000..bf1cff46
--- /dev/null
+++ b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py
@@ -0,0 +1,137 @@
+import os
+import subprocess
+import json
+import argparse
+import re
+
+# Configuration files
+configs = [
+    "./config/plugins/common.json",
+    "./config/plugins/word.json",
+    "./config/plugins/cell.json",
+    "./config/plugins/slide.json",
+    "./config/plugins/forms.json"
+]
+
+root = '../../../..'
+
+def generate(output_dir, md=False):
+    missing_examples_file = f'{output_dir}/missing_examples.txt'
+
+    if not os.path.exists(output_dir):
+        os.makedirs(output_dir)
+
+    # Recreate missing_examples.txt file
+    with open(missing_examples_file, 'w', encoding='utf-8') as f:
+        f.write('')
+
+    # Generate JSON documentation
+    for config in configs:
+        editor_name = config.split('/')[-1].replace('.json', '')
+        output_file = os.path.join(output_dir, editor_name + ".json")
+        command = f"npx jsdoc -c {config} -X > {output_file}"
+        print(f"Generating {editor_name}.json: {command}")
+        subprocess.run(command, shell=True)
+
+    # Append examples to JSON documentation
+    for config in configs:
+        editor_name = config.split('/')[-1].replace('.json', '')
+        output_file = os.path.join(output_dir, editor_name + ".json")
+
+        # Read the JSON file
+        with open(output_file, 'r', encoding='utf-8') as f:
+            data = json.load(f)
+
+        # Modify JSON data
+        for doclet in data:
+            if 'see' in doclet:
+                if doclet['see'] is not None:
+                    if editor_name == 'forms':
+                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word')
+                    else:
+                        doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title())
+
+                    file_path = f'{root}/' + doclet['see'][0]
+
+                    if os.path.exists(file_path):
+                        with open(file_path, 'r', encoding='utf-8') as see_file:
+                            example_content = see_file.read()
+
+                        # Extract the first line as a comment if it exists
+                        lines = example_content.split('\n')
+                        if lines[0].startswith('//'):
+                            comment = lines[0] + '\n'
+                            code_content = '\n'.join(lines[1:])
+                        else:
+                            comment = ''
+                            code_content = example_content
+
+                        doclet['examples'] = [remove_js_comments(comment) + code_content]
+
+                        if md == False:
+                            document_type = editor_name
+                            if "forms" == document_type:
+                                document_type = "pdf"
+                            doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```'
+
+                    else:
+                        # Record missing examples in missing_examples.txt
+                        with open(missing_examples_file, 'a', encoding='utf-8') as missing_file:
+                            missing_file.write(f"{file_path}\n")
+
+        # Write the modified JSON file back
+        with open(output_file, 'w', encoding='utf-8') as f:
+            json.dump(data, f, ensure_ascii=False, indent=4)
+
+    print("Documentation generation for builder completed.")
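When `md` is false, the branch above splices a runnable "Try it" section into each doclet description. A small sketch of just that string construction; `build_try_it` and the sample snippet are illustrative names, not part of the patch:

```python
def build_try_it(editor_name, code_content):
    # Condensed from generate() above: 'forms' examples run as 'pdf' documents
    document_type = "pdf" if editor_name == "forms" else editor_name
    fence = "`" * 3  # literal ``` kept out of the source so this block nests cleanly
    return (f'\n\n## Try it\n\n {fence}js document-builder={{"documentType": "{document_type}"}}'
            f'\n{code_content}\n{fence}')

# Illustrative snippet; the real code comes from the files referenced by @see tags
print(build_try_it("forms", "var oDocument = Api.GetDocument();"))
```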
+
+def remove_builder_lines(text):
+    lines = text.splitlines()  # Split text into lines
+    filtered_lines = [line for line in lines if not line.strip().startswith("builder.")]
+    return "\n".join(filtered_lines)
+
+def remove_js_comments(text):
+    # Remove single-line comments, leaving text after //
+    text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE)
+    # Remove multi-line comments, leaving text after /*
+    text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL)
+    return text.strip()
+
+def get_current_branch(path):
+    try:
+        # Navigate to the specified directory and get the current branch name
+        result = subprocess.run(
+            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
+            cwd=path,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=True
+        )
+        if result.returncode == 0:
+            return result.stdout.strip()
+        else:
+            print(f"Error: {result.stderr}")
+            return None
+    except Exception as e:
+        print(f"Exception: {e}")
+        return None
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Generate documentation")
+    parser.add_argument(
+        "destination",
+        type=str,
+        help="Destination directory for the generated documentation",
+        nargs='?',  # Indicates the argument is optional
+        default=f"{root}/document-builder-declarations/document-builder-plugin"  # Default value
+    )
+    args = parser.parse_args()
+
+    branch_name = get_current_branch(f"{root}/sdkjs")
+    if branch_name:
+        index_last_name = branch_name.rfind("/")
+        if -1 != index_last_name:
+            branch_name = branch_name[index_last_name + 1:]
+        args.destination = f"{args.destination}/{branch_name}"
+
+    generate(args.destination)
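The `__main__` block above nests the generated output under the short branch name. A quick sketch of that trimming with a hypothetical branch value:

```python
# Hypothetical value, as returned by get_current_branch() above
# (i.e. `git rev-parse --abbrev-ref HEAD` run in the sdkjs checkout)
branch_name = "release/v8.2.0"

index_last_name = branch_name.rfind("/")
if -1 != index_last_name:
    branch_name = branch_name[index_last_name + 1:]

# The generated docs for this branch land in <destination>/v8.2.0
assert branch_name == "v8.2.0"
```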
diff --git a/scripts/sdkjs_common/jsdoc/package.json b/scripts/sdkjs_common/jsdoc/package.json
new file mode 100644
index 00000000..3bda5f24
--- /dev/null
+++ b/scripts/sdkjs_common/jsdoc/package.json
@@ -0,0 +1,7 @@
+{
+    "dependencies": {
+        "jsdoc-to-markdown": "7.1.1",
+        "dmd": "6.1.0",
+        "handlebars": "4.7.7"
+    }
+}
diff --git a/scripts/sln.py b/scripts/sln.py
index fc7e10a9..5b578d47 100644
--- a/scripts/sln.py
+++ b/scripts/sln.py
@@ -14,6 +14,15 @@ def is_exist_in_array(projects, proj):
             return True
     return False
 
+def get_full_projects_list(json_data, list):
+    result = []
+    for rec in list:
+        if rec in json_data:
+            result += get_full_projects_list(json_data, json_data[rec])
+        else:
+            result.append(rec)
+    return result
+
 def adjust_project_params(params):
     ret_params = params
 
@@ -86,13 +95,9 @@ def get_projects(pro_json_path, platform):
     # check aliases to modules
     records_src = data[module]
 
-    records = []
+    records = get_full_projects_list(data, records_src)
 
-    for rec in records_src:
-        if rec in data:
-            records += data[rec]
-        else:
-            records.append(rec)
+    print(records)
 
     for rec in records:
         params = []
diff --git a/sln.json b/sln.json
index ecd3f438..6a596c5f 100644
--- a/sln.json
+++ b/sln.json
@@ -1,6 +1,10 @@
 {
     "root" : "../",
 
+    "spell" : [
+        "[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
+    ],
+
     "core" : [
 
         "core/Common/3dParty/cryptopp/project/cryptopp.pro",
@@ -51,10 +55,13 @@
 
         "[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro",
 
+        "spell",
+
         "[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro",
         "[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro",
         "[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro",
-        "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro"
+        "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro",
+        "[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro"
 
     ],
 
@@ -70,13 +77,8 @@
         "[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro"
     ],
 
-    "spell" : [
-        "[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro"
-    ],
-
     "desktop" : [
         "core",
-        "spell",
         "multimedia",
 
         "core/DesktopEditor/xmlsec/src/ooxmlsignature.pro",
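The new `get_full_projects_list` in scripts/sln.py makes alias expansion recursive, which is what lets the `core` module in sln.json pull in the nested `spell` alias above. A standalone sketch of the same logic (the parameter is renamed from `list` to avoid shadowing the builtin; the module map is a trimmed-down stand-in for sln.json):

```python
def get_full_projects_list(json_data, records):
    # Mirrors scripts/sln.py: expand aliases depth-first until only .pro paths remain
    result = []
    for rec in records:
        if rec in json_data:
            result += get_full_projects_list(json_data, json_data[rec])
        else:
            result.append(rec)
    return result

# Trimmed-down stand-in for sln.json: "core" references the "spell" alias
data = {
    "spell": ["hunspell.pro"],
    "core": ["cryptopp.pro", "spell"],
}
assert get_full_projects_list(data, data["core"]) == ["cryptopp.pro", "hunspell.pro"]
```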