From 6338fd58c378928efb392e6a650f0bf62c363d68 Mon Sep 17 00:00:00 2001 From: Vladimir Privezenov Date: Tue, 19 Mar 2024 17:39:28 +0300 Subject: [PATCH 01/26] Split functions --- scripts/build_js.py | 16 +++++++++++++--- scripts/develop/run_server.py | 27 ++++++++++++++++++--------- 2 files changed, 31 insertions(+), 12 deletions(-) diff --git a/scripts/build_js.py b/scripts/build_js.py index 12c8bed6..ee5e6d3f 100644 --- a/scripts/build_js.py +++ b/scripts/build_js.py @@ -135,15 +135,25 @@ def build_sdk_native(directory, minimize=True): _run_grunt(directory, get_build_param(minimize) + ["--mobile=true"] + base.sdkjs_addons_param()) return -def build_js_develop(root_dir): - #_run_npm_cli(root_dir + "/sdkjs/build") + +def build_sdkjs_develop(root_dir): external_folder = config.option("--external-folder") if (external_folder != ""): external_folder = "/" + external_folder - + _run_npm_ci(root_dir + external_folder + "/sdkjs/build") _run_grunt(root_dir + external_folder + "/sdkjs/build", get_build_param(False) + base.sdkjs_addons_param()) _run_grunt(root_dir + external_folder + "/sdkjs/build", ["develop"] + base.sdkjs_addons_param()) + + +def build_js_develop(root_dir): + #_run_npm_cli(root_dir + "/sdkjs/build") + external_folder = config.option("--external-folder") + if (external_folder != ""): + external_folder = "/" + external_folder + + build_sdkjs_develop(root_dir) + _run_npm(root_dir + external_folder + "/web-apps/build") _run_npm_ci(root_dir + external_folder + "/web-apps/build/sprites") _run_grunt(root_dir + external_folder + "/web-apps/build/sprites", []) diff --git a/scripts/develop/run_server.py b/scripts/develop/run_server.py index 2e966135..b0caa4c1 100644 --- a/scripts/develop/run_server.py +++ b/scripts/develop/run_server.py @@ -49,7 +49,23 @@ def run_integration_example(): def start_linux_services(): base.print_info('Restart MySQL Server') - + + +def update_config(args): + platform = base.host_platform() + branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout'] + + if ("linux" == platform): + base.cmd_in_dir(base_dir + '/../../', 'python', + ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', + '--update', '1', '--update-light', '1', '--clean', '0'] + args) + else: + base.cmd_in_dir(base_dir + '/../../', 'python', + ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', + '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', + '--db-user', 'root', '--db-pass', 'onlyoffice'] + args) + + def make_start(): base.configure_common_apps() @@ -64,15 +80,8 @@ def make_start(): start_linux_services() def make_configure(args): - platform = base.host_platform() - branch = base.run_command('git rev-parse --abbrev-ref HEAD')['stdout'] - base.print_info('Build modules') - if ("linux" == platform): - base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0'] + args) - else: - base.cmd_in_dir(base_dir + '/../../', 'python', ['configure.py', '--branch', branch or 'develop', '--develop', '1', '--module', 'server', '--update', '1', '--update-light', '1', '--clean', '0', '--sql-type', 'mysql', '--db-port', '3306', '--db-user', 'root', '--db-pass', 'onlyoffice'] + args) - + update_config(args) base.cmd_in_dir(base_dir + '/../../', 'python', ['make.py']) def make_install(): platform = base.host_platform() From 
e46d73869cd9e42ee4772241ee365e794d8cdd9b Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Fri, 26 Jul 2024 18:32:49 +0700 Subject: [PATCH 02/26] Added documentation generation scripts --- .../generate_builder_intarface.py | 41 ++- scripts/sdkjs_common/jsdoc/README.md | 57 ++++ scripts/sdkjs_common/jsdoc/config/cell.json | 15 ++ .../jsdoc/config/correct_doclets.js | 110 ++++++++ scripts/sdkjs_common/jsdoc/config/forms.json | 16 ++ scripts/sdkjs_common/jsdoc/config/slide.json | 15 ++ scripts/sdkjs_common/jsdoc/config/word.json | 16 ++ .../sdkjs_common/jsdoc/generate_docs_json.py | 91 +++++++ .../sdkjs_common/jsdoc/generate_docs_md.py | 254 ++++++++++++++++++ scripts/sdkjs_common/jsdoc/package.json | 7 + 10 files changed, 610 insertions(+), 12 deletions(-) create mode 100644 scripts/sdkjs_common/jsdoc/README.md create mode 100644 scripts/sdkjs_common/jsdoc/config/cell.json create mode 100644 scripts/sdkjs_common/jsdoc/config/correct_doclets.js create mode 100644 scripts/sdkjs_common/jsdoc/config/forms.json create mode 100644 scripts/sdkjs_common/jsdoc/config/slide.json create mode 100644 scripts/sdkjs_common/jsdoc/config/word.json create mode 100644 scripts/sdkjs_common/jsdoc/generate_docs_json.py create mode 100644 scripts/sdkjs_common/jsdoc/generate_docs_md.py create mode 100644 scripts/sdkjs_common/jsdoc/package.json diff --git a/scripts/sdkjs_common/generate_builder_intarface.py b/scripts/sdkjs_common/generate_builder_intarface.py index 2fafafd0..3bf1e42a 100644 --- a/scripts/sdkjs_common/generate_builder_intarface.py +++ b/scripts/sdkjs_common/generate_builder_intarface.py @@ -2,6 +2,8 @@ import os import shutil import re +import argparse + def readFile(path): with open(path, "r", errors='replace') as file: filedata = file.read() @@ -179,7 +181,7 @@ def append_record(self, decoration, code, init=False): def generate(self): for file in self.files: - file_content = readFile(file) + file_content = readFile(f'{sdkjs_dir}/{file}') arrRecords = file_content.split("/**") arrRecords = arrRecords[1:-1] for record in arrRecords: @@ -187,8 +189,8 @@ def generate(self): self.numfile += 1 correctContent = ''.join(self.records) correctContent += "\n" - os.mkdir('deploy/api_builder/' + self.folder) - writeFile("deploy/api_builder/" + self.folder + "/api.js", correctContent) + os.mkdir(args.destination + self.folder) + writeFile(args.destination + self.folder + "/api.js", correctContent) return def convert_to_interface(arrFiles, sEditorType): @@ -197,12 +199,27 @@ def convert_to_interface(arrFiles, sEditorType): editor.generate() return -old_cur = os.getcwd() -os.chdir("../../../sdkjs") -if True == os.path.isdir('deploy/api_builder'): - shutil.rmtree('deploy/api_builder', ignore_errors=True) -os.mkdir('deploy/api_builder') -convert_to_interface(["word/apiBuilder.js"], "word") -convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide") -convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell") -os.chdir(old_cur) +sdkjs_dir = "../../../sdkjs" + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + default="../../../onlyoffice.github.io\sdkjs-plugins\content\macros\libs/" # Default value + ) + args = parser.parse_args() + + old_cur = os.getcwd() + + if True == os.path.isdir(args.destination): + shutil.rmtree(args.destination, 
ignore_errors=True) + os.mkdir(args.destination) + convert_to_interface(["word/apiBuilder.js"], "word") + convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js"], "slide") + convert_to_interface(["word/apiBuilder.js", "slide/apiBuilder.js", "cell/apiBuilder.js"], "cell") + os.chdir(old_cur) + + diff --git a/scripts/sdkjs_common/jsdoc/README.md b/scripts/sdkjs_common/jsdoc/README.md new file mode 100644 index 00000000..61ad80af --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/README.md @@ -0,0 +1,57 @@ + +# Documentation Generation Guide + +This guide explains how to generate documentation for Onlyoffice API using the provided Python scripts, `generate_docs_json.py` and `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors. + +## Prerequisites + +1. **Node.js and npm**: Ensure you have Node.js and npm installed on your machine. You can download them from [Node.js official website](https://nodejs.org/). + +2. **jsdoc**: The scripts use `jsdoc` to generate documentation. Install it using npm: + ```bash + npm install + ``` + +## Scripts Overview + +### `generate_docs_json.py` + +This script generates JSON documentation based on the `apiBuilder.js` files. + +- **Usage**: + ```bash + python generate_docs_json.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/sdkjs/deploy/api_builder/json`. + +### `generate_docs_md.py` + +This script generates Markdown documentation from the `apiBuilder.js` files. + +- **Usage**: + ```bash + python generate_docs_md.py output_path + ``` + +- **Parameters**: + - `output_path` (optional): The directory where the Markdown documentation will be saved. If not specified, the default path is `Onlyoffice/office-js-api`. + +## Example + +To generate JSON documentation with the default output path: +```bash +python generate_docs_json.py /path/to/save/json +``` + +To generate Markdown documentation and specify a custom output path: +```bash +python generate_docs_md.py /path/to/save/markdown +``` + +## Notes + +- Make sure to have all necessary permissions to run these scripts and write to the specified directories. +- The output directories will be created if they do not exist. + diff --git a/scripts/sdkjs_common/jsdoc/config/cell.json b/scripts/sdkjs_common/jsdoc/config/cell.json new file mode 100644 index 00000000..f30d4b66 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/cell.json @@ -0,0 +1,15 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js", "../../../../sdkjs/cell/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js new file mode 100644 index 00000000..28919711 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js @@ -0,0 +1,110 @@ +exports.handlers = { + processingComplete: function(e) { + // Инициализация массива для сохранения отфильтрованных doclets + const filteredDoclets = []; + + const cleanName = name => name ? 
name.replace('~', '').replaceAll('"', '') : name; + + // Итерация по doclets и фильтрация + for (let i = 0; i < e.doclets.length; i++) { + const doclet = e.doclets[i]; + const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; + const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); + + const shouldAddMethod = + doclet.kind !== 'member' && + (!doclet.longname || doclet.longname.search('private') === -1) && + doclet.scope !== 'inner' && + (!isMethod || hasTypeofEditorsTag); + + if (shouldAddMethod) { + // Оставляем только нужные поля + doclet.memberof = cleanName(doclet.memberof); + doclet.longname = cleanName(doclet.longname); + doclet.name = cleanName(doclet.name); + + const filteredDoclet = { + comment: doclet.comment, + description: doclet.description, + memberof: cleanName(doclet.memberof), + + params: doclet.params ? doclet.params.map(param => ({ + type: param.type ? { + names: param.type.names, + parsedType: param.type.parsedType + } : param.type, + + name: param.name, + description: param.description, + optional: param.optional, + defaultvalue: param.defaultvalue + })) : doclet.params, + + returns: doclet.returns ? doclet.returns.map(returnObj => ({ + type: { + names: returnObj.type.names, + parsedType: returnObj.type.parsedType + } + })) : doclet.returns, + + name: doclet.name, + longname: cleanName(doclet.longname), + kind: doclet.kind, + scope: doclet.scope, + + type: doclet.type ? { + names: doclet.type.names, + parsedType: doclet.type.parsedType + } : doclet.type, + + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + + see: doclet.see + }; + + if (!doclet.see) { + delete doclet.see; + } + + // Добавляем отфильтрованный doclet в массив + filteredDoclets.push(filteredDoclet); + } + else if (doclet.kind == 'class') { + const filteredDoclet = { + comment: doclet.comment, + description: doclet.description, + name: cleanName(doclet.name), + longname: cleanName(doclet.longname), + kind: doclet.kind, + scope: "global", + augments: doclet.augments || undefined, + meta: doclet.meta ? 
{ + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + see: doclet.see || undefined + }; + + filteredDoclets.push(filteredDoclet); + } + } + + // Заменяем doclets на отфильтрованный массив + e.doclets.splice(0, e.doclets.length, ...filteredDoclets); + } +}; diff --git a/scripts/sdkjs_common/jsdoc/config/forms.json b/scripts/sdkjs_common/jsdoc/config/forms.json new file mode 100644 index 00000000..c4e83886 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/forms.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs-forms/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/slide.json b/scripts/sdkjs_common/jsdoc/config/slide.json new file mode 100644 index 00000000..2c8532b3 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/slide.json @@ -0,0 +1,15 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs/slide/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/word.json b/scripts/sdkjs_common/jsdoc/config/word.json new file mode 100644 index 00000000..3b90c0ad --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/word.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/apiBuilder.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py new file mode 100644 index 00000000..5c678e5a --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -0,0 +1,91 @@ +import os +import subprocess +import json +import argparse + +# Конфигурационные файлы +configs = [ + "./config/word.json", + "./config/cell.json", + "./config/slide.json", + "./config/forms.json" +] + +editors_maps = { + "word": "CDE", + "cell": "CSE", + "slide": "CPE", + "forms": "CFE" +} + +def generate(output_dir): + missing_examples_file = f'{output_dir}/missing_examples.txt' + + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + # Пересоздание файла missing_examples.txt + with open(missing_examples_file, 'w', encoding='utf-8') as f: + f.write('') + + # Генерация json документации + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + command = f"set EDITOR={editors_maps[editor_name]} && npx jsdoc -c {config} -X > {output_file}" + print(f"Generating {editor_name}.json: {command}") + subprocess.run(command, shell=True) + + # дозапись примеров в json документацию + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + + # Чтение JSON файла + with open(output_file, 'r', encoding='utf-8') as f: + data = json.load(f) + + # Модификация JSON данных + for doclet in data: + if 'see' in doclet: + if doclet['see'] is not None: + file_path = 'C:\\Users\\khrom\\Desktop\\Onlyoffice\\' + doclet['see'][0] + if os.path.exists(file_path): + with open(file_path, 'r', 
encoding='utf-8') as see_file: + example_content = see_file.read() + + # Извлечение первой строки как комментария, если она существует + lines = example_content.split('\n') + if lines[0].startswith('//'): + comment = lines[0] + '\n' + code_content = '\n'.join(lines[1:]) + else: + comment = '' + code_content = example_content + + # Форматирование содержимого для doclet['example'] + doclet['example'] = comment + "```js\n" + code_content + "\n```" + del doclet['see'] + else: + # Запись пропущенного примера в файл missing_examples.txt + with open(missing_examples_file, 'a', encoding='utf-8') as missing_file: + missing_file.write(f"{file_path}\n") + + # Запись измененного JSON файла обратно + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(data, f, ensure_ascii=False, indent=4) + + print("Documentation generation completed.") + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + default="../../../../document-builder-declarations/document-builder" # Default value + ) + args = parser.parse_args() + + generate(args.destination) \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md.py b/scripts/sdkjs_common/jsdoc/generate_docs_md.py new file mode 100644 index 00000000..a8d9dd99 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/generate_docs_md.py @@ -0,0 +1,254 @@ +import os +import json +import re +import shutil +import argparse +import generate_docs_json + +# Конфигурационные файлы +editors = [ + "word", + "cell", + "slide", + "forms" +] + +def load_json(file_path): + with open(file_path, 'r', encoding='utf-8') as f: + return json.load(f) + +def write_markdown_file(file_path, content): + with open(file_path, 'w', encoding='utf-8') as md_file: + md_file.write(content) + +def remove_js_comments(text): + # Удаляем однострочные комментарии, оставляя текст после // + text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) + # Удаляем многострочные комментарии, оставляя текст после /* + text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) + return text.strip() + +def correct_description(string): + if string is None: + return 'No description provided.' 
+ + # Заменяем открывающий тег на ** + string = re.sub(r'', '**', string) + # Заменяем закрывающий тег на ** + string = re.sub(r'', '**', string) + # Заметка + return re.sub(r'(.*?)', r'💡 \1', string, flags=re.DOTALL) + + +def correct_default_value(value, enumerations, classes): + if value is None: + return '' + + if value == True: + value = "true" + elif value == False: + value = "false" + else: + value = str(value) + + return generate_data_types_markdown([value], enumerations, classes) + +def remove_line_breaks(string): + return re.sub(r'[\r\n]', '', string) + +def generate_data_types_markdown(types, enumerations, classes): + param_types_md = ' |'.join(types) + + for enum in enumerations: + if enum['name'] in types: + param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}](../../Enumeration/{enum['name']}.md)") + for cls in classes: + if cls in types: + param_types_md = param_types_md.replace(cls, f"[{cls}](../../{cls}/{cls}.md)") + + def replace_with_links(match): + element = match.group(1).strip() + base_type = element.split('.')[0] # Берем только первую часть до точки, если она есть + if any(enum['name'] == base_type for enum in enumerations): + return f"<[{element}](../../Enumeration/{base_type}.md)>" + elif base_type in classes: + return f"<[{element}](../../{base_type}/{base_type}.md)>" + return f"<{element}>" + + return re.sub(r'<([^<>]+)>', replace_with_links, param_types_md) + +def generate_class_markdown(class_name, methods): + content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n" + content += "## Methods\n\n" + for method in methods: + method_name = method['name'] + content += f"- [{method_name}](./Methods/{method_name}.md)\n" + return content + +def generate_method_markdown(method, enumerations, classes): + method_name = method['name'] + description = method.get('description', 'No description provided.') + description = correct_description(description) + params = method.get('params', []) + returns = method.get('returns', []) + example = method.get('example', '') + memberof = method.get('memberof', '') + + content = f"# {method_name}\n\n{description}\n\n" + + # Syntax section + param_list = ', '.join([param['name'] for param in params]) if params else '' + content += f"## Syntax\n\nexpression.{method_name}({param_list});\n\n" + if memberof: + content += f"`expression` - A variable that represents a [{memberof}](../{memberof}.md) class.\n\n" + + content += "## Parameters\n\n" + + if params: + content += "| **Name** | **Required/Optional** | **Data type** | **Default** | **Description** |\n" + content += "| ------------- | ------------- | ------------- | ------------- | ------------- |\n" + for param in params: + param_name = param.get('name', 'Unnamed') + param_types = param.get('type', {}).get('names', []) if param.get('type') else [] + param_types_md = generate_data_types_markdown(param_types, enumerations, classes) + param_desc = remove_line_breaks(correct_description(param.get('description', 'No description provided.'))) + param_required = "Required" if not param.get('optional') else "Optional" + param_default = correct_default_value(param.get('defaultvalue', ''), enumerations, classes) + + content += f"| {param_name} | {param_required} | {param_types_md} | {param_default} | {param_desc} |\n" + else: + content += "This method doesn't have any parameters.\n" + + content += "\n## Returns\n\n" + if returns: + return_type = ', '.join(returns[0].get('type', {}).get('names', [])) if returns[0].get('type') else 'Unknown' + + # Check for 
enumerations and classes in return type and add links if they exist + return_type_md = generate_data_types_markdown([return_type], enumerations, classes) + content += return_type_md + else: + content += "This method doesn't return any data." + + if example: + # Separate comment and code, and remove comment symbols + comment, code = example.split('```js', 1) + comment = remove_js_comments(comment) + content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n" + + return content + +def generate_enumeration_markdown(enumeration, enumerations, classes): + enum_name = enumeration['name'] + description = enumeration.get('description', 'No description provided.') + description = correct_description(description) + example = enumeration.get('example', '') + + content = f"# {enum_name}\n\n{description}\n\n" + + if 'TypeUnion' == enumeration['type']['parsedType']['type']: + content += "## Type\n\nEnumeration\n\n" + content += "## Values\n\n" + elements = enumeration['type']['parsedType']['elements'] + for element in elements: + element_name = element['name'] if element['type'] != 'NullLiteral' else 'null' + # Check if element is in enumerations or classes before adding link + if any(enum['name'] == element_name for enum in enumerations): + content += f"- [{element_name}](../../Enumeration/{element_name}.md)\n" + elif element_name in classes: + content += f"- [{element_name}](../../{element_name}/{element_name}.md)\n" + else: + content += f"- {element_name}\n" + elif enumeration['properties'] is not None: + content += "## Type\n\nObject\n\n" + content += "## Properties\n\n" + content += "| Name | Type | Description |\n" + content += "| ---- | ---- | ----------- |\n" + properties = enumeration['properties'] + for prop in properties: + prop_name = prop['name'] + prop_description = prop.get('description', 'No description provided.') + prop_description = remove_line_breaks(correct_description(prop_description)) + param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes) + content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" + else: + content += "## Type\n\n" + types = enumeration['type']['names'] + for t in types: + t = generate_data_types_markdown([t], enumerations, classes) + content += t + "\n\n" + + if example: + # Separate comment and code, and remove comment symbols + comment, code = example.split('```js', 1) + comment = remove_js_comments(comment) + content += f"\n\n## Example\n\n{comment}\n\n```javascript\n{code.strip()}\n" + + return content + +def process_doclets(data, output_dir): + classes = {} + enumerations = [] + + for doclet in data: + if doclet['kind'] == 'class': + class_name = doclet['name'] + classes[class_name] = [] + elif doclet['kind'] == 'function': + class_name = doclet.get('memberof') + if class_name: + if class_name not in classes: + classes[class_name] = [] + classes[class_name].append(doclet) + elif doclet['kind'] == 'typedef': + enumerations.append(doclet) + + # Process classes + for class_name, methods in classes.items(): + class_dir = os.path.join(output_dir, class_name) + methods_dir = os.path.join(class_dir, 'Methods') + os.makedirs(methods_dir, exist_ok=True) + + # Write class file + class_content = generate_class_markdown(class_name, methods) + write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content) + + # Write method files + for method in methods: + method_content = generate_method_markdown(method, enumerations, classes) + write_markdown_file(os.path.join(methods_dir, 
f"{method['name']}.md"), method_content) + + # Process enumerations + enum_dir = os.path.join(output_dir, 'Enumeration') + os.makedirs(enum_dir, exist_ok=True) + + for enum in enumerations: + enum_content = generate_enumeration_markdown(enum, enumerations, classes) + write_markdown_file(os.path.join(enum_dir, f"{enum['name']}.md"), enum_content) + + +def generate(output_dir): + print('Generating Markdown documentation...') + + generate_docs_json.generate(output_dir + 'tmp_json') + for editor_name in editors: + input_file = os.path.join(output_dir + 'tmp_json', editor_name + ".json") + os.makedirs(output_dir + f'/{editor_name.title()}', exist_ok=True) + + data = load_json(input_file) + process_doclets(data, output_dir + f'/{editor_name}') + + shutil.rmtree(output_dir + 'tmp_json') + print('Done') + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + default="../../../../office-js-api/" # Default value + ) + args = parser.parse_args() + + generate(args.destination) diff --git a/scripts/sdkjs_common/jsdoc/package.json b/scripts/sdkjs_common/jsdoc/package.json new file mode 100644 index 00000000..3bda5f24 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/package.json @@ -0,0 +1,7 @@ +{ + "dependencies": { + "jsdoc-to-markdown": "7.1.1", + "dmd": "6.1.0", + "handlebars": "4.7.7" + } +} From f012c604b8652fd4214aed03154940c214e9a84b Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Fri, 26 Jul 2024 20:42:09 +0700 Subject: [PATCH 03/26] Build json docs fixes --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index 5c678e5a..d52f4a62 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -2,6 +2,7 @@ import subprocess import json import argparse +import re # Конфигурационные файлы configs = [ @@ -64,7 +65,7 @@ def generate(output_dir): code_content = example_content # Форматирование содержимого для doclet['example'] - doclet['example'] = comment + "```js\n" + code_content + "\n```" + doclet['example'] = remove_js_comments(comment) + "```js\n" + remove_builder_lines(code_content) + "\n```" del doclet['see'] else: # Запись пропущенного примера в файл missing_examples.txt @@ -77,6 +78,18 @@ def generate(output_dir): print("Documentation generation completed.") +def remove_builder_lines(text): + lines = text.splitlines() # Разделить текст на строки + filtered_lines = [line for line in lines if not line.strip().startswith("builder.")] + return "\n".join(filtered_lines) + +def remove_js_comments(text): + # Удаляем однострочные комментарии, оставляя текст после // + text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) + # Удаляем многострочные комментарии, оставляя текст после /* + text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) + return text.strip() + if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate documentation") parser.add_argument( From f6d55d07c16d13c84036d57998c693427cef573c Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Fri, 26 Jul 2024 22:42:00 +0700 Subject: [PATCH 04/26] Fixed getting doclets for docs generation --- .../jsdoc/config/correct_doclets.js | 63 ++++++++++++++++--- 1 file 
changed, 56 insertions(+), 7 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js index 28919711..933819ff 100644 --- a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js +++ b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js @@ -1,23 +1,67 @@ exports.handlers = { processingComplete: function(e) { // Инициализация массива для сохранения отфильтрованных doclets - const filteredDoclets = []; + let filteredDoclets = []; const cleanName = name => name ? name.replace('~', '').replaceAll('"', '') : name; - // Итерация по doclets и фильтрация + const classesDocletsMap = {}; // доклеты классов пишем в конце + let passedClasses = []; // те которые проходят для редактора + + // набивка доступных классов текущего редактора + for (let i = 0; i < e.doclets.length; i++) { + const doclet = e.doclets[i]; + const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; + const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); + + const shouldAdd = + doclet.kind !== 'member' && + (!doclet.longname || doclet.longname.search('private') === -1) && + doclet.scope !== 'inner' && + (!isMethod || hasTypeofEditorsTag); + + if (shouldAdd) { + if (doclet.memberof && false == passedClasses.includes(cleanName(doclet.memberof))) { + passedClasses.push(cleanName(doclet.memberof)); + } + } + else if (doclet.kind == 'class') { + classesDocletsMap[cleanName(doclet.name)] = doclet; + } + } + + // проходимся по классам и удаляем из мапы те, что недоступны в редакторе + passedClasses = passedClasses.filter(className => { + const doclet = classesDocletsMap[className]; + if (!doclet) { + return true; + } + + const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors')); + + // класс пропускаем если нет тега редактора или текущий редактор есть среди тегов + const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR)); + return isPassed; + }); + + for (let i = 0; i < e.doclets.length; i++) { const doclet = e.doclets[i]; const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); - const shouldAddMethod = + const shouldAdd = doclet.kind !== 'member' && (!doclet.longname || doclet.longname.search('private') === -1) && doclet.scope !== 'inner' && (!isMethod || hasTypeofEditorsTag); - if (shouldAddMethod) { + if (shouldAdd) { + // если класса нет в нашей мапе, значит мы его удалили сами -> недоступен в редакторе + if (false == passedClasses.includes(cleanName(doclet.memberof))) { + continue; + } + // Оставляем только нужные поля doclet.memberof = cleanName(doclet.memberof); doclet.longname = cleanName(doclet.longname); @@ -77,14 +121,19 @@ exports.handlers = { see: doclet.see }; - if (!doclet.see) { - delete doclet.see; + if (!filteredDoclet.see) { + delete filteredDoclet.see; } // Добавляем отфильтрованный doclet в массив filteredDoclets.push(filteredDoclet); } else if (doclet.kind == 'class') { + // если класса нет в нашей мапе, значит мы его удалили сами -> недоступен в редакторе + if (false == passedClasses.includes(cleanName(doclet.name))) { + continue; + } + const filteredDoclet = { comment: doclet.comment, description: doclet.description, 
@@ -99,7 +148,7 @@ exports.handlers = { } : doclet.meta, see: doclet.see || undefined }; - + filteredDoclets.push(filteredDoclet); } } From efcfb00239c4ba04490287c86b3720af3c88f6d3 Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Mon, 29 Jul 2024 13:27:41 +0700 Subject: [PATCH 05/26] Fixed docs generation scripts --- scripts/sdkjs_common/jsdoc/config/cell.json | 3 +- .../jsdoc/config/correct_doclets.js | 75 ++++++++++++++++--- scripts/sdkjs_common/jsdoc/config/forms.json | 2 +- scripts/sdkjs_common/jsdoc/config/slide.json | 3 +- .../sdkjs_common/jsdoc/generate_docs_json.py | 1 - .../sdkjs_common/jsdoc/generate_docs_md.py | 38 +++++++--- 6 files changed, 97 insertions(+), 25 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/config/cell.json b/scripts/sdkjs_common/jsdoc/config/cell.json index f30d4b66..2403a820 100644 --- a/scripts/sdkjs_common/jsdoc/config/cell.json +++ b/scripts/sdkjs_common/jsdoc/config/cell.json @@ -5,7 +5,8 @@ "plugins": ["./correct_doclets.js"], "opts": { "destination": "./out", - "recurse": true + "recurse": true, + "encoding": "utf8" }, "templates": { "json": { diff --git a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js index 933819ff..dfd5073a 100644 --- a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js +++ b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js @@ -8,6 +8,23 @@ exports.handlers = { const classesDocletsMap = {}; // доклеты классов пишем в конце let passedClasses = []; // те которые проходят для редактора + // Убираем повторения оставляя посление doclets + const latestDoclets = {}; + e.doclets.forEach(doclet => { + const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; + const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); + + const shouldAddMethod = + doclet.kind !== 'member' && + (!doclet.longname || doclet.longname.search('private') === -1) && + doclet.scope !== 'inner' && hasTypeofEditorsTag; + + if (shouldAddMethod || doclet.kind == 'typedef' || doclet.kind == 'class') { + latestDoclets[doclet.longname] = doclet; + } + }); + e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets)); + // набивка доступных классов текущего редактора for (let i = 0; i < e.doclets.length; i++) { const doclet = e.doclets[i]; @@ -44,19 +61,17 @@ exports.handlers = { return isPassed; }); - for (let i = 0; i < e.doclets.length; i++) { const doclet = e.doclets[i]; const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; const hasTypeofEditorsTag = isMethod && doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value.includes(process.env.EDITOR)); - const shouldAdd = + const shouldAddMethod = doclet.kind !== 'member' && (!doclet.longname || doclet.longname.search('private') === -1) && - doclet.scope !== 'inner' && - (!isMethod || hasTypeofEditorsTag); + doclet.scope !== 'inner' && hasTypeofEditorsTag; - if (shouldAdd) { + if (shouldAddMethod) { // если класса нет в нашей мапе, значит мы его удалили сами -> недоступен в редакторе if (false == passedClasses.includes(cleanName(doclet.memberof))) { continue; @@ -121,10 +136,6 @@ exports.handlers = { see: doclet.see }; - if (!filteredDoclet.see) { - delete filteredDoclet.see; - } - // Добавляем отфильтрованный doclet в массив filteredDoclets.push(filteredDoclet); } @@ -146,9 +157,55 @@ exports.handlers = { lineno: doclet.meta.lineno, columnno: doclet.meta.columnno } : doclet.meta, + 
properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, see: doclet.see || undefined }; + filteredDoclets.push(filteredDoclet); + } + else if (doclet.kind == 'typedef') { + const filteredDoclet = { + comment: doclet.comment, + description: doclet.description, + name: cleanName(doclet.name), + longname: cleanName(doclet.longname), + kind: doclet.kind, + scope: "global", + + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + + see: doclet.see, + type: doclet.type ? { + names: doclet.type.names, + parsedType: doclet.type.parsedType + } : doclet.type + }; + filteredDoclets.push(filteredDoclet); } } diff --git a/scripts/sdkjs_common/jsdoc/config/forms.json b/scripts/sdkjs_common/jsdoc/config/forms.json index c4e83886..d39d5319 100644 --- a/scripts/sdkjs_common/jsdoc/config/forms.json +++ b/scripts/sdkjs_common/jsdoc/config/forms.json @@ -1,6 +1,6 @@ { "source": { - "include": ["../../../../sdkjs-forms/apiBuilder.js"] + "include": ["../../../../sdkjs/word/apiBuilder.js", "../../../../sdkjs-forms/apiBuilder.js"] }, "plugins": ["./correct_doclets.js"], "opts": { diff --git a/scripts/sdkjs_common/jsdoc/config/slide.json b/scripts/sdkjs_common/jsdoc/config/slide.json index 2c8532b3..96b5dbf7 100644 --- a/scripts/sdkjs_common/jsdoc/config/slide.json +++ b/scripts/sdkjs_common/jsdoc/config/slide.json @@ -5,7 +5,8 @@ "plugins": ["./correct_doclets.js"], "opts": { "destination": "./out", - "recurse": true + "recurse": true, + "encoding": "utf8" }, "templates": { "json": { diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index d52f4a62..bc17b3da 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -66,7 +66,6 @@ def generate(output_dir): # Форматирование содержимого для doclet['example'] doclet['example'] = remove_js_comments(comment) + "```js\n" + remove_builder_lines(code_content) + "\n```" - del doclet['see'] else: # Запись пропущенного примера в файл missing_examples.txt with open(missing_examples_file, 'a', encoding='utf-8') as missing_file: diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md.py b/scripts/sdkjs_common/jsdoc/generate_docs_md.py index a8d9dd99..84a78ff4 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_md.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_md.py @@ -77,8 +77,11 @@ def replace_with_links(match): return re.sub(r'<([^<>]+)>', replace_with_links, param_types_md) -def generate_class_markdown(class_name, methods): +def generate_class_markdown(class_name, methods, properties, enumerations, classes): content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n" + + content += generate_properties_markdown(properties, enumerations, classes) + content += "## Methods\n\n" for method in methods: method_name = method['name'] @@ -137,6 +140,24 @@ def 
generate_method_markdown(method, enumerations, classes): return content +def generate_properties_markdown(properties, enumerations, classes): + if (properties is None): + return '' + + content = "## Properties\n\n" + content += "| Name | Type | Description |\n" + content += "| ---- | ---- | ----------- |\n" + for prop in properties: + prop_name = prop['name'] + prop_description = prop.get('description', 'No description provided.') + prop_description = remove_line_breaks(correct_description(prop_description)) + param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes) + content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" + content += "\n" + + return content + + def generate_enumeration_markdown(enumeration, enumerations, classes): enum_name = enumeration['name'] description = enumeration.get('description', 'No description provided.') @@ -160,16 +181,7 @@ def generate_enumeration_markdown(enumeration, enumerations, classes): content += f"- {element_name}\n" elif enumeration['properties'] is not None: content += "## Type\n\nObject\n\n" - content += "## Properties\n\n" - content += "| Name | Type | Description |\n" - content += "| ---- | ---- | ----------- |\n" - properties = enumeration['properties'] - for prop in properties: - prop_name = prop['name'] - prop_description = prop.get('description', 'No description provided.') - prop_description = remove_line_breaks(correct_description(prop_description)) - param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes) - content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" + content += generate_properties_markdown(enumeration['properties'], enumerations, classes) else: content += "## Type\n\n" types = enumeration['type']['names'] @@ -187,12 +199,14 @@ def generate_enumeration_markdown(enumeration, enumerations, classes): def process_doclets(data, output_dir): classes = {} + classes_props = {} enumerations = [] for doclet in data: if doclet['kind'] == 'class': class_name = doclet['name'] classes[class_name] = [] + classes_props[class_name] = doclet.get('properties', None) elif doclet['kind'] == 'function': class_name = doclet.get('memberof') if class_name: @@ -209,7 +223,7 @@ def process_doclets(data, output_dir): os.makedirs(methods_dir, exist_ok=True) # Write class file - class_content = generate_class_markdown(class_name, methods) + class_content = generate_class_markdown(class_name, methods, classes_props[class_name], enumerations, classes) write_markdown_file(os.path.join(class_dir, f"{class_name}.md"), class_content) # Write method files From 13db6d3155069f3014957393f233477db572167c Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Mon, 29 Jul 2024 14:15:46 +0700 Subject: [PATCH 06/26] Fixed path in jsdoc generation script --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index bc17b3da..282023ed 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -50,7 +50,7 @@ def generate(output_dir): for doclet in data: if 'see' in doclet: if doclet['see'] is not None: - file_path = 'C:\\Users\\khrom\\Desktop\\Onlyoffice\\' + doclet['see'][0] + file_path = '../../../../' + doclet['see'][0] if os.path.exists(file_path): with open(file_path, 'r', encoding='utf-8') as see_file: example_content = see_file.read() From 
d6b5dc08309b64a996eafd6f252d745457d9df92 Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Mon, 29 Jul 2024 15:35:26 +0700 Subject: [PATCH 07/26] Fixed comments --- .../jsdoc/config/correct_doclets.js | 24 ++++++++-------- .../sdkjs_common/jsdoc/generate_docs_json.py | 28 +++++++++---------- .../sdkjs_common/jsdoc/generate_docs_md.py | 28 +++++++++---------- 3 files changed, 39 insertions(+), 41 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js index dfd5073a..55b87d27 100644 --- a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js +++ b/scripts/sdkjs_common/jsdoc/config/correct_doclets.js @@ -1,14 +1,14 @@ exports.handlers = { processingComplete: function(e) { - // Инициализация массива для сохранения отфильтрованных doclets + // array for filtered doclets let filteredDoclets = []; const cleanName = name => name ? name.replace('~', '').replaceAll('"', '') : name; - const classesDocletsMap = {}; // доклеты классов пишем в конце - let passedClasses = []; // те которые проходят для редактора + const classesDocletsMap = {}; // doclets for classes write at the end + let passedClasses = []; // passed classes for current editor - // Убираем повторения оставляя посление doclets + // Remove dublicates doclets const latestDoclets = {}; e.doclets.forEach(doclet => { const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; @@ -25,7 +25,7 @@ exports.handlers = { }); e.doclets.splice(0, e.doclets.length, ...Object.values(latestDoclets)); - // набивка доступных классов текущего редактора + // check available classess for current editor for (let i = 0; i < e.doclets.length; i++) { const doclet = e.doclets[i]; const isMethod = doclet.kind === 'function' || doclet.kind === 'method'; @@ -47,7 +47,7 @@ exports.handlers = { } } - // проходимся по классам и удаляем из мапы те, что недоступны в редакторе + // remove unavailave classes in current editor passedClasses = passedClasses.filter(className => { const doclet = classesDocletsMap[className]; if (!doclet) { @@ -56,7 +56,7 @@ exports.handlers = { const hasTypeofEditorsTag = !!(doclet.tags && doclet.tags.some(tag => tag.title === 'typeofeditors')); - // класс пропускаем если нет тега редактора или текущий редактор есть среди тегов + // class is passes if there is no editor tag or the current editor is among the tags const isPassed = false == hasTypeofEditorsTag || doclet.tags.some(tag => tag.title === 'typeofeditors' && tag.value && tag.value.includes(process.env.EDITOR)); return isPassed; }); @@ -72,12 +72,12 @@ exports.handlers = { doclet.scope !== 'inner' && hasTypeofEditorsTag; if (shouldAddMethod) { - // если класса нет в нашей мапе, значит мы его удалили сами -> недоступен в редакторе + // if the class is not in our map, then we deleted it ourselves -> not available in the editor if (false == passedClasses.includes(cleanName(doclet.memberof))) { continue; } - // Оставляем только нужные поля + // We leave only the necessary fields doclet.memberof = cleanName(doclet.memberof); doclet.longname = cleanName(doclet.longname); doclet.name = cleanName(doclet.name); @@ -136,11 +136,11 @@ exports.handlers = { see: doclet.see }; - // Добавляем отфильтрованный doclet в массив + // Add the filtered doclet to the array filteredDoclets.push(filteredDoclet); } else if (doclet.kind == 'class') { - // если класса нет в нашей мапе, значит мы его удалили сами -> недоступен в редакторе + // if the class is not in our map, then we deleted it ourselves -> not available in 
the editor if (false == passedClasses.includes(cleanName(doclet.name))) { continue; } @@ -210,7 +210,7 @@ exports.handlers = { } } - // Заменяем doclets на отфильтрованный массив + // Replace doclets with a filtered array e.doclets.splice(0, e.doclets.length, ...filteredDoclets); } }; diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index 282023ed..6ef90c74 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -4,7 +4,7 @@ import argparse import re -# Конфигурационные файлы +# Configuration files configs = [ "./config/word.json", "./config/cell.json", @@ -25,11 +25,11 @@ def generate(output_dir): if not os.path.exists(output_dir): os.makedirs(output_dir) - # Пересоздание файла missing_examples.txt + # Recreate missing_examples.txt file with open(missing_examples_file, 'w', encoding='utf-8') as f: f.write('') - # Генерация json документации + # Generate JSON documentation for config in configs: editor_name = config.split('/')[-1].replace('.json', '') output_file = os.path.join(output_dir, editor_name + ".json") @@ -37,16 +37,16 @@ def generate(output_dir): print(f"Generating {editor_name}.json: {command}") subprocess.run(command, shell=True) - # дозапись примеров в json документацию + # Append examples to JSON documentation for config in configs: editor_name = config.split('/')[-1].replace('.json', '') output_file = os.path.join(output_dir, editor_name + ".json") - # Чтение JSON файла + # Read the JSON file with open(output_file, 'r', encoding='utf-8') as f: data = json.load(f) - # Модификация JSON данных + # Modify JSON data for doclet in data: if 'see' in doclet: if doclet['see'] is not None: @@ -55,7 +55,7 @@ def generate(output_dir): with open(file_path, 'r', encoding='utf-8') as see_file: example_content = see_file.read() - # Извлечение первой строки как комментария, если она существует + # Extract the first line as a comment if it exists lines = example_content.split('\n') if lines[0].startswith('//'): comment = lines[0] + '\n' @@ -64,28 +64,28 @@ def generate(output_dir): comment = '' code_content = example_content - # Форматирование содержимого для doclet['example'] + # Format content for doclet['example'] doclet['example'] = remove_js_comments(comment) + "```js\n" + remove_builder_lines(code_content) + "\n```" else: - # Запись пропущенного примера в файл missing_examples.txt + # Record missing examples in missing_examples.txt with open(missing_examples_file, 'a', encoding='utf-8') as missing_file: missing_file.write(f"{file_path}\n") - # Запись измененного JSON файла обратно + # Write the modified JSON file back with open(output_file, 'w', encoding='utf-8') as f: json.dump(data, f, ensure_ascii=False, indent=4) print("Documentation generation completed.") def remove_builder_lines(text): - lines = text.splitlines() # Разделить текст на строки + lines = text.splitlines() # Split text into lines filtered_lines = [line for line in lines if not line.strip().startswith("builder.")] return "\n".join(filtered_lines) def remove_js_comments(text): - # Удаляем однострочные комментарии, оставляя текст после // + # Remove single-line comments, leaving text after // text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) - # Удаляем многострочные комментарии, оставляя текст после /* + # Remove multi-line comments, leaving text after /* text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) return text.strip() @@ -100,4 +100,4 @@ def remove_js_comments(text): ) 
args = parser.parse_args() - generate(args.destination) \ No newline at end of file + generate(args.destination) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md.py b/scripts/sdkjs_common/jsdoc/generate_docs_md.py index 84a78ff4..f7345725 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_md.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_md.py @@ -5,7 +5,7 @@ import argparse import generate_docs_json -# Конфигурационные файлы +# Configuration files editors = [ "word", "cell", @@ -22,9 +22,9 @@ def write_markdown_file(file_path, content): md_file.write(content) def remove_js_comments(text): - # Удаляем однострочные комментарии, оставляя текст после // + # Remove single-line comments, leaving text after // text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) - # Удаляем многострочные комментарии, оставляя текст после /* + # Remove multi-line comments, leaving text after /* text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) return text.strip() @@ -32,14 +32,13 @@ def correct_description(string): if string is None: return 'No description provided.' - # Заменяем открывающий тег на ** + # Replace opening tag with ** string = re.sub(r'', '**', string) - # Заменяем закрывающий тег на ** + # Replace closing tag with ** string = re.sub(r'', '**', string) - # Заметка + # Note return re.sub(r'(.*?)', r'💡 \1', string, flags=re.DOTALL) - def correct_default_value(value, enumerations, classes): if value is None: return '' @@ -56,19 +55,19 @@ def correct_default_value(value, enumerations, classes): def remove_line_breaks(string): return re.sub(r'[\r\n]', '', string) -def generate_data_types_markdown(types, enumerations, classes): +def generate_data_types_markdown(types, enumerations, classes, root='../../'): param_types_md = ' |'.join(types) for enum in enumerations: if enum['name'] in types: - param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}](../../Enumeration/{enum['name']}.md)") + param_types_md = param_types_md.replace(enum['name'], f"[{enum['name']}]({root}Enumeration/{enum['name']}.md)") for cls in classes: if cls in types: - param_types_md = param_types_md.replace(cls, f"[{cls}](../../{cls}/{cls}.md)") + param_types_md = param_types_md.replace(cls, f"[{cls}]({root}{cls}/{cls}.md)") def replace_with_links(match): element = match.group(1).strip() - base_type = element.split('.')[0] # Берем только первую часть до точки, если она есть + base_type = element.split('.')[0] # Take only the first part before the dot, if any if any(enum['name'] == base_type for enum in enumerations): return f"<[{element}](../../Enumeration/{base_type}.md)>" elif base_type in classes: @@ -80,7 +79,7 @@ def replace_with_links(match): def generate_class_markdown(class_name, methods, properties, enumerations, classes): content = f"# {class_name}\n\nRepresents the {class_name} class.\n\n" - content += generate_properties_markdown(properties, enumerations, classes) + content += generate_properties_markdown(properties, enumerations, classes, '../') content += "## Methods\n\n" for method in methods: @@ -140,7 +139,7 @@ def generate_method_markdown(method, enumerations, classes): return content -def generate_properties_markdown(properties, enumerations, classes): +def generate_properties_markdown(properties, enumerations, classes, root='../../'): if (properties is None): return '' @@ -151,7 +150,7 @@ def generate_properties_markdown(properties, enumerations, classes): prop_name = prop['name'] prop_description = prop.get('description', 'No description provided.') prop_description = 
remove_line_breaks(correct_description(prop_description)) - param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes) + param_types_md = generate_data_types_markdown(prop['type']['names'], enumerations, classes, root) content += f"| {prop_name} | {param_types_md} | {prop_description} |\n" content += "\n" @@ -239,7 +238,6 @@ def process_doclets(data, output_dir): enum_content = generate_enumeration_markdown(enum, enumerations, classes) write_markdown_file(os.path.join(enum_dir, f"{enum['name']}.md"), enum_content) - def generate(output_dir): print('Generating Markdown documentation...') From dcfde5b5e7b8c50cdabca58a7c3fe1bda770659b Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Mon, 29 Jul 2024 13:57:17 +0300 Subject: [PATCH 08/26] Refactoring --- .gitignore | 2 ++ scripts/base.py | 12 ++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 37983f69..fa6a77f0 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,5 @@ tests/puppeteer/node_modules tests/puppeteer/work_directory tests/puppeteer/package.json tests/puppeteer/package-lock.json +scripts/sdkjs_common/jsdoc/node_modules +scripts/sdkjs_common/jsdoc/package-lock.json diff --git a/scripts/base.py b/scripts/base.py index afcc7789..00fa0c97 100644 --- a/scripts/base.py +++ b/scripts/base.py @@ -384,7 +384,7 @@ def cmd2(prog, args=[], is_no_errors=False): sys.exit("Error (" + prog + "): " + str(ret)) return ret -def cmd_exe(prog, args): +def cmd_exe(prog, args, is_no_errors=False): prog_dir = os.path.dirname(prog) env_dir = os.environ if ("linux" == host_platform()): @@ -406,7 +406,7 @@ def cmd_exe(prog, args): command += (" \"" + arg + "\"") process = subprocess.Popen(command, stderr=subprocess.STDOUT, shell=True, env=env_dir) ret = process.wait() - if ret != 0: + if ret != 0 and True != is_no_errors: sys.exit("Error (" + prog + "): " + str(ret)) return ret @@ -1046,15 +1046,15 @@ def web_apps_addons_param(): def download(url, dst): return cmd_exe("curl", ["-L", "-o", dst, url]) -def extract(src, dst): +def extract(src, dst, is_no_errors=False): app = "7za" if ("mac" == host_platform()) else "7z" - return cmd_exe(app, ["x", "-y", src, "-o" + dst]) + return cmd_exe(app, ["x", "-y", src, "-o" + dst], is_no_errors) -def extract_unicode(src, dst): +def extract_unicode(src, dst, is_no_errors=False): if "windows" == host_platform(): run_as_bat_win_isolate([u"chcp 65001", u"call 7z.exe x -y \"" + src + u"\" \"-o" + dst + u"\"", u"exit"]) return - return extract(src, dst) + return extract(src, dst, is_no_errors) def archive_folder(src, dst): app = "7za" if ("mac" == host_platform()) else "7z" From e29fd0ca095e436d60431a51d6472551796bb46b Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Mon, 29 Jul 2024 18:05:07 +0700 Subject: [PATCH 09/26] Added branch name to dist path for jsdoc json generation --- .../sdkjs_common/jsdoc/generate_docs_json.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index 6ef90c74..179c26bf 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -89,6 +89,25 @@ def remove_js_comments(text): text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) return text.strip() +def get_current_branch(path): + try: + # Navigate to the specified directory and get the current branch name + result = subprocess.run( + ["git", "rev-parse", "--abbrev-ref", "HEAD"], 
+ cwd=path, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True + ) + if result.returncode == 0: + return result.stdout.strip() + else: + print(f"Error: {result.stderr}") + return None + except Exception as e: + print(f"Exception: {e}") + return None + if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate documentation") parser.add_argument( @@ -100,4 +119,8 @@ def remove_js_comments(text): ) args = parser.parse_args() + branch_name = get_current_branch("../../../../sdkjs") + if branch_name: + args.destination = f"{args.destination}/{branch_name}" + generate(args.destination) From 8e7db87554b5d7ddb5b5bc3786c6db2fca30336d Mon Sep 17 00:00:00 2001 From: Semyon Bezrukov Date: Wed, 31 Jul 2024 14:18:27 +0300 Subject: [PATCH 10/26] Refactoring linux packages deploy (#853) * Fix deploy desktop editors linux packages (#842) * Small fix * Fix make targets * Small fix * Refactoring linux packages deploy --- scripts/package_branding.py | 59 ++++++++++++++++++++++++++++++-- scripts/package_builder.py | 32 +++--------------- scripts/package_desktop.py | 67 +++---------------------------------- scripts/package_server.py | 41 ++++------------------- 4 files changed, 72 insertions(+), 127 deletions(-) diff --git a/scripts/package_branding.py b/scripts/package_branding.py index ec902e42..4cd9a805 100644 --- a/scripts/package_branding.py +++ b/scripts/package_branding.py @@ -30,6 +30,59 @@ builder_product_name = "Document Builder" if utils.is_linux(): - desktop_make_targets = ["deb", "rpm", "suse-rpm", "tar"] - builder_make_targets = ["deb", "rpm"] # tar - server_make_targets = ["deb", "rpm", "tar"] + builder_make_targets = [ + { + "make": "tar", + "src": "tar/*.tar*", + "dst": "builder/linux/generic/" + }, + { + "make": "deb", + "src": "deb/*.deb", + "dst": "builder/linux/debian/" + }, + { + "make": "rpm", + "src": "rpm/builddir/RPMS/*/*.rpm", + "dst": "builder/linux/rhel/" + } + ] + desktop_make_targets = [ + { + "make": "tar", + "src": "tar/*.tar*", + "dst": "desktop/linux/generic/" + }, + { + "make": "deb", + "src": "deb/*.deb", + "dst": "desktop/linux/debian/" + }, + { + "make": "rpm", + "src": "rpm/build/RPMS/*/*.rpm", + "dst": "desktop/linux/rhel/" + }, + { + "make": "rpm-suse", + "src": "rpm-suse/build/RPMS/*/*.rpm", + "dst": "desktop/linux/suse/" + } + ] + server_make_targets = [ + { + "make": "deb", + "src": "deb/*.deb", + "dst": "server/linux/debian/" + }, + { + "make": "rpm", + "src": "rpm/builddir/RPMS/*/*.rpm", + "dst": "server/linux/rhel/" + }, + { + "make": "tar", + "src": "*.tar*", + "dst": "server/linux/snap/" + } + ] diff --git a/scripts/package_builder.py b/scripts/package_builder.py index d7beeeea..ae9b9c06 100644 --- a/scripts/package_builder.py +++ b/scripts/package_builder.py @@ -137,7 +137,7 @@ def make_linux(): utils.set_cwd("document-builder-package") utils.log_h2("builder build") - make_args = branding.builder_make_targets + make_args = [t["make"] for t in branding.builder_make_targets] if common.platform == "linux_aarch64": make_args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: @@ -146,32 +146,10 @@ def make_linux(): utils.set_summary("builder build", ret) if common.deploy: - if ret: - if "tar" in branding.builder_make_targets: - utils.log_h2("builder tar deploy") - ret = s3_upload( - utils.glob_path("tar/*.tar.gz"), - "builder/linux/generic/") - utils.set_summary("builder tar deploy", ret) - if "deb" in branding.builder_make_targets: - utils.log_h2("builder deb deploy") - ret = s3_upload( - utils.glob_path("deb/*.deb"), - 
"builder/linux/debian/") - utils.set_summary("builder deb deploy", ret) - if "rpm" in branding.builder_make_targets: - utils.log_h2("builder rpm deploy") - ret = s3_upload( - utils.glob_path("rpm/builddir/RPMS/*/*.rpm"), - "builder/linux/rhel/") - utils.set_summary("builder rpm deploy", ret) - else: - if "tar" in branding.builder_make_targets: - utils.set_summary("builder tar deploy", False) - if "deb" in branding.builder_make_targets: - utils.set_summary("builder deb deploy", False) - if "rpm" in branding.builder_make_targets: - utils.set_summary("builder rpm deploy", False) + for t in branding.builder_make_targets: + utils.log_h2("builder " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("builder " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_desktop.py b/scripts/package_desktop.py index 5fc62370..c16af17d 100644 --- a/scripts/package_desktop.py +++ b/scripts/package_desktop.py @@ -329,7 +329,7 @@ def make_linux(): utils.set_cwd("desktop-apps/win-linux/package/linux") utils.log_h2("desktop build") - make_args = branding.desktop_make_targets + make_args = [t["make"] for t in branding.desktop_make_targets] if common.platform == "linux_aarch64": make_args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: @@ -337,68 +337,11 @@ def make_linux(): ret = utils.sh("make clean && make " + " ".join(make_args), verbose=True) utils.set_summary("desktop build", ret) - rpm_arch = "*" - if common.platform == "linux_aarch64": rpm_arch = "aarch64" - if common.deploy: - if ret: - utils.log_h2("desktop tar deploy") - if "tar" in branding.desktop_make_targets: - ret = s3_upload( - utils.glob_path("tar/*.tar*"), - "desktop/linux/generic/") - utils.set_summary("desktop tar deploy", ret) - if "deb" in branding.desktop_make_targets: - utils.log_h2("desktop deb deploy") - ret = s3_upload( - utils.glob_path("deb/*.deb"), - "desktop/linux/debian/") - utils.set_summary("desktop deb deploy", ret) - if "deb-astra" in branding.desktop_make_targets: - utils.log_h2("desktop deb-astra deploy") - ret = s3_upload( - utils.glob_path("deb-astra/*.deb"), - "desktop/linux/astra/") - utils.set_summary("desktop deb-astra deploy", ret) - if "rpm" in branding.desktop_make_targets: - utils.log_h2("desktop rpm deploy") - ret = s3_upload( - utils.glob_path("rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/rhel/") - utils.set_summary("desktop rpm deploy", ret) - if "suse-rpm" in branding.desktop_make_targets: - utils.log_h2("desktop suse-rpm deploy") - ret = s3_upload( - utils.glob_path("suse-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/suse/") - utils.set_summary("desktop suse-rpm deploy", ret) - if "apt-rpm" in branding.desktop_make_targets: - utils.log_h2("desktop apt-rpm deploy") - ret = s3_upload( - utils.glob_path("apt-rpm/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/altlinux/") - utils.set_summary("desktop apt-rpm deploy", ret) - if "urpmi" in branding.desktop_make_targets: - utils.log_h2("desktop urpmi deploy") - ret = s3_upload( - utils.glob_path("urpmi/builddir/RPMS/" + rpm_arch + "/*.rpm"), - "desktop/linux/rosa/") - utils.set_summary("desktop urpmi deploy", ret) - else: - if "tar" in branding.desktop_make_targets: - utils.set_summary("desktop tar deploy", False) - if "deb" in branding.desktop_make_targets: - utils.set_summary("desktop deb deploy", False) - if "deb-astra" in branding.desktop_make_targets: - utils.set_summary("desktop deb-astra deploy", False) - if "rpm" 
in branding.desktop_make_targets: - utils.set_summary("desktop rpm deploy", False) - if "suse-rpm" in branding.desktop_make_targets: - utils.set_summary("desktop suse-rpm deploy", False) - if "apt-rpm" in branding.desktop_make_targets: - utils.set_summary("desktop apt-rpm deploy", False) - if "urpmi" in branding.desktop_make_targets: - utils.set_summary("desktop urpmi deploy", False) + for t in branding.desktop_make_targets: + utils.log_h2("desktop " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("desktop " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return diff --git a/scripts/package_server.py b/scripts/package_server.py index 4c475fcd..a72874a9 100644 --- a/scripts/package_server.py +++ b/scripts/package_server.py @@ -61,7 +61,8 @@ def make_linux(edition): utils.set_cwd("document-server-package") utils.log_h2("server " + edition + " build") - make_args = branding.server_make_targets + ["-e", "PRODUCT_NAME=" + product_name] + make_args = [t["make"] for t in branding.server_make_targets] + make_args += ["-e", "PRODUCT_NAME=" + product_name] if common.platform == "linux_aarch64": make_args += ["-e", "UNAME_M=aarch64"] if not branding.onlyoffice: @@ -70,40 +71,10 @@ def make_linux(edition): utils.set_summary("server " + edition + " build", ret) if common.deploy: - if ret: - if "deb" in branding.server_make_targets: - utils.log_h2("server " + edition + " deb deploy") - ret = s3_upload( - utils.glob_path("deb/*.deb"), - "server/linux/debian/") - utils.set_summary("server " + edition + " deb deploy", ret) - if "rpm" in branding.server_make_targets: - utils.log_h2("server " + edition + " rpm deploy") - ret = s3_upload( - utils.glob_path("rpm/builddir/RPMS/*/*.rpm"), - "server/linux/rhel/") - utils.set_summary("server " + edition + " rpm deploy", ret) - if "apt-rpm" in branding.server_make_targets: - utils.log_h2("server " + edition + " apt-rpm deploy") - ret = s3_upload( - utils.glob_path("apt-rpm/builddir/RPMS/*/*.rpm"), - "server/linux/altlinux/") - utils.set_summary("server " + edition + " apt-rpm deploy", ret) - if "tar" in branding.server_make_targets: - utils.log_h2("server " + edition + " snap deploy") - ret = s3_upload( - utils.glob_path("*.tar.gz"), - "server/linux/snap/") - utils.set_summary("server " + edition + " snap deploy", ret) - else: - if "deb" in branding.server_make_targets: - utils.set_summary("server " + edition + " deb deploy", False) - if "rpm" in branding.server_make_targets: - utils.set_summary("server " + edition + " rpm deploy", False) - if "apt-rpm" in branding.server_make_targets: - utils.set_summary("server " + edition + " apt-rpm deploy", False) - if "tar" in branding.server_make_targets: - utils.set_summary("server " + edition + " snap deploy", False) + for t in branding.server_make_targets: + utils.log_h2("server " + edition + " " + t["make"] + " deploy") + ret = s3_upload(utils.glob_path(t["src"]), t["dst"]) + utils.set_summary("server " + edition + " " + t["make"] + " deploy", ret) utils.set_cwd(common.workspace_dir) return From 0983e67f2104009f9a2e7143153c036acfb63a40 Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Fri, 2 Aug 2024 20:22:34 +0700 Subject: [PATCH 11/26] [jsdoc] Fixed api docs generation --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index 179c26bf..653653a7 100644 --- 
a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -50,7 +50,10 @@ def generate(output_dir): for doclet in data: if 'see' in doclet: if doclet['see'] is not None: - file_path = '../../../../' + doclet['see'][0] + if editor_name == 'forms': + file_path = '../../../../' + doclet['see'][0].replace('{Editor}', 'Word') + else: + file_path = '../../../../' + doclet['see'][0].replace('{Editor}', editor_name.title()) if os.path.exists(file_path): with open(file_path, 'r', encoding='utf-8') as see_file: example_content = see_file.read() From eeca17e78b0b1cb042e9146ebc88d36500450c4c Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Fri, 2 Aug 2024 21:26:44 +0700 Subject: [PATCH 12/26] Fixed jsdoc md generation --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 15 +++++++++++---- scripts/sdkjs_common/jsdoc/generate_docs_md.py | 2 +- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index 653653a7..b4710bba 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -19,7 +19,7 @@ "forms": "CFE" } -def generate(output_dir): +def generate(output_dir, md=False): missing_examples_file = f'{output_dir}/missing_examples.txt' if not os.path.exists(output_dir): @@ -51,9 +51,12 @@ def generate(output_dir): if 'see' in doclet: if doclet['see'] is not None: if editor_name == 'forms': - file_path = '../../../../' + doclet['see'][0].replace('{Editor}', 'Word') + doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word') else: - file_path = '../../../../' + doclet['see'][0].replace('{Editor}', editor_name.title()) + doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title()) + + file_path = '../../../../' + doclet['see'][0] + if os.path.exists(file_path): with open(file_path, 'r', encoding='utf-8') as see_file: example_content = see_file.read() @@ -68,7 +71,11 @@ def generate(output_dir): code_content = example_content # Format content for doclet['example'] - doclet['example'] = remove_js_comments(comment) + "```js\n" + remove_builder_lines(code_content) + "\n```" + doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" + + if md == False: + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name.title()}"}}\n{code_content}\n```' + else: # Record missing examples in missing_examples.txt with open(missing_examples_file, 'a', encoding='utf-8') as missing_file: diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_md.py b/scripts/sdkjs_common/jsdoc/generate_docs_md.py index f7345725..ea4dc39d 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_md.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_md.py @@ -241,7 +241,7 @@ def process_doclets(data, output_dir): def generate(output_dir): print('Generating Markdown documentation...') - generate_docs_json.generate(output_dir + 'tmp_json') + generate_docs_json.generate(output_dir + 'tmp_json', md=True) for editor_name in editors: input_file = os.path.join(output_dir + 'tmp_json', editor_name + ".json") os.makedirs(output_dir + f'/{editor_name.title()}', exist_ok=True) From 9a44dae4f94ad90f4482cbb74258eef44adeeeac Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Mon, 5 Aug 2024 17:28:06 +0700 Subject: [PATCH 13/26] Jsdocs api plugins generation script --- scripts/sdkjs_common/jsdoc/README.md | 21 ++- .../jsdoc/config/{ 
=> builder}/cell.json | 0
 .../config/{ => builder}/correct_doclets.js | 0
 .../jsdoc/config/{ => builder}/forms.json | 0
 .../jsdoc/config/{ => builder}/slide.json | 0
 .../jsdoc/config/{ => builder}/word.json | 0
 .../jsdoc/config/plugins/cell.json | 16 +++
 .../jsdoc/config/plugins/common.json | 16 +++
 .../jsdoc/config/plugins/correct_doclets.js | 85 +++++++++++
 .../jsdoc/config/plugins/forms.json | 16 +++
 .../jsdoc/config/plugins/slide.json | 16 +++
 .../jsdoc/config/plugins/word.json | 16 +++
 .../sdkjs_common/jsdoc/generate_docs_json.py | 18 +--
 .../jsdoc/generate_docs_plugins_json.py | 132 ++++++++++++++++++
 14 files changed, 326 insertions(+), 10 deletions(-)
 rename scripts/sdkjs_common/jsdoc/config/{ => builder}/cell.json (100%)
 rename scripts/sdkjs_common/jsdoc/config/{ => builder}/correct_doclets.js (100%)
 rename scripts/sdkjs_common/jsdoc/config/{ => builder}/forms.json (100%)
 rename scripts/sdkjs_common/jsdoc/config/{ => builder}/slide.json (100%)
 rename scripts/sdkjs_common/jsdoc/config/{ => builder}/word.json (100%)
 create mode 100644 scripts/sdkjs_common/jsdoc/config/plugins/cell.json
 create mode 100644 scripts/sdkjs_common/jsdoc/config/plugins/common.json
 create mode 100644 scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
 create mode 100644 scripts/sdkjs_common/jsdoc/config/plugins/forms.json
 create mode 100644 scripts/sdkjs_common/jsdoc/config/plugins/slide.json
 create mode 100644 scripts/sdkjs_common/jsdoc/config/plugins/word.json
 create mode 100644 scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py

diff --git a/scripts/sdkjs_common/jsdoc/README.md b/scripts/sdkjs_common/jsdoc/README.md
index 61ad80af..01c14063 100644
--- a/scripts/sdkjs_common/jsdoc/README.md
+++ b/scripts/sdkjs_common/jsdoc/README.md
@@ -1,7 +1,7 @@
 # Documentation Generation Guide

-This guide explains how to generate documentation for Onlyoffice API using the provided Python scripts, `generate_docs_json.py` and `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` files from the word, cell, and slide editors.
+This guide explains how to generate documentation for the Onlyoffice Builder/Plugins API using the provided Python scripts: `generate_docs_json.py`, `generate_docs_plugins_json.py`, `generate_docs_md.py`. These scripts are used to create JSON and Markdown documentation for the `apiBuilder.js` and `api_plugins.js` files from the word, cell, and slide editors.

 ## Prerequisites

@@ -24,7 +24,19 @@ This script generates JSON documentation based on the `apiBuilder.js` files.
   ```

 - **Parameters**:
-  - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/sdkjs/deploy/api_builder/json`.
+  - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/document-builder-declarations/document-builder`.
+
+### `generate_docs_plugins_json.py`
+
+This script generates JSON documentation based on the `api_plugins.js` files.
+
+- **Usage**:
+  ```bash
+  python generate_docs_plugins_json.py output_path
+  ```
+
+- **Parameters**:
+  - `output_path` (optional): The directory where the JSON documentation will be saved. If not specified, the default path is `Onlyoffice/document-builder-declarations/document-builder-plugin`.
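Both JSON generators can also be driven from Python instead of the shell. A minimal sketch (not part of this patch), assuming it runs from `scripts/sdkjs_common/jsdoc` with the npm dependencies installed; the output path is only an example:

```python
# Minimal sketch: calling the plugins generator programmatically instead of via the CLI.
# generate(output_dir, md=False) shells out to `npx jsdoc`, so the jsdoc toolchain must be
# available and the config paths are resolved relative to scripts/sdkjs_common/jsdoc.
import generate_docs_plugins_json

generate_docs_plugins_json.generate(
    "../../../../document-builder-declarations/document-builder-plugin")
```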
 ### `generate_docs_md.py`

@@ -45,6 +57,11 @@ To generate JSON documentation with the default output path:
 python generate_docs_json.py /path/to/save/json
 ```

+To generate plugins JSON documentation and specify a custom output path:
+```bash
+python generate_docs_plugins_json.py /path/to/save/json
+```
+
 To generate Markdown documentation and specify a custom output path:
 ```bash
 python generate_docs_md.py /path/to/save/markdown
diff --git a/scripts/sdkjs_common/jsdoc/config/cell.json b/scripts/sdkjs_common/jsdoc/config/builder/cell.json
similarity index 100%
rename from scripts/sdkjs_common/jsdoc/config/cell.json
rename to scripts/sdkjs_common/jsdoc/config/builder/cell.json
diff --git a/scripts/sdkjs_common/jsdoc/config/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js
similarity index 100%
rename from scripts/sdkjs_common/jsdoc/config/correct_doclets.js
rename to scripts/sdkjs_common/jsdoc/config/builder/correct_doclets.js
diff --git a/scripts/sdkjs_common/jsdoc/config/forms.json b/scripts/sdkjs_common/jsdoc/config/builder/forms.json
similarity index 100%
rename from scripts/sdkjs_common/jsdoc/config/forms.json
rename to scripts/sdkjs_common/jsdoc/config/builder/forms.json
diff --git a/scripts/sdkjs_common/jsdoc/config/slide.json b/scripts/sdkjs_common/jsdoc/config/builder/slide.json
similarity index 100%
rename from scripts/sdkjs_common/jsdoc/config/slide.json
rename to scripts/sdkjs_common/jsdoc/config/builder/slide.json
diff --git a/scripts/sdkjs_common/jsdoc/config/word.json b/scripts/sdkjs_common/jsdoc/config/builder/word.json
similarity index 100%
rename from scripts/sdkjs_common/jsdoc/config/word.json
rename to scripts/sdkjs_common/jsdoc/config/builder/word.json
diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/cell.json b/scripts/sdkjs_common/jsdoc/config/plugins/cell.json
new file mode 100644
index 00000000..b49b71ea
--- /dev/null
+++ b/scripts/sdkjs_common/jsdoc/config/plugins/cell.json
@@ -0,0 +1,16 @@
+{
+    "source": {
+        "include": ["../../../../sdkjs/cell/api_plugins.js"]
+    },
+    "plugins": ["./correct_doclets.js"],
+    "opts": {
+        "destination": "./out",
+        "recurse": true,
+        "encoding": "utf8"
+    },
+    "templates": {
+        "json": {
+            "pretty": true
+        }
+    }
+}
diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/common.json b/scripts/sdkjs_common/jsdoc/config/plugins/common.json
new file mode 100644
index 00000000..4bf510c2
--- /dev/null
+++ b/scripts/sdkjs_common/jsdoc/config/plugins/common.json
@@ -0,0 +1,16 @@
+{
+    "source": {
+        "include": ["../../../../sdkjs/common/plugins/plugin_base_api.js" ,"../../../../sdkjs/common/apiBase_plugins.js"]
+    },
+    "plugins": ["./correct_doclets.js"],
+    "opts": {
+        "destination": "./out",
+        "recurse": true,
+        "encoding": "utf8"
+    },
+    "templates": {
+        "json": {
+            "pretty": true
+        }
+    }
+}
\ No newline at end of file
diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js b/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
new file mode 100644
index 00000000..bea3873c
--- /dev/null
+++ b/scripts/sdkjs_common/jsdoc/config/plugins/correct_doclets.js
@@ -0,0 +1,85 @@
+exports.handlers = {
+    processingComplete: function(e) {
+        const filteredDoclets = [];
+
+        function checkNullProps(oDoclet) {
+            for (let key of Object.keys(oDoclet)) {
+                if (oDoclet[key] == null) {
+                    delete oDoclet[key];
+                }
+                if (typeof(oDoclet[key]) == "object") {
+                    checkNullProps(oDoclet[key]);
+                }
+            }
+        }
+
+        for (let i = 0; i < e.doclets.length; i++) {
+            const doclet = e.doclets[i];
+            if (true == doclet.undocumented ||
doclet.kind == 'package') { + continue; + } + + const filteredDoclet = { + comment: doclet.comment, + + meta: doclet.meta ? { + lineno: doclet.meta.lineno, + columnno: doclet.meta.columnno + } : doclet.meta, + + kind: doclet.kind, + since: doclet.since, + name: doclet.name, + type: doclet.type ? { + names: doclet.type.names, + parsedType: doclet.type.parsedType + } : doclet.type, + + description: doclet.description, + memberof: doclet.memberof, + + properties: doclet.properties ? doclet.properties.map(property => ({ + type: property.type ? { + names: property.type.names, + parsedType: property.type.parsedType + } : property.type, + + name: property.name, + description: property.description, + optional: property.optional, + defaultvalue: property.defaultvalue + })) : doclet.properties, + + longname: doclet.longname, + scope: doclet.scope, + alias: doclet.alias, + + params: doclet.params ? doclet.params.map(param => ({ + type: param.type ? { + names: param.type.names, + parsedType: param.type.parsedType + } : param.type, + + name: param.name, + description: param.description, + optional: param.optional, + defaultvalue: param.defaultvalue + })) : doclet.params, + + returns: doclet.returns ? doclet.returns.map(returnObj => ({ + type: { + names: returnObj.type.names, + parsedType: returnObj.type.parsedType + } + })) : doclet.returns, + see: doclet.see + }; + + checkNullProps(filteredDoclet) + + filteredDoclets.push(filteredDoclet); + } + + e.doclets.splice(0, e.doclets.length, ...filteredDoclets); + } +}; \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/forms.json b/scripts/sdkjs_common/jsdoc/config/plugins/forms.json new file mode 100644 index 00000000..65cb6a2e --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/forms.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs-forms/apiPlugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/slide.json b/scripts/sdkjs_common/jsdoc/config/plugins/slide.json new file mode 100644 index 00000000..d0151716 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/slide.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/slide/api_plugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} diff --git a/scripts/sdkjs_common/jsdoc/config/plugins/word.json b/scripts/sdkjs_common/jsdoc/config/plugins/word.json new file mode 100644 index 00000000..b06743ea --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/config/plugins/word.json @@ -0,0 +1,16 @@ +{ + "source": { + "include": ["../../../../sdkjs/word/api_plugins.js", "../../../../sdkjs-forms/apiPlugins.js"] + }, + "plugins": ["./correct_doclets.js"], + "opts": { + "destination": "./out", + "recurse": true, + "encoding": "utf8" + }, + "templates": { + "json": { + "pretty": true + } + } +} \ No newline at end of file diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index b4710bba..e5c73d95 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -4,12 +4,14 @@ import argparse import re +root = '../../../..' 
+ # Configuration files configs = [ - "./config/word.json", - "./config/cell.json", - "./config/slide.json", - "./config/forms.json" + "./config/builder/word.json", + "./config/builder/cell.json", + "./config/builder/slide.json", + "./config/builder/forms.json" ] editors_maps = { @@ -55,7 +57,7 @@ def generate(output_dir, md=False): else: doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title()) - file_path = '../../../../' + doclet['see'][0] + file_path = f'{root}/' + doclet['see'][0] if os.path.exists(file_path): with open(file_path, 'r', encoding='utf-8') as see_file: @@ -85,7 +87,7 @@ def generate(output_dir, md=False): with open(output_file, 'w', encoding='utf-8') as f: json.dump(data, f, ensure_ascii=False, indent=4) - print("Documentation generation completed.") + print("Documentation generation for builder completed.") def remove_builder_lines(text): lines = text.splitlines() # Split text into lines @@ -125,11 +127,11 @@ def get_current_branch(path): type=str, help="Destination directory for the generated documentation", nargs='?', # Indicates the argument is optional - default="../../../../document-builder-declarations/document-builder" # Default value + default=f"{root}/document-builder-declarations/document-builder" # Default value ) args = parser.parse_args() - branch_name = get_current_branch("../../../../sdkjs") + branch_name = get_current_branch(f"{root}/sdkjs") if branch_name: args.destination = f"{args.destination}/{branch_name}" diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py new file mode 100644 index 00000000..12df9817 --- /dev/null +++ b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py @@ -0,0 +1,132 @@ +import os +import subprocess +import json +import argparse +import re + +# Configuration files +configs = [ + "./config/plugins/common.json", + "./config/plugins/word.json", + "./config/plugins/cell.json", + "./config/plugins/slide.json", + "./config/plugins/forms.json" +] + +root = '../../../..' 
+ +def generate(output_dir, md=False): + missing_examples_file = f'{output_dir}/missing_examples.txt' + + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + # Recreate missing_examples.txt file + with open(missing_examples_file, 'w', encoding='utf-8') as f: + f.write('') + + # Generate JSON documentation + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + command = f"npx jsdoc -c {config} -X > {output_file}" + print(f"Generating {editor_name}.json: {command}") + subprocess.run(command, shell=True) + + # Append examples to JSON documentation + for config in configs: + editor_name = config.split('/')[-1].replace('.json', '') + output_file = os.path.join(output_dir, editor_name + ".json") + + # Read the JSON file + with open(output_file, 'r', encoding='utf-8') as f: + data = json.load(f) + + # Modify JSON data + for doclet in data: + if 'see' in doclet: + if doclet['see'] is not None: + if editor_name == 'forms': + doclet['see'][0] = doclet['see'][0].replace('{Editor}', 'Word') + else: + doclet['see'][0] = doclet['see'][0].replace('{Editor}', editor_name.title()) + + file_path = f'{root}/' + doclet['see'][0] + + if os.path.exists(file_path): + with open(file_path, 'r', encoding='utf-8') as see_file: + example_content = see_file.read() + + # Extract the first line as a comment if it exists + lines = example_content.split('\n') + if lines[0].startswith('//'): + comment = lines[0] + '\n' + code_content = '\n'.join(lines[1:]) + else: + comment = '' + code_content = example_content + + # Format content for doclet['example'] + doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" + + if md == False: + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name.title()}"}}\n{code_content}\n```' + + else: + # Record missing examples in missing_examples.txt + with open(missing_examples_file, 'a', encoding='utf-8') as missing_file: + missing_file.write(f"{file_path}\n") + + # Write the modified JSON file back + with open(output_file, 'w', encoding='utf-8') as f: + json.dump(data, f, ensure_ascii=False, indent=4) + + print("Documentation generation for builder completed.") + +def remove_builder_lines(text): + lines = text.splitlines() # Split text into lines + filtered_lines = [line for line in lines if not line.strip().startswith("builder.")] + return "\n".join(filtered_lines) + +def remove_js_comments(text): + # Remove single-line comments, leaving text after // + text = re.sub(r'^\s*//\s?', '', text, flags=re.MULTILINE) + # Remove multi-line comments, leaving text after /* + text = re.sub(r'/\*\s*|\s*\*/', '', text, flags=re.DOTALL) + return text.strip() + +def get_current_branch(path): + try: + # Navigate to the specified directory and get the current branch name + result = subprocess.run( + ["git", "rev-parse", "--abbrev-ref", "HEAD"], + cwd=path, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True + ) + if result.returncode == 0: + return result.stdout.strip() + else: + print(f"Error: {result.stderr}") + return None + except Exception as e: + print(f"Exception: {e}") + return None + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate documentation") + parser.add_argument( + "destination", + type=str, + help="Destination directory for the generated documentation", + nargs='?', # Indicates the argument is optional + 
default=f"{root}/document-builder-declarations/document-builder-plugin" # Default value + ) + args = parser.parse_args() + + branch_name = get_current_branch(f"{root}/sdkjs") + if branch_name: + args.destination = f"{args.destination}/{branch_name}" + + generate(args.destination) From 8b773614bac552866c6d7347ba981d41f903c847 Mon Sep 17 00:00:00 2001 From: Semyon Bezrukov Date: Tue, 6 Aug 2024 11:45:54 +0300 Subject: [PATCH 14/26] Fix builder rpm package deploy (#857) --- scripts/package_branding.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/package_branding.py b/scripts/package_branding.py index 4cd9a805..5a838955 100644 --- a/scripts/package_branding.py +++ b/scripts/package_branding.py @@ -43,7 +43,7 @@ }, { "make": "rpm", - "src": "rpm/builddir/RPMS/*/*.rpm", + "src": "rpm/build/RPMS/*/*.rpm", "dst": "builder/linux/rhel/" } ] From a8912dff4180c831b89a621023ba6023e0860d80 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 6 Aug 2024 13:46:42 +0300 Subject: [PATCH 15/26] Refactoring --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 5 ++++- scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index e5c73d95..e44d9c72 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -76,7 +76,7 @@ def generate(output_dir, md=False): doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" if md == False: - doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name.title()}"}}\n{code_content}\n```' + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name}"}}\n{code_content}\n```' else: # Record missing examples in missing_examples.txt @@ -133,6 +133,9 @@ def get_current_branch(path): branch_name = get_current_branch(f"{root}/sdkjs") if branch_name: + index_last_name = branch_name.rfind("/") + if -1 != index_last_name: + branch_name = branch_name[index_last_name + 1:] args.destination = f"{args.destination}/{branch_name}" generate(args.destination) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py index 12df9817..25e4089b 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py @@ -70,7 +70,7 @@ def generate(output_dir, md=False): doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" if md == False: - doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name.title()}"}}\n{code_content}\n```' + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name}"}}\n{code_content}\n```' else: # Record missing examples in missing_examples.txt @@ -127,6 +127,9 @@ def get_current_branch(path): branch_name = get_current_branch(f"{root}/sdkjs") if branch_name: + index_last_name = branch_name.rfind("/") + if -1 != index_last_name: + branch_name = branch_name[index_last_name + 1:] args.destination = f"{args.destination}/{branch_name}" generate(args.destination) From 13cbd84b58ab5ecfb3bb3c93672da10aa6a7ac81 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 6 Aug 2024 
14:24:12 +0300 Subject: [PATCH 16/26] Change documentType for pdf --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 5 ++++- scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index e44d9c72..6b85d67f 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -76,7 +76,10 @@ def generate(output_dir, md=False): doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" if md == False: - doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name}"}}\n{code_content}\n```' + document_type = editor_name + if "forms" == document_type: + document_type = "pdf" + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```' else: # Record missing examples in missing_examples.txt diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py index 25e4089b..9c1cf6f1 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py @@ -70,7 +70,10 @@ def generate(output_dir, md=False): doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" if md == False: - doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{editor_name}"}}\n{code_content}\n```' + document_type = editor_name + if "forms" == document_type: + document_type = "pdf" + doclet['description'] = doclet['description'] + f'\n\n## Try it\n\n ```js document-builder={{"documentType": "{document_type}"}}\n{code_content}\n```' else: # Record missing examples in missing_examples.txt From 9ce103b31b0e4953ebdbad8af2f55c7629b2c2f6 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 6 Aug 2024 17:54:39 +0300 Subject: [PATCH 17/26] Add returncode in runcommand function --- scripts/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/base.py b/scripts/base.py index 00fa0c97..86df1472 100644 --- a/scripts/base.py +++ b/scripts/base.py @@ -426,12 +426,13 @@ def cmd_and_return_cwd(prog, args=[], is_no_errors=False): def run_command(sCommand): popen = subprocess.Popen(sCommand, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) - result = {'stdout' : '', 'stderr' : ''} + result = {'stdout' : '', 'stderr' : '', 'returncode' : 0} try: stdout, stderr = popen.communicate() popen.wait() result['stdout'] = stdout.strip().decode('utf-8', errors='ignore') result['stderr'] = stderr.strip().decode('utf-8', errors='ignore') + result['returncode'] = popen.returncode finally: popen.stdout.close() popen.stderr.close() From 096ce99588b8e10a17d6a125793b417135c901bd Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Wed, 7 Aug 2024 17:45:40 +0700 Subject: [PATCH 18/26] [jsdoc][plugins] Added examples field to json. 
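As a usage sketch for the `run_command` change in PATCH 17 above (the calling code here is illustrative, not taken from the repository), the new `returncode` key lets callers distinguish a failed command from one that merely produced empty output:

```python
# Illustrative only: consuming the 'returncode' key added to base.run_command.
# Assumes scripts/base.py is importable as `base` from the calling script.
import base

result = base.run_command("git rev-parse --abbrev-ref HEAD")
if result["returncode"] != 0:
    # stdout/stderr are already decoded to strings by run_command
    print("git failed: " + result["stderr"])
branch = result["stdout"] or "develop"
```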
--- scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py index 9c1cf6f1..bf1cff46 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_plugins_json.py @@ -66,8 +66,7 @@ def generate(output_dir, md=False): comment = '' code_content = example_content - # Format content for doclet['example'] - doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" + doclet['examples'] = [remove_js_comments(comment) + code_content] if md == False: document_type = editor_name From ebc084f9eaee343a2a2f7834735455c9665ae20e Mon Sep 17 00:00:00 2001 From: Nikita Khromov Date: Wed, 7 Aug 2024 18:24:02 +0700 Subject: [PATCH 19/26] [jsdoc][bu] Removed example filed from json docs --- scripts/sdkjs_common/jsdoc/generate_docs_json.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/sdkjs_common/jsdoc/generate_docs_json.py b/scripts/sdkjs_common/jsdoc/generate_docs_json.py index 6b85d67f..b920dba4 100644 --- a/scripts/sdkjs_common/jsdoc/generate_docs_json.py +++ b/scripts/sdkjs_common/jsdoc/generate_docs_json.py @@ -72,8 +72,8 @@ def generate(output_dir, md=False): comment = '' code_content = example_content - # Format content for doclet['example'] - doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" + if md == True: + doclet['example'] = remove_js_comments(comment) + "```js\n" + code_content + "\n```" if md == False: document_type = editor_name From 01575d1f2e0a82a5d4535e5cccfa0ef125441172 Mon Sep 17 00:00:00 2001 From: Semyon Bezrukov Date: Mon, 12 Aug 2024 16:53:56 +0300 Subject: [PATCH 20/26] Fix core and builder archive deploy (#860) * Refactoring script parameters * Add builder 7z deploy * Refactoring core 7z deploy * Small fix --- make_package.py | 28 ++++++++++----- scripts/deploy_core.py | 5 --- scripts/develop/config_server.py | 24 +++++++------ scripts/package_builder.py | 38 ++++++++++++++++++-- scripts/package_core.py | 61 ++++++++++++++------------------ scripts/package_utils.py | 6 ++-- 6 files changed, 96 insertions(+), 66 deletions(-) diff --git a/make_package.py b/make_package.py index c65cc864..238b0b50 100755 --- a/make_package.py +++ b/make_package.py @@ -10,15 +10,17 @@ # parse parser = argparse.ArgumentParser(description="Build packages.") parser.add_argument("-P", "--platform", dest="platform", type=str, - action="store", help="Defines platform", required=True) -parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+", - action="store", help="Defines targets", required=True) -parser.add_argument("-R", "--branding", dest="branding", type=str, - action="store", help="Provides branding path") + action="store", help="Defines platform", required=True) +parser.add_argument("-T", "--targets", dest="targets", type=str, nargs="+", + action="store", help="Defines targets", required=True) parser.add_argument("-V", "--version", dest="version", type=str, - action="store", help="Defines version") + action="store", help="Defines version") parser.add_argument("-B", "--build", dest="build", type=str, - action="store", help="Defines build") + action="store", help="Defines build") +parser.add_argument("-H", "--branch", dest="branch", type=str, + action="store", help="Defines branch") +parser.add_argument("-R", "--branding", dest="branding", type=str, + 
action="store", help="Provides branding path") args = parser.parse_args() # vars @@ -29,8 +31,16 @@ common.clean = "clean" in args.targets common.sign = "sign" in args.targets common.deploy = "deploy" in args.targets -common.version = args.version if args.version else utils.get_env("BUILD_VERSION", "0.0.0") -common.build = args.build if args.build else utils.get_env("BUILD_NUMBER", "0") +if args.version: common.version = args.version +else: common.version = utils.get_env("PRODUCT_VERSION", "0.0.0") +utils.set_env("PRODUCT_VERSION", common.version) +utils.set_env("BUILD_VERSION", common.version) +if args.build: common.build = args.build +else: common.build = utils.get_env("BUILD_NUMBER", "0") +utils.set_env("BUILD_NUMBER", common.build) +if args.branch: common.branch = args.branch +else: common.branch = utils.get_env("BRANCH_NAME", "null") +utils.set_env("BRANCH_NAME", common.branch) common.branding = args.branding common.timestamp = utils.get_timestamp() common.workspace_dir = utils.get_abspath(utils.get_script_dir(__file__) + "/..") diff --git a/scripts/deploy_core.py b/scripts/deploy_core.py index 7a3d974b..14725578 100644 --- a/scripts/deploy_core.py +++ b/scripts/deploy_core.py @@ -64,10 +64,5 @@ def make(): # dictionaries base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False) - - if base.is_file(archive_dir + ".7z"): - base.delete_file(archive_dir + ".7z") - base.archive_folder(archive_dir + "/*", archive_dir + ".7z") - return diff --git a/scripts/develop/config_server.py b/scripts/develop/config_server.py index 425b7c6a..f2eb67e6 100644 --- a/scripts/develop/config_server.py +++ b/scripts/develop/config_server.py @@ -5,8 +5,9 @@ import os import json -def get_core_url(arch, branch): - return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/" + base.host_platform() + "/core/" + branch + "/latest/" + arch + "/core.7z" +def get_core_url(platform, branch): + return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/" \ + + branch + "/latest/core-" + platform.replace("_", "-") + ".7z" def make(): git_dir = base.get_script_dir() + "/../.." 
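For orientation, a quick check of the URL shape produced by the new `get_core_url` above; the platform and branch values below are only examples:

```python
# The function is copied from the hunk above; the call is illustrative.
def get_core_url(platform, branch):
    return "http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/" \
        + branch + "/latest/core-" + platform.replace("_", "-") + ".7z"

print(get_core_url("win_64", "develop"))
# http://repo-doc-onlyoffice-com.s3.amazonaws.com/archive/develop/latest/core-win-64.7z
```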
@@ -20,14 +21,21 @@ def make(): arch = "x64" arch2 = "_64" - if ("windows" == base.host_platform()) and not base.host_platform_is64(): + if base.is_windows() and not base.host_platform_is64(): arch = "x86" arch2 = "_32" + if base.is_os_arm(): + arch2 = "_arm64" + platform = "" + if base.is_windows(): + platform = "win" + arch2 + else: + platform = base.host_platform() + arch2 - url = get_core_url(arch, config.option("branch")) + url = get_core_url(platform, config.option("branch")) data_url = base.get_file_last_modified_url(url) if (data_url == "" and config.option("branch") != "develop"): - url = get_core_url(arch, "develop") + url = get_core_url(platform, "develop") data_url = base.get_file_last_modified_url(url) old_data_url = base.readFile("./core.7z.data") @@ -49,12 +57,6 @@ def make(): base.extract("./core.7z", "./") base.writeFile("./core.7z.data", data_url) - platform = "" - if ("windows" == base.host_platform()): - platform = "win" + arch2 - else: - platform = base.host_platform() + arch2 - base.copy_files("./core/*", "./") else: print("-----------------------------------------------------------") diff --git a/scripts/package_builder.py b/scripts/package_builder.py index ae9b9c06..b8d7dffe 100644 --- a/scripts/package_builder.py +++ b/scripts/package_builder.py @@ -7,14 +7,17 @@ def make(): utils.log_h1("BUILDER") + if not (utils.is_windows() or utils.is_macos() or utils.is_linux()): + utils.log("Unsupported host OS") + return + if common.deploy: + make_archive() if utils.is_windows(): make_windows() elif utils.is_macos(): make_macos() elif utils.is_linux(): make_linux() - else: - utils.log("Unsupported host OS") return def s3_upload(files, dst): @@ -29,6 +32,37 @@ def s3_upload(files, dst): ret &= upload return ret +def make_archive(): + utils.set_cwd(utils.get_path( + "build_tools/out/" + common.prefix + "/" + branding.company_name.lower())) + + utils.log_h2("builder archive build") + utils.delete_file("builder.7z") + args = ["7z", "a", "-y", "builder.7z", "./documentbuilder/*"] + if utils.is_windows(): + ret = utils.cmd(*args, verbose=True) + else: + ret = utils.sh(" ".join(args), verbose=True) + utils.set_summary("builder archive build", ret) + + utils.log_h2("builder archive deploy") + dest = "builder-" + common.prefix.replace("_","-") + ".7z" + dest_latest = "archive/%s/latest/%s" % (common.branch, dest) + dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest) + ret = utils.s3_upload( + "builder.7z", "s3://" + branding.s3_bucket + "/" + dest_version) + utils.set_summary("builder archive deploy", ret) + if ret: + utils.log("URL: " + branding.s3_base_url + "/" + dest_version) + utils.add_deploy_data(dest_version) + utils.s3_copy( + "s3://" + branding.s3_bucket + "/" + dest_version, + "s3://" + branding.s3_bucket + "/" + dest_latest) + utils.log("URL: " + branding.s3_base_url + "/" + dest_latest) + + utils.set_cwd(common.workspace_dir) + return + def make_windows(): global inno_file, zip_file, suffix, key_prefix utils.set_cwd("document-builder-package") diff --git a/scripts/package_core.py b/scripts/package_core.py index 8bf2e181..1855f159 100644 --- a/scripts/package_core.py +++ b/scripts/package_core.py @@ -10,47 +10,38 @@ def make(): utils.log("Unsupported host OS") return if common.deploy: - make_core() + make_archive() return -def make_core(): - prefix = common.platformPrefixes[common.platform] - company = branding.company_name.lower() - repos = { - "windows_x64": { "repo": "windows", "arch": "x64", "version": common.version + "." 
+ common.build }, - "windows_x86": { "repo": "windows", "arch": "x86", "version": common.version + "." + common.build }, - "darwin_x86_64": { "repo": "mac", "arch": "x64", "version": common.version + "-" + common.build }, - "darwin_arm64": { "repo": "mac", "arch": "arm", "version": common.version + "-" + common.build }, - "linux_x86_64": { "repo": "linux", "arch": "x64", "version": common.version + "-" + common.build }, - } - repo = repos[common.platform] - branch = utils.get_env("BRANCH_NAME") - core_7z = utils.get_path("build_tools/out/%s/%s/core.7z" % (prefix, company)) - dest_version = "%s/core/%s/%s/%s" % (repo["repo"], branch, repo["version"], repo["arch"]) - dest_latest = "%s/core/%s/%s/%s" % (repo["repo"], branch, "latest", repo["arch"]) +def make_archive(): + utils.set_cwd(utils.get_path( + "build_tools/out/" + common.prefix + "/" + branding.company_name.lower())) - if branch is None: - utils.log_err("BRANCH_NAME variable is undefined") - utils.set_summary("core deploy", False) - return - if not utils.is_file(core_7z): - utils.log_err("file not exist: " + core_7z) - utils.set_summary("core deploy", False) - return + utils.log_h2("core archive build") + utils.delete_file("core.7z") + args = ["7z", "a", "-y", "core.7z", "./core/*"] + if utils.is_windows(): + ret = utils.cmd(*args, verbose=True) + else: + ret = utils.sh(" ".join(args), verbose=True) + utils.set_summary("core archive build", ret) - utils.log_h2("core deploy") + utils.log_h2("core archive deploy") + dest = "core-" + common.prefix.replace("_","-") + ".7z" + dest_latest = "archive/%s/latest/%s" % (common.branch, dest) + dest_version = "archive/%s/%s/%s" % (common.branch, common.build, dest) ret = utils.s3_upload( - core_7z, - "s3://" + branding.s3_bucket + "/" + dest_version + "/core.7z") + "core.7z", "s3://" + branding.s3_bucket + "/" + dest_version) + utils.set_summary("core archive deploy", ret) if ret: - utils.log("URL: " + branding.s3_base_url + "/" + dest_version + "/core.7z") - utils.add_deploy_data(dest_version + "/core.7z") - ret = utils.s3_sync( - "s3://" + branding.s3_bucket + "/" + dest_version + "/", - "s3://" + branding.s3_bucket + "/" + dest_latest + "/", - delete=True) - utils.log("URL: " + branding.s3_base_url + "/" + dest_latest + "/core.7z") - utils.set_summary("core deploy", ret) + utils.log("URL: " + branding.s3_base_url + "/" + dest_version) + utils.add_deploy_data(dest_version) + utils.s3_copy( + "s3://" + branding.s3_bucket + "/" + dest_version, + "s3://" + branding.s3_bucket + "/" + dest_latest) + utils.log("URL: " + branding.s3_base_url + "/" + dest_latest) + + utils.set_cwd(common.workspace_dir) return def deploy_closuremaps_sdkjs(license): diff --git a/scripts/package_utils.py b/scripts/package_utils.py index 75c7b6fc..1d427e54 100644 --- a/scripts/package_utils.py +++ b/scripts/package_utils.py @@ -385,15 +385,13 @@ def s3_upload(src, dst, **kwargs): ret = sh(" ".join(args), verbose=True) return ret -def s3_sync(src, dst, **kwargs): +def s3_copy(src, dst, **kwargs): args = ["aws"] if kwargs.get("endpoint_url"): args += ["--endpoint-url", kwargs["endpoint_url"]] - args += ["s3", "sync", "--no-progress"] + args += ["s3", "cp", "--no-progress"] if kwargs.get("acl"): args += ["--acl", kwargs["acl"]] - if kwargs.get("delete") and kwargs["delete"]: - args += ["--delete"] args += [src, dst] if is_windows(): ret = cmd(*args, verbose=True) From 40e9938885cf8deded7d5337b22e0cd2801fb755 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 13 Aug 2024 00:25:06 +0300 Subject: [PATCH 21/26] Add test for 
dicts & spellmodule to core --- sln.json | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/sln.json b/sln.json index ecd3f438..353f3d9a 100644 --- a/sln.json +++ b/sln.json @@ -1,6 +1,10 @@ { "root" : "../", + "spell" : [ + "[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro" + ], + "core" : [ "core/Common/3dParty/cryptopp/project/cryptopp.pro", @@ -54,7 +58,10 @@ "[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro", - "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro" + "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro", + "[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro", + + "spell" ], @@ -70,13 +77,8 @@ "[win,linux]desktop-sdk/ChromiumBasedEditors/videoplayerlib/videoplayerlib.pro" ], - "spell" : [ - "[win,linux,mac]core/Common/3dParty/hunspell/qt/hunspell.pro" - ], - "desktop" : [ "core", - "spell", "multimedia", "core/DesktopEditor/xmlsec/src/ooxmlsignature.pro", From 6e4a2e4d5eb45facb74d5a29b2dc9a9b6af4977e Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 13 Aug 2024 00:25:56 +0300 Subject: [PATCH 22/26] Add dictionariestester to core deploy --- scripts/deploy_core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/deploy_core.py b/scripts/deploy_core.py index 14725578..e5bb3333 100644 --- a/scripts/deploy_core.py +++ b/scripts/deploy_core.py @@ -61,6 +61,7 @@ def make(): base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "ooxml_crypt") base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "vboxtester") base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "metafiletester") + base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "dictionariestester") # dictionaries base.copy_dictionaries(git_dir + "/dictionaries", archive_dir + "/dictionaries", True, False) From 50eca8aab58e1bc12d0635e609d372a835ec1ae7 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 13 Aug 2024 07:45:29 +0300 Subject: [PATCH 23/26] Fix build --- sln.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sln.json b/sln.json index 353f3d9a..41b9dce9 100644 --- a/sln.json +++ b/sln.json @@ -55,14 +55,14 @@ "[win,linux,mac,!linux_arm64]core/OfficeCryptReader/ooxml_crypt/ooxml_crypt.pro", + "spell", + "[win,linux,mac,!no_tests]core/DesktopEditor/vboxtester/vboxtester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro", "[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro", - "spell" - ], "builder" : [ From 997bfa3dd5d83ce0123f324dafa9fabea280e408 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 13 Aug 2024 09:14:23 +0300 Subject: [PATCH 24/26] Fix typo --- sln.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sln.json b/sln.json index 41b9dce9..6a596c5f 100644 --- a/sln.json +++ b/sln.json @@ -61,7 +61,7 @@ "[win,linux,mac,!no_tests]core/Test/Applications/StandardTester/standardtester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/x2tTester/x2ttester.pro", "[win,linux,mac,!no_tests]core/Test/Applications/MetafileTester/MetafileTester.pro", - 
"[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro", + "[win,linux,mac,!no_tests]core/Common/3dParty/hunspell/test/test.pro" ], From dabbc31c09c2f8238253a0390c0dffe716fa3ab9 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 13 Aug 2024 10:38:33 +0300 Subject: [PATCH 25/26] Handling complex dependencies in project file --- scripts/sln.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/scripts/sln.py b/scripts/sln.py index fc7e10a9..5b578d47 100644 --- a/scripts/sln.py +++ b/scripts/sln.py @@ -14,6 +14,15 @@ def is_exist_in_array(projects, proj): return True return False +def get_full_projects_list(json_data, list): + result = [] + for rec in list: + if rec in json_data: + result += get_full_projects_list(json_data, json_data[rec]) + else: + result.append(rec) + return result + def adjust_project_params(params): ret_params = params @@ -86,13 +95,9 @@ def get_projects(pro_json_path, platform): # check aliases to modules records_src = data[module] - records = [] + records = get_full_projects_list(data, records_src) - for rec in records_src: - if rec in data: - records += data[rec] - else: - records.append(rec) + print(records) for rec in records: params = [] From 63fbbc560367ffe54a73ffd352b3eed950a9a518 Mon Sep 17 00:00:00 2001 From: Oleg Korshul Date: Tue, 13 Aug 2024 17:26:14 +0300 Subject: [PATCH 26/26] Add missed library to deploy --- scripts/deploy_core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/deploy_core.py b/scripts/deploy_core.py index e5bb3333..9f7ad71c 100644 --- a/scripts/deploy_core.py +++ b/scripts/deploy_core.py @@ -39,6 +39,7 @@ def make(): base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "Fb2File") base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "EpubFile") base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "DocxRenderer") + base.copy_lib(core_build_dir + "/lib/" + platform_postfix, archive_dir, "hunspell") base.copy_file(git_dir + "/sdkjs/pdf/src/engine/cmap.bin", archive_dir + "/cmap.bin") base.copy_exe(core_build_dir + "/bin/" + platform_postfix, archive_dir, "x2t")