diff --git a/.gitignore b/.gitignore
index dce4c90..4db6739 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,6 @@ test.*
dist
temp
!test
-*.tgz
\ No newline at end of file
+*.tgz
+packages/app/main.js
+packages/app/index.html
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index d70c5e9..926ca3d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,28 +1,22 @@
language: node_js
-
node_js:
- - node
-
+- node
env:
global:
- - secure: ikFQOw+feac3+WhVb1QtCeAqp9OpQprD4oXmG4+q3MVAHFiP2pLcfZbrExr0bu472eEHLpRDjjlxHEGxcnJoSool4LUCkgTnlpN8/iGzug0gnv1/XoLZiwKVbonOQHaksJ8vGBIXFMPbXRYfc1DjSyWQhwxkdhrIGUEyPs0IqrBZ4nzbpCG/hsSbkKG6xMu5sXXwM/4Nmklx8kZRMWCQXB09gMysmt1TmPwxogyxXg1xm8GaSfA5GQHJZyW3oQpvGMKaXyDLymjLw66rlrUhZ9u0TKklkBqtVlhBnGm/eXxNFnDTfz+D8K3v2MSoSRuFb8bsofLHDFDm9suBuVV83nmINhkIcsmxEbS5B+Cv916za0NgCKxKJW/DRuCCoOdjRrXk36oC+7DCvEIVH8Hsraj7oC/WgBWSHCkI12UAXhxoSHrS3OiNxJEVZJXqIUzzzbyJJE5DTHElmBm1T6ZM7WEydzAPfhtzs/d5SsjwP0w6n+8SrnKifuBaFNw7wv40sdwey9TKJEXD84AeBsbTmJQCljJuKZntQryfoCVYB82OasEoEF0ryCnc0to1/lszXLAKJBv7gOTxzQ62tKo3wJo2L6FhGIOYG5n0NVXz7kC+5rKh5ZIwKx42oT9P4OAAwpM3p+LxzS7lkn6WLamFlioCFnzmvItO9yOFR/pIiKY=
-
+ - secure: iQS7C6BZcBlFCfj4a/1scDVO6qOCMCb2lYfZSTVS1QRnMns3wNBaDARXKrjl2LLzVPqz5puhj7+z7grUZ/rAUIAsAQUJtHqpTcpHY6kksQsh5yqNYgfwACAx0VNPicLEv2f/ekjabxaA1+7FhbCR+DFa/ZUuXGC+RpVm/EWhDDEkmGVHvXoJN92dW5lfPy/kJwpus03Mr+tPVbDJwO83sWhulUTxRuf1bEAg2/fUiPf+qZzRy6swEnfS7GlQrpD0/KHYGoa0Dt3TmqJ1oP03CRvdo/g45VZutonjLNAu7oJ/lrSTNLU68JGOnv+WCGL9RwPgfBcjTldxmSer4fwYLh+oj1IJHexNA2G0dStYVwKfkj8jEd7DLJxOrOdTYXovl3tRpyGN7oCDoWepvH1nYPBvZno9Kgt32VW/xwhqZgia5tmzra0ouyxJ/GzOXph2NaHTe9jzuF2/i814q1r3VfC5Wm3kai5R2RoNtv6bk9vS4QyEwum5JhQRJpO9O8/z393fTxMN1ZOC7D307eUQLPiLSjb/MkEfEuWtqvuEieYKs7UelC9SzrM83urJUc94FFhGfLuhqriLTJSd8gdlm5iBPjz6WrhW8n6JCa0t7iI90Cxn4DIr7KB7SROW66KSC8HPi9srPcJKVb1dx/Z8PMSP4y6ZmkTMXpbryTFK9Vo=
before_install:
- - node ./build/check.js
-
+- node ./build/check.js
install:
- - npm i
- - npm run bootstrap
-
+- npm i
+- npm run bootstrap
script:
- - npm run lint
- - npm run build:prod
- - npm run test
-
+- npm run lint
+- npm run build:prod
+- npm run test
deploy:
- - skip_cleanup: true
- provider: script
- script: node ./build/release.js
- on:
- repo: obstudio/Marklet
- branch: master
+- skip_cleanup: true
+ provider: script
+ script: node ./build/release.js
+ on:
+ repo: obstudio/Marklet
+ branch: master
diff --git a/.vscode/launch.json b/.vscode/launch.json
index f1a290f..ffbf1c8 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -6,8 +6,11 @@
"request": "launch",
"name": "Debug Main Process",
"cwd": "${workspaceRoot}",
- "runtimeExecutable": "npm",
- "args": [ "start" ],
+ "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron",
+ "windows": {
+ "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron.cmd"
+ },
+ "args" : ["packages/app/main.dev.js"]
}
]
}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 42b0849..691d6e0 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -5,10 +5,12 @@
"vue"
],
"files.exclude": {
+ "**/node_modules": true,
"**/package-lock.json": true,
"package.json.lerna_backup": true,
- "packages/**/node_modules": true,
- // "packages/**/tsconfig.json": true
+ "packages/**/tsconfig.json": true,
+ "packages/**/dist": true,
+ "packages/**/temp": true,
},
"editor.wordSeparators": "`~!@#%^&*()-=+[{]}\\|;:'\",.<>/?",
}
\ No newline at end of file
diff --git a/README.md b/README.md
index 6c8251c..460db34 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,5 @@
# Marklet
+
[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
[![dependency](https://img.shields.io/david/obstudio/Marklet.svg)](https://github.com/obstudio/Marklet/blob/master/package.json)
[![npm](https://img.shields.io/npm/v/markletjs.svg)](https://www.npmjs.com/package/markletjs)
@@ -6,18 +7,47 @@
A markup language designed for API manual pages.
-## Node
+## Packages
+
+- [markletjs](https://github.com/obstudio/Marklet/tree/master/packages/marklet): [![npm](https://img.shields.io/npm/v/markletjs.svg)](https://www.npmjs.com/package/markletjs)
+- [@marklet/cli](https://github.com/obstudio/Marklet/tree/master/packages/cli): [![npm](https://img.shields.io/npm/v/@marklet/cli.svg)](https://www.npmjs.com/package/@marklet/cli)
+- [@marklet/core](https://github.com/obstudio/Marklet/tree/master/packages/core): [![npm](https://img.shields.io/npm/v/@marklet/core.svg)](https://www.npmjs.com/package/@marklet/core)
+- [@marklet/dev-server](https://github.com/obstudio/Marklet/tree/master/packages/dev-server): [![npm](https://img.shields.io/npm/v/@marklet/dev-server.svg)](https://www.npmjs.com/package/@marklet/dev-server)
+- [@marklet/monaco](https://github.com/obstudio/Marklet/tree/master/packages/monaco): [![npm](https://img.shields.io/npm/v/@marklet/monaco.svg)](https://www.npmjs.com/package/@marklet/monaco)
+- [@marklet/parser](https://github.com/obstudio/Marklet/tree/master/packages/parser): [![npm](https://img.shields.io/npm/v/@marklet/parser.svg)](https://www.npmjs.com/package/@marklet/parser)
+- [@marklet/renderer](https://github.com/obstudio/Marklet/tree/master/packages/renderer): [![npm](https://img.shields.io/npm/v/@marklet/renderer.svg)](https://www.npmjs.com/package/@marklet/renderer)
+
+## Usage: CLI
+
+```
+Usage: marklet [filepath|dirpath] [options]
+
+Options:
+
+ -v, --version output the version number
+ -m, --mode [mode] Choose between parse, watch and edit mode (default: parse)
+ -s, --source [path] Read text from file
+ -i, --input [text] Read text directly from stdin
+  -d, --dest [path]                  Write parsed data to file instead of stdout
+ -p, --port [port] Port for the development server
+ -l, --default-language [language] Default language in code block
+  -H, --no-header-align              Disable centering of headers
+ -S, --no-section Disallow section syntax
+ -h, --help output usage information
+```
+
+## Usage: Node
```shell
-npm i marklet
+npm i markletjs
```
-```javascript
+```js
const Marklet = require('marklet')
Marklet.watch({ source: 'path/to/file' })
```
-## Web
+## Usage: Web
```html
diff --git a/build/clear.js b/build/clear.js
deleted file mode 100644
index 00c84c6..0000000
--- a/build/clear.js
+++ /dev/null
@@ -1,11 +0,0 @@
-const fs = require('fs')
-const path = require('path')
-
-const BASE_DIR = path.join(__dirname, '../packages')
-
-fs.readdirSync(BASE_DIR).forEach((dir) => {
- fs.readdirSync(path.join(BASE_DIR, dir))
- .forEach((file) => {
- if (/[\w.-]+\.tgz/.test(file)) fs.unlinkSync(path.join(BASE_DIR, dir, file))
- })
-})
diff --git a/build/publish.js b/build/publish.js
index caa4999..7c4e22f 100644
--- a/build/publish.js
+++ b/build/publish.js
@@ -50,6 +50,7 @@ program
.option('-1, --major')
.option('-2, --minor')
.option('-3, --patch')
+ .option('-o, --only')
.option('-p, --publish')
.parse(process.argv)
@@ -58,6 +59,7 @@ if (program.all) program.args = packageNames
function bump(name, flag) {
packages[name].bump(flag || 'patch')
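+  // with --only, bump this package without updating the version ranges in dependent packages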
+ if (program.only) return
const npmName = packages[name].current.name
packageNames.forEach((next) => {
if (npmName in (packages[next].current.devDependencies || {})) {
diff --git a/build/util.js b/build/util.js
index 9785f28..10a5e85 100644
--- a/build/util.js
+++ b/build/util.js
@@ -26,8 +26,51 @@ function resolve(...names) {
return path.join(__dirname, '../packages', ...names)
}
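+// named stopwatch helpers: each label accumulates elapsed time across
+// start()/pause() cycles, and finish() reports the total in seconds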
+const timers = {}
+
+function start(label = '') {
+ if (!timers[label]) timers[label] = { total: 0 }
+ timers[label].start = Date.now()
+ return _getTime(label)
+}
+
+function pause(label = '') {
+ timers[label].total += Date.now() - timers[label].start
+ timers[label].start = Date.now()
+ return _getTime(label)
+}
+
+function finish(label = '') {
+ pause(label)
+ const result = _getTime(label)
+ timers[label].total = 0
+ return `Finished in ${result.toFixed(1)}s.`
+}
+
+function _getTime(label = '') {
+ return label in timers ? timers[label].total / 1000 : 0
+}
+
+function timing(label = '', callback) {
+ start(label)
+ const result = callback()
+ pause(label)
+ return result
+}
+
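+// detect whether the current process is running inside Electron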
+function isElectron() {
+ return typeof process !== 'undefined'
+ && typeof process.versions !== 'undefined'
+ && typeof process.versions.electron !== 'undefined'
+}
+
module.exports = {
exec,
execSync,
resolve,
+ start,
+ pause,
+ finish,
+ timing,
+ isElectron,
}
\ No newline at end of file
diff --git a/package.json b/package.json
index c731339..8578516 100644
--- a/package.json
+++ b/package.json
@@ -1,40 +1,39 @@
{
"scripts": {
"start": "npm run build && node packages/cli -m edit",
- "publish": "lerna publish --no-git-tag-version",
"bootstrap": "lerna bootstrap --hoist --no-ci",
"build": "tsc -b && node build/build -sr",
"build:prod": "tsc -b && node build/build -psr",
"build:renderer": "node build/build -r",
"build:server": "node build/build -s",
"build:tsc": "tsc -b",
- "clear": "node build/clear",
- "test": "node packages/test/runner.js",
+ "test": "node packages/test",
"lint": "tslint packages/**/src/*.ts && eslint ."
},
"devDependencies": {
- "@octokit/rest": "^15.12.0",
+ "@octokit/rest": "^15.13.0",
"@sfc2js/clean-css": "^1.1.1",
"@sfc2js/sass": "^1.0.1",
"@types/cheerio": "^0.22.9",
"@types/js-yaml": "^3.11.2",
- "@types/node": "^10.10.1",
+ "@types/node": "^10.11.4",
"@types/ws": "^6.0.1",
- "ajv": "^6.5.3",
+ "ajv": "^6.5.4",
"chalk": "^2.4.1",
"cheerio": "^1.0.0-rc.2",
"commander": "^2.18.0",
- "eslint": "^5.6.0",
+ "electron": "^3.0.2",
+ "eslint": "^5.6.1",
"eslint-plugin-vue": "^5.0.0-beta.3",
"fast-deep-equal": "^2.0.1",
"html-minifier": "^3.5.20",
- "lerna": "^3.4.0",
+ "lerna": "^3.4.1",
"node-sass": "^4.9.3",
- "prettier": "^1.14.3",
+ "sass": "^1.14.1",
"semver": "^5.5.1",
- "sfc2js": "^3.3.0",
+ "sfc2js": "^3.3.2",
"tslint": "^5.11.0",
- "typescript": "^3.0.3",
- "webpack": "^4.19.1"
+ "typescript": "^3.1.1",
+ "webpack": "^4.20.2"
}
}
diff --git a/packages/app/README.md b/packages/app/README.md
new file mode 100644
index 0000000..a5f13a7
--- /dev/null
+++ b/packages/app/README.md
@@ -0,0 +1,3 @@
+# @marklet/app
+
+An Electron app for marklet.
diff --git a/packages/app/build/transpile.js b/packages/app/build/transpile.js
new file mode 100644
index 0000000..80d914e
--- /dev/null
+++ b/packages/app/build/transpile.js
@@ -0,0 +1,68 @@
+const util = require('../../../build/util')
+const sfc2js = require('sfc2js')
+const sass = require('sass')
+const path = require('path')
+const fs = require('fs')
+
+util.start()
+
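+// register a sass plugin with sfc2js so that style blocks written in
+// sass/scss/css are compiled via sass.renderSync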
+sfc2js.install({
+ name: 'sass-plugin',
+ version: '1.0',
+ target: 'style',
+ lang: [
+ 'sass',
+ 'scss',
+ 'css',
+ ],
+ default: {
+ includePaths: [],
+ },
+ updated(options) {
+ const dirPath = path.dirname(options.srcPath)
+ this.options.includePaths.push(dirPath)
+ },
+ render(style) {
+ return sass.renderSync({ ...this.options, data: style.content }).css.toString()
+ },
+})
+
+module.exports = sfc2js.transpile({
+ baseDir: util.resolve(),
+ srcDir: 'app/comp',
+ outDir: 'app/temp',
+ enterance: util.isElectron() ? 'app.vue' : '',
+})
+
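+// recompile index.scss only when it differs from the cached copy of the previous build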
+let indexCache = ''
+try {
+ indexCache = fs.readFileSync(util.resolve('app/temp/index.cache.scss')).toString()
+} catch (error) { /**/ }
+
+const indexData = fs.readFileSync(util.resolve('app/comp/index.scss')).toString()
+
+if (indexData === indexCache && fs.existsSync(util.resolve('app/temp/index.css'))) {
+ module.exports.css += fs.readFileSync(util.resolve('app/temp/index.css')).toString()
+} else {
+ const indexCSS = sass.renderSync({ data: indexData }).css.toString()
+ fs.writeFileSync(util.resolve('app/temp/index.css'), indexCSS)
+  fs.writeFileSync(util.resolve('app/temp/index.cache.scss'), indexData) // update the cache so unchanged sources are skipped next build
+ module.exports.css += indexCSS
+}
+
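+// when running inside Electron, also transpile the renderer components
+// and append their stylesheet to the exported CSS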
+if (util.isElectron()) {
+ const result = sfc2js.transpile({
+ baseDir: util.resolve(),
+ srcDir: 'renderer/comp',
+ outDir: 'renderer/temp',
+ enterance: '../src',
+ outCSSFile: '../dist/marklet.min.css',
+ defaultScript: {
+ props: ['node'],
+ },
+ })
+ module.exports.css += result.css
+ module.exports.plugin = result.app
+}
+
+console.log('Transpile succeeded.', util.finish())
diff --git a/packages/app/comp/.eslintrc.yml b/packages/app/comp/.eslintrc.yml
new file mode 100644
index 0000000..98ddf5e
--- /dev/null
+++ b/packages/app/comp/.eslintrc.yml
@@ -0,0 +1,5 @@
+extends:
+ - plugin:vue/essential
+
+globals:
+ Vue: true
diff --git a/packages/app/comp/app.vue b/packages/app/comp/app.vue
new file mode 100644
index 0000000..684e3cd
--- /dev/null
+++ b/packages/app/comp/app.vue
@@ -0,0 +1,135 @@
+
+
+
+
+
+
+
diff --git a/packages/app/comp/index.scss b/packages/app/comp/index.scss
new file mode 100644
index 0000000..ef3896f
--- /dev/null
+++ b/packages/app/comp/index.scss
@@ -0,0 +1,49 @@
+body {
+ overflow: hidden;
+ font-family:
+ -apple-system,
+ BlinkMacSystemFont,
+ "Segoe UI",
+ Helvetica,
+ Arial,
+ sans-serif,
+ "Apple Color Emoji",
+ "Segoe UI Emoji",
+ "Segoe UI Symbol";
+}
+
+.no-transition {
+ transition: none !important;
+}
+
+.monaco-editor {
+ .monaco-scrollable-element {
+ > .scrollbar {
+ transition: opacity 0.3s ease;
+
+ > .slider {
+ opacity: 0.5;
+ cursor: pointer;
+ border-radius: 6px;
+ background-color: #c0c4cc;
+ transition: opacity 0.3s ease;
+
+ &:hover {
+ opacity: 1;
+ }
+ }
+
+ &.vertical {
+ margin: 4px 0;
+ }
+
+ &.horizontal {
+ margin: 0 4px;
+ }
+
+ &.invisible.fade {
+ transition: opacity 0.8s ease;
+ }
+ }
+ }
+}
diff --git a/packages/app/index.dev.html b/packages/app/index.dev.html
new file mode 100644
index 0000000..8463489
--- /dev/null
+++ b/packages/app/index.dev.html
@@ -0,0 +1,21 @@
+
+
+
+
+ Marklet
+
+
+
+
+
+
+
+
+
diff --git a/packages/app/index.prod.html b/packages/app/index.prod.html
new file mode 100644
index 0000000..658f0b0
--- /dev/null
+++ b/packages/app/index.prod.html
@@ -0,0 +1,12 @@
+
+
+
+
+ Marklet
+
+
+
+
+
+
+
diff --git a/packages/app/main.dev.js b/packages/app/main.dev.js
new file mode 100644
index 0000000..f852d37
--- /dev/null
+++ b/packages/app/main.dev.js
@@ -0,0 +1,31 @@
+const { app, BrowserWindow } = require('electron')
+const path = require('path')
+
+let mainWindow
+
+function createMain() {
+ mainWindow = new BrowserWindow({
+ width: 800,
+ height: 600,
+ minWidth: 800,
+ minHeight: 600,
+ useContentSize: true,
+ autoHideMenuBar: false,
+ })
+
+ mainWindow.loadFile(path.join(__dirname, 'index.dev.html'))
+
+ mainWindow.on('closed', () => {
+ mainWindow = null
+ })
+}
+
+app.on('ready', createMain)
+
+app.on('window-all-closed', () => {
+ if (process.platform !== 'darwin') app.quit()
+})
+
+app.on('activate', () => {
+ if (mainWindow === null) createMain()
+})
diff --git a/packages/app/main.prod.js b/packages/app/main.prod.js
new file mode 100644
index 0000000..7ba670c
--- /dev/null
+++ b/packages/app/main.prod.js
@@ -0,0 +1,31 @@
+const { app, BrowserWindow } = require('electron')
+const path = require('path')
+
+let mainWindow
+
+function createMain() {
+ mainWindow = new BrowserWindow({
+ width: 800,
+ height: 600,
+ minWidth: 800,
+ minHeight: 600,
+ useContentSize: true,
+ autoHideMenuBar: false,
+ })
+
+ mainWindow.loadFile(path.join(__dirname, 'index.prod.html'))
+
+ mainWindow.on('closed', () => {
+ mainWindow = null
+ })
+}
+
+app.on('ready', createMain)
+
+app.on('window-all-closed', () => {
+ if (process.platform !== 'darwin') app.quit()
+})
+
+app.on('activate', () => {
+ if (mainWindow === null) createMain()
+})
diff --git a/packages/lexer/package.json b/packages/app/package.json
similarity index 53%
rename from packages/lexer/package.json
rename to packages/app/package.json
index 5cf1771..113f2fc 100644
--- a/packages/lexer/package.json
+++ b/packages/app/package.json
@@ -1,26 +1,26 @@
{
- "name": "@marklet/lexer",
+ "name": "@marklet/app",
"version": "1.0.10",
- "description": "A document lexer for marklet.",
+ "private": true,
+ "main": "main.js",
"author": "shigma <1700011071@pku.edu.cn>",
- "contributors": [
- "jjyyxx <1449843302@qq.com>"
- ],
"homepage": "https://github.com/obstudio/Marklet",
"license": "MIT",
- "main": "dist/index.js",
- "typings": "dist/index.d.ts",
- "files": [
- "dist"
- ],
"repository": {
"type": "git",
"url": "git+https://github.com/obstudio/Marklet.git"
},
+ "scripts": {
+ "test": "echo \"Error: run tests from root\" && exit 1"
+ },
"bugs": {
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/core": "^2.0.0"
+ "@marklet/monaco": "^1.2.1",
+ "@marklet/parser": "^1.5.1",
+ "@marklet/renderer": "^1.3.1",
+ "neat-scroll": "^2.0.1",
+ "vue": "^2.5.17"
}
}
\ No newline at end of file
diff --git a/packages/cli/README.md b/packages/cli/README.md
new file mode 100644
index 0000000..d58cf6b
--- /dev/null
+++ b/packages/cli/README.md
@@ -0,0 +1,8 @@
+# @marklet/cli
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fcli)](https://github.com/obstudio/Marklet/blob/master/packages/cli/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/cli.svg)](https://www.npmjs.com/package/@marklet/cli)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/cli.svg)](https://www.npmjs.com/package/@marklet/cli)
+
+A command line interface for marklet.
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 0acb1ba..609cf87 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,12 +1,12 @@
{
"name": "@marklet/cli",
- "version": "1.1.5",
+ "version": "1.1.15",
"description": "A command line interface for marklet.",
"author": "jjyyxx <1449843302@qq.com>",
"contributors": [
"shigma <1700011071@pku.edu.cn>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/cli",
"license": "MIT",
"repository": {
"type": "git",
@@ -19,8 +19,8 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/dev-server": "^1.0.12",
- "@marklet/parser": "^1.1.0",
+ "@marklet/dev-server": "^1.0.22",
+ "@marklet/parser": "^1.5.1",
"chalk": "^2.4.1",
"commander": "^2.18.0",
"js-yaml": "^3.12.0"
diff --git a/packages/core/README.md b/packages/core/README.md
new file mode 100644
index 0000000..4110d71
--- /dev/null
+++ b/packages/core/README.md
@@ -0,0 +1,8 @@
+# @marklet/core
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fcore)](https://github.com/obstudio/Marklet/blob/master/packages/core/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/core.svg)](https://www.npmjs.com/package/@marklet/core)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/core.svg)](https://www.npmjs.com/package/@marklet/core)
+
+Some core concepts of marklet.
diff --git a/packages/core/package.json b/packages/core/package.json
index c294c16..391ede9 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,12 +1,12 @@
{
"name": "@marklet/core",
- "version": "2.0.0",
+ "version": "3.2.2",
"description": "Some core conceptions of marklet.",
"author": "shigma <1700011071@pku.edu.cn>",
"contributors": [
"jjyyxx <1449843302@qq.com>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/core",
"license": "MIT",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
diff --git a/packages/core/src/document.ts b/packages/core/src/document.ts
new file mode 100644
index 0000000..407ee89
--- /dev/null
+++ b/packages/core/src/document.ts
@@ -0,0 +1,187 @@
+import {
+ Lexer,
+ parseRule,
+ StringLike,
+ TokenLike,
+ MacroMap,
+ LexerMacros,
+ LexerConfig,
+ LexerRule,
+ LexerRegexRule,
+} from './lexer'
+
+import { InlineLexer } from './inline'
+
+export interface DocumentOptions {
+ /** lexer rule regex macros */
+ macros?: LexerMacros
+ /** entrance context */
+ entrance?: string
+ /** default inline context */
+ inlineEntrance?: string
+ /** assign start/end to tokens */
+ requireBound?: boolean
+ /** other configurations */
+ config?: LexerConfig
+}
+
+type DocumentLexerRule = LexerRegexRule
+type NativeLexerContext = DocumentLexerRule[] | InlineLexer
+export type DocumentContexts = Record<string, LexerRule[] | InlineLexer>
+
+enum ContextOperation {
+ INCLUDE,
+ PUSH,
+ INLINE,
+ INITIAL,
+}
+
+interface ContextLog {
+ name: string
+ operation: ContextOperation
+}
+
+export class DocumentLexer extends Lexer {
+ private stackTrace: ContextLog[]
+  private contexts: Record<string, LexerRule[] | InlineLexer> = {}
+ private entrance: string
+ private inlineEntrance: string
+ private requireBound: boolean
+ private macros: MacroMap
+
+ constructor(contexts: DocumentContexts, options: DocumentOptions = {}) {
+ super(options.config)
+ this.entrance = options.entrance || 'main'
+ this.inlineEntrance = options.inlineEntrance || 'text'
+ this.requireBound = !!options.requireBound
+
+ this.macros = new MacroMap(options.macros || {})
+ for (const key in contexts) {
+ const context = contexts[key]
+ if (context instanceof Array) {
+ this.contexts[key] = context.map((rule) => parseRule(rule, this.macros))
+ } else {
+ this.contexts[key] = context
+ }
+ }
+ }
+
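+  /** resolve a context by name, expanding `include` rules and attaching optional prefix/postfix guard rules */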
+ getContext(
+ context: string | InlineLexer | LexerRule | LexerRule[],
+ operation: ContextOperation,
+ prefixRegex?: RegExp,
+ postfixRegex?: RegExp,
+ ) {
+ const name = typeof context === 'string' ? context : 'anonymous'
+ if (operation === ContextOperation.INITIAL) {
+ this.stackTrace = [{ name, operation }]
+ } else if (operation !== ContextOperation.INCLUDE) {
+ this.stackTrace.push({ name, operation })
+ } else {
+ this.stackTrace[this.stackTrace.length - 1].name = name
+ }
+ let rules = typeof context === 'string' ? this.contexts[context] : context
+ if (!rules) throw new Error(`Context '${context}' was not found. (context-not-found)`)
+ if (rules instanceof InlineLexer) {
+ return rules.fork(prefixRegex, postfixRegex)
+ } else {
+ if (!(rules instanceof Array)) rules = [rules]
+ for (let i = rules.length - 1; i >= 0; i -= 1) {
+ const rule: LexerRule = rules[i]
+ if ('include' in rule) {
+ const includes = this.getContext(rule.include, ContextOperation.INCLUDE)
+ if (includes instanceof Array) {
+ rules.splice(i, 1, ...includes)
+ } else {
+          throw new Error('Including an inline context is illegal. (no-include-inline)')
+ }
+ }
+ }
+ const result = rules.slice()
+ if (prefixRegex) result.unshift({ regex: prefixRegex, pop: true, test: true })
+ if (postfixRegex) result.push({ regex: postfixRegex, pop: true, test: true })
+ return result
+ }
+ }
+
+ initialize(context: NativeLexerContext) {
+ if (!(context instanceof Array)) {
+ const result = context.run(this.meta.source)
+ return {
+ index: result.index,
+ output: [result.output],
+ }
+ }
+ this.meta.output = []
+ this.meta.context = context
+ }
+
+ getContent(rule: DocumentLexerRule, capture: RegExpExecArray) {
+ let prefixRegex = rule.prefix_regex
+ let postfixRegex = rule.strict ? /^(?=[\s\S])/ : null
+ if (prefixRegex instanceof Function) {
+ prefixRegex = new RegExp(`^(?:${this.macros.resolve(prefixRegex.call(this, capture))})`)
+ }
+ const context = this.getContext(rule.push, ContextOperation.PUSH, prefixRegex, postfixRegex)
+ const result = this.run(this.meta.source, false, context)
+ const content = result.output.map((token) => {
+ if (this.requireBound && typeof token === 'object') {
+ token.start += this.meta.index
+ token.end += this.meta.index
+ }
+ return token
+ })
+ this.stackTrace.pop()
+ this.meta.source = this.meta.source.slice(result.index)
+ this.meta.index += result.index
+ return content
+ }
+
+ pushUnmatch() {
+ this.meta.output.push(this.meta.unmatch)
+ }
+
+ pushToken(rule: DocumentLexerRule, capture: RegExpExecArray, content: TokenLike[]) {
+ let token = rule.token
+ if (typeof token === 'function') {
+ token = token.call(this, capture, content, this.config)
+ } else if (token === undefined) {
+ if (rule.push) {
+ token = { content }
+ } else if (!rule.pop) {
+ token = capture[0]
+ }
+ }
+ if (token) {
+ if (typeof token === 'object') {
+ token.type = token.type || rule.type
+ if (this.requireBound) {
+ token.start = this.meta.start
+ token.end = this.meta.index
+ }
+ }
+ this.meta.output.push(token)
+ }
+ }
+
+ inline(source: string, context: string = this.inlineEntrance): string {
+ const inlineContext = this.getContext(context, ContextOperation.INLINE)
+ if (inlineContext instanceof Array) {
+      throw new Error(`'${context}' is not an inline context. (not-inline-context)`)
+ }
+ const result = inlineContext.run(source).output
+ this.stackTrace.pop()
+ return result
+ }
+
+ parse(source: string, context: string = this.entrance): TokenLike[] {
+ const initialContext = this.getContext(context, ContextOperation.INITIAL)
+ source = source.replace(/\r\n/g, '\n')
+ try {
+ return this.run(source, true, initialContext).output
+ } catch (error) {
+ console.log(this.stackTrace)
+ console.error(error)
+ }
+ }
+}
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index 9f71c60..69216ca 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -1,118 +1,34 @@
-export type StringLike = string | RegExp
-
-export type LexerConfig = Record<string, any>
-export type LexerMacros = Record<string, StringLike>
-
-export type TokenLike = string | LexerToken
-export interface LexerToken {
- type?: string
- text?: string
- content?: TokenLike[]
- start?: number
- end?: number
- [key: string]: any
-}
-
-export type LexerRule<
- S extends StringLike = RegExp,
- T extends LexerInstance = LexerInstance,
- R extends RegExpExecArray = RegExpExecArray,
-> = LexerIncludeRule | LexerRegexRule
-
-export interface LexerIncludeRule { include: string }
-export interface LexerRegexRule<
- S extends StringLike = RegExp,
- T extends LexerInstance = LexerInstance,
- R extends RegExpExecArray = RegExpExecArray,
-> {
- /** the regular expression to execute */
- regex?: S
- /**
- * a string containing all the rule flags:
- * - `b`: match when the context begins
- * - `e`: match end of line
- * - `i`: ignore case
- * - `p`: pop from the current context
- * - `s`: pop when no match is found
- * - `t`: match top level context
- */
- flags?: string
- /** default type of the token */
- type?: string
- /** whether the rule is to be executed */
- test?: string | boolean | ((this: T, config: LexerConfig) => boolean)
- /** a result token */
- token?: TokenLike | TokenLike[] | ((
- this: T, capture: R, content: TokenLike[]
- ) => TokenLike | TokenLike[])
- /** the inner context */
- push?: string | LexerRule[]
- /** pop from the current context */
- pop?: boolean
- /** pop when no match is found */
- strict?: boolean
- /** match when the context begins */
- context_begins?: boolean
- /** match top level context */
- top_level?: boolean
- /** whether to ignore case */
- ignore_case?: boolean
- /** match end of line */
- eol?: boolean
-}
-
-/** Transform a string-like object into a raw string. */
-export function getString(string: StringLike): string {
- return string instanceof RegExp ? string.source : string
-}
-
-export function parseRule(rule: LexerRule, macros: LexerMacros = {}): LexerRule {
- if (!('include' in rule)) {
- if (rule.regex === undefined) {
- rule.regex = /(?=[\s\S])/
- if (!rule.type) rule.type = 'default'
- }
- if (rule.test === undefined) rule.test = true
- let src = getString(rule.regex)
- let flags = ''
- for (const key in macros) {
- src = src.replace(new RegExp(`{{${key}}}`, 'g'), `(?:${macros[key]})`)
- }
- rule.flags = rule.flags || ''
- if (rule.flags.replace(/[biepst]/g, '')) {
- throw new Error(`'${rule.flags}' contains invalid rule flags.`)
- }
- if (rule.flags.includes('p')) rule.pop = true
- if (rule.flags.includes('s')) rule.strict = true
- if (rule.flags.includes('b')) rule.context_begins = true
- if (rule.flags.includes('t')) rule.top_level = true
- if (rule.flags.includes('e') || rule.eol) src += ' *(?:\\n+|$)'
- if (rule.flags.includes('i') || rule.ignore_case) flags += 'i'
- rule.regex = new RegExp('^(?:' + src + ')', flags)
- if (rule.push instanceof Array) rule.push.forEach(_rule => parseRule(_rule, macros))
- }
- return rule as LexerRule
-}
-
-export interface LexerInstance {
- config: LexerConfig
- parse(source: string): any
-}
-
-export interface InlineLexerResult {
- index: number
- output: string
-}
-
-export interface InlineLexerInstance extends LexerInstance {
- parse(source: string): InlineLexerResult
-}
-
-export enum MatchStatus {
- /** No match was found */
- NO_MATCH,
- /** Found match and continue */
- CONTINUE,
- /** Found match and pop */
- POP,
-}
+export {
+ Lexer,
+ parseRule,
+ getString,
+ isStringLike,
+ StringLike,
+ TokenLike,
+ MacroMap,
+ LexerRule,
+ LexerMeta,
+ LexerToken,
+ LexerConfig,
+ LexerMacros,
+ LexerResult,
+ LexerMetaRule,
+ LexerRegexRule,
+ LexerIncludeRule,
+} from './lexer'
+
+export {
+ DocumentOptions,
+ DocumentContexts,
+ DocumentLexer,
+} from './document'
+
+export {
+ InlineContext,
+ InlineLexer,
+} from './inline'
+
+export {
+ SyntaxOptions,
+ SyntaxLexer,
+} from './syntax'
diff --git a/packages/core/src/inline.ts b/packages/core/src/inline.ts
new file mode 100644
index 0000000..cc313ce
--- /dev/null
+++ b/packages/core/src/inline.ts
@@ -0,0 +1,73 @@
+import {
+ Lexer,
+ parseRule,
+ StringLike,
+ LexerResult,
+ LexerConfig,
+ LexerRegexRule,
+} from './lexer'
+
+type InlineLexerRule = LexerRegexRule
+export type InlineContext = LexerRegexRule[]
+
+class InlineCapture extends Array implements RegExpExecArray {
+ public index: number
+ public input: string
+ private lexer: InlineLexer
+
+ constructor(lexer: InlineLexer, array: RegExpExecArray) {
+ super(...array)
+ this.lexer = lexer
+ this.index = array.index
+ this.input = array.input
+ }
+
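+  /** run the lexer recursively on the last non-empty capture group (falling back to the whole match) */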
+ get inner(): string {
+ const match = this.reverse().find(item => !!item)
+ return match ? this.lexer.run(match).output : ''
+ }
+}
+
+export class InlineLexer extends Lexer {
+ private rules: InlineLexerRule[]
+
+ constructor(context: InlineContext, config?: LexerConfig) {
+ super(config)
+ this.rules = context.map(rule => parseRule(rule) as InlineLexerRule)
+ }
+
+ initialize() {
+ this.meta.output = ''
+ this.meta.context = this.rules
+ }
+
+ getCapture(rule: InlineLexerRule, capture: RegExpExecArray) {
+ return new InlineCapture(this, capture)
+ }
+
+ pushToken(rule: InlineLexerRule, capture: InlineCapture) {
+ let token = rule.token
+ if (typeof token === 'function') {
+ token = token.call(this, capture, null, this.config)
+ } else if (token === undefined) {
+ token = capture[0]
+ }
+ this.meta.output += token
+ }
+
+ pushUnmatch() {
+ this.meta.output += this.meta.unmatch
+ }
+
+ parse(source: string): LexerResult {
+ return this.run(source.replace(/\r\n/g, '\n'), true)
+ }
+
+ fork(prefix?: RegExp, postfix?: RegExp): InlineLexer {
+ const fork = new InlineLexer([])
+ fork.rules = this.rules.slice()
+ if (prefix) fork.rules.unshift({ regex: prefix, pop: true, test: true })
+ if (postfix) fork.rules.push({ regex: postfix, pop: true, test: true })
+ return fork
+ }
+}
diff --git a/packages/core/src/lexer.ts b/packages/core/src/lexer.ts
new file mode 100644
index 0000000..69e9067
--- /dev/null
+++ b/packages/core/src/lexer.ts
@@ -0,0 +1,273 @@
+export type StringLike = string | RegExp
+export type TokenLike = string | LexerToken
+
+export type LexerConfig = Record<string, any>
+export type LexerMacros = Record<string, StringLike>
+
+export interface LexerToken {
+ type?: string
+ text?: string
+ content?: TokenLike[]
+ start?: number
+ end?: number
+ [key: string]: any
+}
+
+export type LexerRule<
+ S extends StringLike = StringLike,
+ T extends Lexer = Lexer,
+ R extends RegExpExecArray = RegExpExecArray,
+> = LexerIncludeRule | LexerMetaRule | LexerRegexRule
+
+export interface LexerMetaRule { meta: string }
+
+export interface LexerIncludeRule { include: string }
+
+export interface LexerRegexRule<
+ S extends StringLike = RegExp,
+ T extends Lexer = Lexer,
+ R extends RegExpExecArray = RegExpExecArray,
+> {
+ /** the regular expression to execute */
+ regex?: S
+  /** a regex placed at the beginning of the inner context */
+ prefix_regex?: S | ((this: T, capture: R) => StringLike)
+ /**
+ * a string containing all the rule flags:
+ * - `b`: match when the context begins
+ * - `e`: match end of line
+ * - `i`: ignore case
+ * - `p`: pop from the current context
+ * - `s`: strict mode
+ * - `t`: match top level context
+ */
+ flags?: string
+ /** default type of the token */
+ type?: string
+ /** whether the rule is to be executed */
+ test?: string | boolean | ((this: T, config: LexerConfig) => boolean)
+ /** a result token */
+ token?: TokenLike | ((this: T, capture: R, content: TokenLike[], config: LexerConfig) => TokenLike)
+ /** token scope */
+ scope?: string
+ /** token scope mapped with captures */
+  captures?: Record<number, string>
+ /** the inner context */
+ push?: string | LexerRule | LexerRule[]
+ /** pop from the current context */
+ pop?: boolean | ((this: T, capture: R) => boolean)
+ /** strict mode: pop when no match is found */
+ strict?: boolean
+ /** match when the context begins */
+ context_begins?: boolean
+ /** match top level context */
+ top_level?: boolean
+ /** whether to ignore case */
+ ignore_case?: boolean
+ /** match end of line */
+ eol?: boolean
+}
+
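+/** precompiled {{name}} macro substitutions applied to rule regexes */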
+export class MacroMap {
+  private data: Record<string, { regex: RegExp, macro: string }> = {}
+
+  constructor(macros: Record<string, StringLike> = {}) {
+ for (const key in macros) {
+ this.data[key] = {
+ regex: new RegExp(`{{${key}}}`, 'g'),
+ macro: `(?:${getString(macros[key])})`,
+ }
+ }
+ }
+
+ resolve(source: StringLike): string {
+ source = getString(source)
+ for (const key in this.data) {
+ source = source.replace(this.data[key].regex, this.data[key].macro)
+ }
+ return source
+ }
+}
+
+const noMacro = new MacroMap()
+
+/** transform a string-like object into a raw string */
+export function getString(source: StringLike): string {
+ return source instanceof RegExp ? source.source : source
+}
+
+export function isStringLike(source: any): boolean {
+ return typeof source === 'string' || source instanceof RegExp
+}
+
+/** transform lexer rules with string into ones with regexp */
+export function parseRule(rule: LexerRule, macros: MacroMap = noMacro): LexerRule {
+ if (!('include' in rule || 'meta' in rule)) {
+ if (rule.regex === undefined) {
+ rule.regex = /(?=[\s\S])/
+ if (!rule.type) rule.type = 'default'
+ }
+ if (rule.test === undefined) rule.test = true
+ let source = macros.resolve(rule.regex)
+ let flags = ''
+ rule.flags = rule.flags || ''
+ if (rule.flags.replace(/[biepst]/g, '')) {
+ throw new Error(`'${rule.flags}' contains invalid rule flags. (invalid-flags)`)
+ }
+ if (rule.flags.includes('s')) rule.strict = true
+ if (rule.flags.includes('b')) rule.context_begins = true
+ if (rule.flags.includes('t')) rule.top_level = true
+ if (rule.flags.includes('p') && !rule.pop) rule.pop = true
+ if (rule.flags.includes('e') || rule.eol) source += '[ \t]*(?:\n+|$)'
+ if (rule.flags.includes('i') || rule.ignore_case) flags += 'i'
+ rule.regex = new RegExp('^(?:' + source + ')', flags)
+ const prefix = rule.prefix_regex
+ if (isStringLike(prefix)) {
+ rule.prefix_regex = new RegExp(`^(?:${macros.resolve(prefix as StringLike)})`)
+ }
+ const push = rule.push
+ if (push instanceof Array) {
+ push.forEach(_rule => parseRule(_rule, macros))
+ } else if (typeof push === 'object') {
+ rule.push = parseRule(push, macros)
+ }
+ }
+ return rule as LexerRule
+}
+
+enum MatchStatus {
+ /** No match was found */
+ NO_MATCH,
+ /** Found match and continue */
+ CONTINUE,
+ /** Found match and pop */
+ POP,
+}
+
+export interface LexerResult<R extends string | TokenLike[] = string | TokenLike[]> {
+ /** current index of the source string */
+ index: number
+ /** output string or array */
+ output: R
+}
+
+export interface LexerMeta<R extends string | TokenLike[] = string | TokenLike[]> extends Partial<LexerResult<R>> {
+ /** record where the match starts */
+ start?: number
+ /** a copy of source string */
+ source?: string
+  /** a string collecting unmatched chars */
+ unmatch?: string
+ /** whether running at top level */
+ isTopLevel?: boolean
+ /** current lexing context */
+ context?: LexerRegexRule[]
+}
+
+export abstract class Lexer {
+ public meta: LexerMeta
+ public config: LexerConfig
+
+ constructor(config?: LexerConfig) {
+ this.config = config || {}
+ }
+
+ initialize?(...args: any[]): void | LexerResult
+ getCapture?(rule: LexerRegexRule, capture: RegExpExecArray): RegExpExecArray
+ getContent?(rule: LexerRegexRule, capture: RegExpExecArray): TokenLike[]
+ pushToken?(rule: LexerRegexRule, capture: RegExpExecArray, content: TokenLike[]): void
+ pushUnmatch?(): void
+
+ run(source: string, isTopLevel?: boolean, ...args: any[]): LexerResult {
+ // store meta data from lower level
+ const _meta = this.meta
+ this.meta = {
+ source,
+ isTopLevel,
+ index: 0,
+ unmatch: '',
+ }
+
+ // initialize or simply get the result
+ const final = this.initialize(...args)
+ if (final) return this.meta = _meta, final
+
+ // walk through the source string
+ while (this.meta.source) {
+ let status: MatchStatus = MatchStatus.NO_MATCH
+ for (const rule of this.meta.context) {
+ // Step 1: test before matching
+ if (rule.top_level && !this.meta.isTopLevel) continue
+ if (rule.context_begins && this.meta.index) continue
+
+ let test = rule.test
+ if (typeof test === 'string') {
+ if (test.charAt(0) === '!') {
+ test = !this.config[test.slice(1)]
+ } else {
+ test = !!this.config[test]
+ }
+ } else if (typeof test === 'function') {
+ test = !!test.call(this, this.config)
+ }
+ if (!test) continue
+
+ // Step 2: exec regex and get capture
+ const match = rule.regex.exec(this.meta.source)
+ if (!match) continue
+ this.meta.source = this.meta.source.slice(match[0].length)
+ this.meta.start = this.meta.index
+ this.meta.index += match[0].length
+ const capture = this.getCapture ? this.getCapture(rule, match) : match
+
+ // Step 3: reset match status
+ let pop = rule.pop
+ if (typeof pop === 'function') pop = pop.call(this, capture)
+ status = pop ? MatchStatus.POP : MatchStatus.CONTINUE
+
+ // Step 4: get inner tokens
+ const content = rule.push && this.getContent ? this.getContent(rule, capture) : []
+
+ // Step 5: detect endless loop
+ if (!rule.pop && this.meta.start === this.meta.index) {
+ throw new Error(`Endless loop at '${
+ this.meta.source.slice(0, 10)
+ } ${
+ this.meta.source.length > 10 ? '...' : ''
+ }'. (endless-loop)`)
+ }
+
+ // Step 6: handle unmatched chars
+ if (this.pushUnmatch && this.meta.unmatch) {
+ this.pushUnmatch()
+ this.meta.unmatch = ''
+ }
+
+ // Step 7: push generated token
+ this.pushToken(rule, capture, content)
+
+ // Step 8: break loop
+ break
+ }
+
+ if (status === MatchStatus.POP) break
+ if (status === MatchStatus.NO_MATCH) {
+ this.meta.unmatch += this.meta.source.charAt(0)
+ this.meta.source = this.meta.source.slice(1)
+ this.meta.index += 1
+ }
+ }
+
+  // handle remaining unmatched chars
+ if (this.pushUnmatch && this.meta.unmatch) this.pushUnmatch()
+
+ const result: LexerResult = {
+ index: this.meta.index,
+ output: this.meta.output,
+ }
+
+ // restore meta data for lower level
+ this.meta = _meta
+ return result
+ }
+}
diff --git a/packages/core/src/syntax.ts b/packages/core/src/syntax.ts
new file mode 100644
index 0000000..f9a3e7e
--- /dev/null
+++ b/packages/core/src/syntax.ts
@@ -0,0 +1,36 @@
+import {
+ Lexer,
+ StringLike,
+ TokenLike,
+ parseRule,
+ getString,
+ LexerRule,
+ LexerMacros,
+ MacroMap,
+} from './lexer'
+
+export interface SyntaxOptions {
+ name?: string
+ alias?: string[]
+  macros?: Record<string, StringLike>
+  contexts?: Record<string, LexerRule[]>
+}
+
+export class SyntaxLexer extends Lexer {
+ public name: string
+ public alias: string[]
+ private macros: MacroMap
+  private contexts: Record<string, LexerRule[]> = {}
+
+ constructor(options: SyntaxOptions) {
+ super()
+ this.name = options.name || ''
+ this.alias = options.alias || []
+ this.macros = new MacroMap(options.macros || {})
+
+ for (const key in options.contexts) {
+ const context = options.contexts[key]
+ this.contexts[key] = context.map(rule => parseRule(rule, this.macros))
+ }
+ }
+}
diff --git a/packages/detok/README.md b/packages/detok/README.md
new file mode 100644
index 0000000..78c98a7
--- /dev/null
+++ b/packages/detok/README.md
@@ -0,0 +1,8 @@
+# @marklet/detok
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fdetok)](https://github.com/obstudio/Marklet/blob/master/packages/detok/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/detok.svg)](https://www.npmjs.com/package/@marklet/detok)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/detok.svg)](https://www.npmjs.com/package/@marklet/detok)
+
+A detokenizer for marklet.
diff --git a/packages/detok/index.js b/packages/detok/index.js
new file mode 100644
index 0000000..94b8c48
--- /dev/null
+++ b/packages/detok/index.js
@@ -0,0 +1 @@
+module.exports = require('./dist/document').default
\ No newline at end of file
diff --git a/packages/detok/package.json b/packages/detok/package.json
index dfdb3af..a155548 100644
--- a/packages/detok/package.json
+++ b/packages/detok/package.json
@@ -1,15 +1,16 @@
{
"name": "@marklet/detok",
- "version": "1.0.10",
+ "version": "1.1.3",
"description": "A detokenizer for marklet.",
"author": "jjyyxx <1449843302@qq.com>",
"contributors": [
"shigma <1700011071@pku.edu.cn>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/detok",
"license": "MIT",
- "main": "dist/index.js",
- "typings": "dist/index.d.ts",
+ "main": "index.js",
+ "module": "dist/document.js",
+ "typings": "dist/document.d.ts",
"files": [
"dist"
],
@@ -24,6 +25,7 @@
"cheerio": "^1.0.0-rc.2"
},
"devDependencies": {
- "@marklet/core": "^2.0.0"
+ "@marklet/core": "^3.2.2",
+ "@marklet/parser": "^1.5.1"
}
}
\ No newline at end of file
diff --git a/packages/detok/src/document.ts b/packages/detok/src/document.ts
new file mode 100644
index 0000000..f4946d5
--- /dev/null
+++ b/packages/detok/src/document.ts
@@ -0,0 +1,85 @@
+import { TokenLike, LexerToken } from '@marklet/core'
+import { Tokens } from '@marklet/parser'
+import inline from './inline'
+
+type DocumentDetokenizer = (tok: LexerToken) => string
+
+const alignMap = {
+ left: '<',
+ center: '=',
+ right: '>'
+}
+
+let listLevel = 0
+
+function toCamel(string: string) {
+ return string.replace(/-[a-z]/g, match => match.slice(1).toUpperCase())
+}
+
+const detokenizers: Record<string, DocumentDetokenizer> = {
+ heading: (token: Tokens.Heading) =>
+ '#'.repeat(token.level)
+ + ' '
+ + inline(token.text)
+ + (token.center ? ' #' : ''),
+ section: (token: Tokens.Section) =>
+ '^'.repeat(token.level)
+ + ' '
+ + inline(token.text)
+ + (token.initial === 'closed' ? ' ^' : '') // FIXME: currently not taking `section_default` into consideration
+ + '\n'
+ + detokenize(token.content),
+ quote: (token: Tokens.Quote) =>
+ '>'
+ + token.style
+ + ' '
+ + detokenize(token.content),
+ separator(token: Tokens.Separator) {
+ const sep = token.thick ? '=' : '-'
+ switch (token.style) {
+ case 'normal': return sep.repeat(3)
+ case 'dashed': return sep + (' ' + sep).repeat(2)
+ case 'dotted': return sep + ('.' + sep).repeat(2)
+ }
+ },
+ codeblock: (token: Tokens.CodeBlock) =>
+ '```'
+ + token.lang
+ + '\n'
+ + token.text
+ + '\n```',
+ usage: (token: Tokens.Usage) =>
+ '? '
+ + inline(token.text)
+ + '\n'
+ + detokenize(token.content),
+ usages: (token: Tokens.Usages) => detokenize(token.content),
+ list: (token: Tokens.List) => token.children.map(detokenize).join(''),
+ listItem(token: Tokens.ListItem) {
+ let result = ' '.repeat(listLevel * 2)
+ + (token.order ? token.order + '. ' : '- ')
+ + detokenize(token.text)
+ listLevel += 1
+ result += (token.children || []).map(detokenize).join('')
+ listLevel -= 1
+ return result
+ },
+ inlinelist: (token: Tokens.InlineList) =>
+ '+ '
+ + token.content.map(inline).join(' + '),
+ table: (token: Tokens.Table) =>
+ token.columns.map(col => (col.bold ? '*' : '') + alignMap[col.align]).join('\t')
+ + '\n'
+ + token.data.map(row => row.map(inline).join('\t')).join('\n'),
+ paragraph: (token: Tokens.Paragraph) => inline(token.text),
+}
+
+export default function detokenize(input: TokenLike[] | TokenLike): string {
+ if (Array.isArray(input)) {
+ return input.map(item => detokenize(item)).join('\n\n')
+ } else {
+ return typeof input === 'string'
+ ? inline(input)
+ : detokenizers[toCamel(input.type)](input)
+ }
+}
diff --git a/packages/detok/src/index.ts b/packages/detok/src/index.ts
deleted file mode 100644
index 1ba5b73..0000000
--- a/packages/detok/src/index.ts
+++ /dev/null
@@ -1,117 +0,0 @@
-import cheerio from 'cheerio'
-import { TokenLike, LexerToken } from '@marklet/core'
-type InlineTokenTypes = 'br' | 'code' | 'span' | 'em' | 'strong' | 'del'
-type BlockTokenTypes = 'text' | 'heading' | 'section' | 'quote' | 'separator' | 'codeblock' | 'usage' | 'usages' | 'list' | 'inlinelist' | 'table' | 'paragraph'
-
-function iterate(el: CheerioElement) {
- let result = ''
- for (const child of el.children) {
- if (child.type === 'text') {
- result += child.nodeValue
- } else if (child.type === 'tag') {
- result += textDetokenizers[child.tagName](child)
- }
- }
- return result
-}
-
-function makeSimpleWrap(leftWrap: string, rightWrap = leftWrap) {
- return (el: CheerioElement) => leftWrap + iterate(el) + rightWrap
-}
-
-
-export const textDetokenizers: Record<InlineTokenTypes, (el: CheerioElement) => string> = {
- br() {
- return '\n'
- },
- code(el) {
- const code = el.firstChild.nodeValue
- if (el.attribs.class === 'package') {
- return '{{' + code + '}}'
- }
- const backticks = code.match(/`+/g)
- const wrap = '`'.repeat(backticks === null ? 1 : Math.max(...backticks.map(b => b.length)) + 1)
- return wrap + code + wrap
- },
- span(el) {
- const content = iterate(el)
- return el.attribs.class === 'comment' ? '((' + content + '))' : '_' + content + '_'
- },
-
- em: makeSimpleWrap('*'),
- strong: makeSimpleWrap('**'),
- del: makeSimpleWrap('-')
-}
-
-export const detokenizers: Record<BlockTokenTypes, (token: LexerToken | string) => string> = {
- text(token: string) {
- const $ = cheerio.load(token)
- const root = $('body')
- let result = ''
- root.each((_, el) => result += iterate(el))
-
- return result
- },
- heading(token: LexerToken) {
- const prefix = '#'.repeat(token.level)
- return prefix + ' ' + detokenize(token.text)
- + (token.center ? ' ' + prefix : '')
- },
- section(token: LexerToken) {
- return '^'.repeat(token.level) + ' ' + detokenize(token.text)
- },
- quote(token: LexerToken) {
- return '>' + token.style + ' ' + detokenize(token.content)
- },
- separator(token: LexerToken) {
- const sep = token.thick ? '=' : '-'
- switch (token.style) {
- case 'normal':
- return sep.repeat(3)
- case 'dashed':
- return sep + (' ' + sep).repeat(2)
- case 'dotted':
- return sep + ('.' + sep).repeat(2)
- }
- },
- codeblock(token: LexerToken) {
- return '```' + token.lang + '\n' + token.text + '\n```'
- },
- usage(token: LexerToken) {
- return '? ' + detokenize(token.text) + '\n' + detokenize(token.content)
- },
- usages(token: LexerToken) {
- return detokenize(token.content)
- },
- list(token: LexerToken) {
- let result = ''
- let count = 0
- for (const item of token.content) {
-      const bullet = (<LexerToken>item).ordered ? ++count : '-'
-      result += ' '.repeat(token.indent) + bullet + ' ' + detokenize((<LexerToken>item).content)
- }
- return result
- },
- inlinelist(token: LexerToken) {
- return '+' + token.content.join('+') + '+'
- },
- table(/* token */) {
- // TODO: add detok when lexer implement this
- return ''
- },
- paragraph(token: LexerToken) {
- return detokenize(token.text)
- }
-}
-
-export function detokenize(input: TokenLike[] | TokenLike) {
- if (Array.isArray(input)) {
- let result = ''
- for (const token of input) {
- result += typeof token === 'string' ? detokenizers.text(token) : detokenizers[token.type](token) + '\n\n'
- }
- return result
- } else {
- return typeof input === 'string' ? detokenizers.text(input) : detokenizers[input.type](input)
- }
-}
diff --git a/packages/detok/src/inline.ts b/packages/detok/src/inline.ts
new file mode 100644
index 0000000..f1523ef
--- /dev/null
+++ b/packages/detok/src/inline.ts
@@ -0,0 +1,47 @@
+import cheerio from 'cheerio'
+
+type InlineDetokenizer = (el: CheerioElement) => string
+
+export function iterate(el: CheerioElement) {
+ let result = ''
+ for (const child of el.children) {
+ if (child.type === 'text') {
+ result += child.nodeValue
+ } else if (child.type === 'tag') {
+ result += detokenizers[child.tagName](child)
+ }
+ }
+ return result
+}
+
+function makeWrap(leftWrap: string, rightWrap: string = leftWrap): InlineDetokenizer {
+ return (el: CheerioElement) => leftWrap + iterate(el) + rightWrap
+}
+
+const detokenizers: Record<string, InlineDetokenizer> = {
+ em: makeWrap('*'),
+ strong: makeWrap('**'),
+ del: makeWrap('-'),
+ br: () => '\n',
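+  // wrap inline code with one more backtick than the longest backtick run it contains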
+ code(el) {
+ const code = el.firstChild.nodeValue
+ if (el.attribs.class === 'package') {
+ return '{{' + code + '}}'
+ }
+ const backticks = code.match(/`+/g)
+ const wrap = '`'.repeat(backticks === null ? 1 : Math.max(...backticks.map(b => b.length)) + 1)
+ return wrap + code + wrap
+ },
+ span(el) {
+ const content = iterate(el)
+ return el.attribs.class === 'comment' ? '((' + content + '))' : '_' + content + '_'
+ },
+ a: (el) => `[${el.firstChild.nodeValue}|${el.attribs['data-raw-url']}]`,
+ img: (el) => `[${el.firstChild.nodeValue}|!${el.attribs.src}]`
+}
+
+export default function detokenize(token: string) {
+ let result = ''
+ cheerio.load(token)('body').each((_, el) => result += iterate(el))
+ return result
+}
diff --git a/packages/dev-server/README.md b/packages/dev-server/README.md
new file mode 100644
index 0000000..905fe1d
--- /dev/null
+++ b/packages/dev-server/README.md
@@ -0,0 +1,8 @@
+# @marklet/dev-server
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fdev-server)](https://github.com/obstudio/Marklet/blob/master/packages/dev-server/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/dev-server.svg)](https://www.npmjs.com/package/@marklet/dev-server)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/dev-server.svg)](https://www.npmjs.com/package/@marklet/dev-server)
+
+A development server for marklet.
diff --git a/packages/dev-server/comp/edit.vue b/packages/dev-server/comp/edit.vue
index 5ebb62c..8fe1e49 100644
--- a/packages/dev-server/comp/edit.vue
+++ b/packages/dev-server/comp/edit.vue
@@ -17,7 +17,7 @@ module.exports = {
-
+
diff --git a/packages/dev-server/comp/watch.vue b/packages/dev-server/comp/watch.vue
index 56752f2..2ddfe7b 100644
--- a/packages/dev-server/comp/watch.vue
+++ b/packages/dev-server/comp/watch.vue
@@ -15,7 +15,7 @@ module.exports = {
-
+
diff --git a/packages/dev-server/package.json b/packages/dev-server/package.json
index 4b2c846..a0e9c11 100644
--- a/packages/dev-server/package.json
+++ b/packages/dev-server/package.json
@@ -1,12 +1,12 @@
{
"name": "@marklet/dev-server",
- "version": "1.0.12",
+ "version": "1.0.22",
"description": "A develop server for marklet.",
"author": "jjyyxx <1449843302@qq.com>",
"contributors": [
"shigma <1700011071@pku.edu.cn>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/dev-server",
"license": "MIT",
"main": "dist/server.js",
"typings": "dist/server.d.ts",
@@ -21,8 +21,8 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/parser": "^1.1.0",
- "@marklet/renderer": "^1.1.2",
+ "@marklet/parser": "^1.5.1",
+ "@marklet/renderer": "^1.3.1",
"vue": "^2.5.17",
"ws": "^6.0.0"
}
diff --git a/packages/dev-server/src/client.ts b/packages/dev-server/src/client.ts
index a0a006d..169cdd6 100644
--- a/packages/dev-server/src/client.ts
+++ b/packages/dev-server/src/client.ts
@@ -1,6 +1,6 @@
import Vue from 'vue'
import * as renderer from '@marklet/renderer'
-import { Lexer, LexerConfig } from '@marklet/parser'
+import { DocumentLexer, LexerConfig } from '@marklet/parser'
declare module 'vue/types/vue' {
interface Vue {
@@ -70,7 +70,7 @@ export const Marklet = {
edit: require('@/edit.vue'),
},
parse(source: string, config: LexerConfig) {
- return new Lexer(config).parse(source)
+ return new DocumentLexer(config).parse(source)
},
start({ el, type }: { el: string | HTMLElement, type: 'watch' | 'edit' }) {
document.title = 'Marklet - ' + type
diff --git a/packages/inline/package.json b/packages/inline/package.json
deleted file mode 100644
index e6038e5..0000000
--- a/packages/inline/package.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
- "name": "@marklet/inline",
- "version": "1.0.0",
- "description": "A fast inline lexer for marklet.",
- "author": "shigma <1700011071@pku.edu.cn>",
- "contributors": [
- "jjyyxx <1449843302@qq.com>"
- ],
- "homepage": "https://github.com/obstudio/Marklet",
- "license": "MIT",
- "main": "dist/index.js",
- "typings": "dist/index.d.ts",
- "files": [
- "dist"
- ],
- "repository": {
- "type": "git",
- "url": "git+https://github.com/obstudio/Marklet.git"
- },
- "bugs": {
- "url": "https://github.com/obstudio/Marklet/issues"
- },
- "dependencies": {
- "@marklet/core": "^2.0.0"
- }
-}
diff --git a/packages/inline/src/index.ts b/packages/inline/src/index.ts
deleted file mode 100644
index b52b68d..0000000
--- a/packages/inline/src/index.ts
+++ /dev/null
@@ -1,112 +0,0 @@
-import {
- StringLike,
- LexerConfig,
- LexerRegexRule,
- InlineLexerInstance,
- InlineLexerResult,
- MatchStatus,
- parseRule,
-} from '@marklet/core'
-
-class InlineCapture extends Array implements RegExpExecArray {
- index: number
- input: string
- lexer: InlineLexer
-
- constructor(lexer: InlineLexer, array: RegExpExecArray) {
- super(...array)
- this.lexer = lexer
- this.index = array.index
- this.input = array.input
- }
-
- get inner(): string {
- const match = this.reverse().find(item => !!item)
- return match ? this.lexer.parse(match).output : ''
- }
-}
-
-type InlineLexerRule = LexerRegexRule
-
-export type InlineLexerRules = InlineLexerRule[]
-
-export class InlineLexer implements InlineLexerInstance {
- config: LexerConfig
- private rules: InlineLexerRule[]
-
- constructor(rules: InlineLexerRules, config: LexerConfig = {}) {
- this.rules = rules.map(rule => parseRule(rule) as InlineLexerRule)
- this.config = config || {}
- }
-
- private _parse(source: string): InlineLexerResult {
- let index = 0, unmatch = '', output = ''
- while (source) {
- let status: MatchStatus = MatchStatus.NO_MATCH
- for (const rule of this.rules) {
- if (rule.context_begins && index) continue
-
- // test
- let test = rule.test
- if (typeof test === 'string') {
- if (test.charAt(0) === '!') {
- test = !this.config[test.slice(1)]
- } else {
- test = !!this.config[test]
- }
- } else if (typeof test === 'function') {
- test = test.call(this, this.config)
- }
- if (!test) continue
-
- // regex
- const match = rule.regex.exec(source)
- if (!match) continue
- if (!match[0].length && !rule.pop) {
- throw new Error(`Endless loop at '${
- source.slice(0, 10)
- } ${
- source.length > 10 ? '...' : ''
- }'.`)
- }
- const capture = new InlineCapture(this, match)
- source = source.slice(capture[0].length)
- index += capture[0].length
-
- // pop
- status = rule.pop ? MatchStatus.POP : MatchStatus.CONTINUE
-
- // resolve unmatch
- if (unmatch) {
- output += unmatch
- unmatch = ''
- }
-
- // token
- let token = rule.token
- if (typeof token === 'function') {
- token = token.call(this, capture)
- } else if (token === undefined) {
- token = capture[0]
- }
- output += token
-
- break
- }
-
- if (status === MatchStatus.POP) break
- if (status === MatchStatus.NO_MATCH) {
- unmatch += source.charAt(0)
- source = source.slice(1)
- index += 1
- }
- }
-
- if (unmatch) output += unmatch
- return { index, output }
- }
-
- parse(source: string): InlineLexerResult {
- return this._parse(source.replace(/\r\n/g, '\n'))
- }
-}
\ No newline at end of file
diff --git a/packages/inline/tsconfig.json b/packages/inline/tsconfig.json
deleted file mode 100644
index 9677f08..0000000
--- a/packages/inline/tsconfig.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "extends": "../../tsconfig.base.json",
- "include": [
- "src"
- ],
- "compilerOptions": {
- "outDir": "dist",
- "rootDir": "src"
- },
- "references": [
- { "path": "../core" }
- ]
-}
\ No newline at end of file
diff --git a/packages/lexer/src/index.ts b/packages/lexer/src/index.ts
deleted file mode 100644
index 27faa37..0000000
--- a/packages/lexer/src/index.ts
+++ /dev/null
@@ -1,216 +0,0 @@
-import {
- StringLike,
- LexerMacros,
- LexerConfig,
- LexerRule,
- LexerInstance,
- LexerRegexRule,
- InlineLexerInstance,
- TokenLike,
- MatchStatus,
- parseRule,
- getString,
-} from '@marklet/core'
-
-export interface LexerOptions {
- /** lexer rule regex macros */
- macros?: LexerMacros
- /** entrance context */
- entrance?: string
- /** default inline context */
- inlineEntrance?: string
- /** assign start/end to tokens */
- requireBound?: boolean
- /** other configurations */
- config?: LexerConfig
-}
-
-type NativeLexerContext = LexerRegexRule[] | InlineLexerInstance
-export type LexerContexts = Record<string, LexerRule<StringLike>[] | InlineLexerInstance>
-
-interface LexerResult {
- index: number
- result: TokenLike[]
-}
-
-export class Lexer implements LexerInstance {
- config: LexerConfig
- private contexts: Record<string, NativeLexerContext> = {}
- private entrance: string
- private inlineEntrance: string
- private requireBound: boolean
-
- constructor(contexts: LexerContexts, options: LexerOptions = {}) {
- this.config = options.config || {}
- this.entrance = options.entrance || 'main'
- this.inlineEntrance = options.inlineEntrance || 'text'
- this.requireBound = !!options.requireBound
-
- const _macros = options.macros || {}
- const macros: Record<string, string> = {}
- for (const key in _macros) {
- macros[key] = getString(_macros[key])
- }
- for (const key in contexts) {
- const context = contexts[key]
- this.contexts[key] = context instanceof Array
- ? context.map(rule => parseRule(rule, macros))
- : context
- }
- }
-
- private getContext(context: string | InlineLexerInstance | LexerRule[], strictMode?: boolean) {
- const result = typeof context === 'string' ? this.contexts[context] : context
- if (!result) throw new Error(`Context '${context}' was not found.`)
- if (result instanceof Array) {
- for (let i = result.length - 1; i >= 0; i -= 1) {
- const rule: LexerRule = result[i]
- if ('include' in rule) {
- const includes = this.getContext(rule.include)
- if (includes instanceof Array) {
- result.splice(i, 1, ...includes)
- } else {
- result.splice(i, 1, {
- regex: /^(?=[\s\S])/,
- push: rule.include,
- strict: true,
- })
- }
- }
- }
- if (strictMode) {
- result.push({
- regex: /^(?=[\s\S])/,
- pop: true,
- })
- }
- }
- return result as NativeLexerContext
- }
-
- private _parse(source: string, context: NativeLexerContext, isTopLevel?: boolean): LexerResult {
- let index = 0, unmatch = ''
- const result: TokenLike[] = []
-
- // apply inline lexer
- if (!(context instanceof Array)) {
- const result = context.parse(source)
- return {
- index: result.index,
- result: [result.output],
- }
- }
-
- while (source) {
- let status: MatchStatus = MatchStatus.NO_MATCH
- for (const rule of context) {
- if (rule.top_level && !isTopLevel) continue
- if (rule.context_begins && index) continue
-
- // test
- let test = rule.test
- if (typeof test === 'string') {
- if (test.charAt(0) === '!') {
- test = !this.config[test.slice(1)]
- } else {
- test = !!this.config[test]
- }
- } else if (typeof test === 'function') {
- test = test.call(this, this.config)
- }
- if (!test) continue
-
- // regex
- const capture = rule.regex.exec(source)
- if (!capture) continue
- source = source.slice(capture[0].length)
- const start = index
- index += capture[0].length
-
- // pop
- const pop = rule.pop
- status = pop ? MatchStatus.POP : MatchStatus.CONTINUE
-
- // push
- let content: TokenLike[] = [], push = rule.push
- if (push) {
- const context = this.getContext(push, rule.strict)
- const subtoken = this._parse(source, context)
- content = subtoken.result.map((tok) => {
- if (this.requireBound && typeof tok === 'object') {
- tok.start += index
- tok.end += index
- }
- return tok
- })
- source = source.slice(subtoken.index)
- index += subtoken.index
- }
-
- // detect error
- if (!pop && index === start) {
- throw new Error(`Endless loop at '${
- source.slice(0, 10)
- } ${
- source.length > 10 ? '...' : ''
- }'.`)
- }
-
- // resolve unmatch
- if (unmatch) {
- result.push(unmatch)
- unmatch = ''
- }
-
- // token
- let token = rule.token
- if (typeof token === 'function') {
- token = token.call(this, capture, content)
- } else if (token === undefined) {
- if (push) {
- token = content
- } else if (!pop) {
- token = capture[0]
- }
- }
- if (token instanceof Array) token = { content: token }
- if (token) {
- if (typeof token === 'object') {
- token.type = token.type || rule.type
- if (this.requireBound) {
- token.start = start
- token.end = index
- }
- }
- result.push(token)
- }
-
- break
- }
-
- if (status === MatchStatus.POP) break
- if (status === MatchStatus.NO_MATCH) {
- unmatch += source.charAt(0)
- source = source.slice(1)
- index += 1
- }
- }
-
- if (unmatch) result.push(unmatch)
- return { index, result }
- }
-
- inline(source: string, context: string = this.inlineEntrance): string {
- const inlineContext = this.getContext(context)
- if (inlineContext instanceof Array) {
- throw new Error(`'${context}' is not an inline context.`)
- }
- return inlineContext.parse(source).output
- }
-
- parse(source: string, context: string = this.entrance): TokenLike[] {
- const initialContext = this.getContext(context)
- source = source.replace(/\r\n/g, '\n')
- return this._parse(source, initialContext, true).result
- }
-}
diff --git a/packages/lexer/tsconfig.json b/packages/lexer/tsconfig.json
deleted file mode 100644
index 9677f08..0000000
--- a/packages/lexer/tsconfig.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "extends": "../../tsconfig.base.json",
- "include": [
- "src"
- ],
- "compilerOptions": {
- "outDir": "dist",
- "rootDir": "src"
- },
- "references": [
- { "path": "../core" }
- ]
-}
\ No newline at end of file
diff --git a/packages/marklet/README.md b/packages/marklet/README.md
new file mode 100644
index 0000000..1f9910c
--- /dev/null
+++ b/packages/marklet/README.md
@@ -0,0 +1,51 @@
+# Marklet
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg)](https://github.com/obstudio/Marklet/blob/master/package.json)
+[![npm](https://img.shields.io/npm/v/markletjs.svg)](https://www.npmjs.com/package/markletjs)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/markletjs.svg)](https://www.npmjs.com/package/markletjs)
+
+A markup language designed for API manual pages.
+
+## Usage: CLI
+
+```
+Usage: marklet [filepath|dirpath] [options]
+
+Options:
+
+ -v, --version output the version number
+ -m, --mode [mode] Choose between parse, watch and edit mode (default: parse)
+ -s, --source [path] Read text from file
+ -i, --input [text] Read text directly from stdin
+ -d, --dest [path] Write parsed data to file instead of stdout
+ -p, --port [port] Port for the development server
+ -l, --default-language [language] Default language in code block
+ -H, --no-header-align Disable center alignment for headers
+ -S, --no-section Disallow section syntax
+ -h, --help output usage information
+```
+```
+
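+For example, a typical invocation that watches a document and serves it on a custom port might look like this (illustrative; see the options above):
+
+```shell
+marklet path/to/doc.mkl --mode watch --port 8080
+```
+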
+## Usage: Node
+
+```shell
+npm i markletjs
+```
+
+```js
+const Marklet = require('markletjs')
+Marklet.watch({ source: 'path/to/file' })
+```
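+
+The exported `parse` function can also be called directly; a minimal sketch (the exact token shape depends on the parser configuration):
+
+```js
+const Marklet = require('markletjs')
+// parse() takes an options object; the 'input' field is required
+const tokens = Marklet.parse({ input: '# Hello\n\nSome *marklet* text.' })
+console.log(tokens)
+```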
+
+## Usage: Web
+
+```html
+
+```
+
+```html
+
+
+```
diff --git a/packages/marklet/index.d.ts b/packages/marklet/index.d.ts
index a1d61dd..6917436 100644
--- a/packages/marklet/index.d.ts
+++ b/packages/marklet/index.d.ts
@@ -1,5 +1,5 @@
-import { parse, Lexer, LexerConfig } from '@marklet/parser'
+import { parse, DocumentLexer, LexerConfig } from '@marklet/parser'
-export { parse, Lexer }
+export { parse, DocumentLexer as Lexer }
export let config: LexerConfig
export function render(element: string | HTMLElement, source: string): void
diff --git a/packages/marklet/index.js b/packages/marklet/index.js
index 92823c8..8a6f7a0 100644
--- a/packages/marklet/index.js
+++ b/packages/marklet/index.js
@@ -1,7 +1,7 @@
-const { parse, Lexer } = require('@marklet/parser')
+const { parse, DocumentLexer } = require('@marklet/parser')
const renderer = require('@marklet/renderer')
-const lexer = new Lexer()
+const lexer = new DocumentLexer()
const config = lexer.config
function render(element, source) {
@@ -9,8 +9,8 @@ function render(element, source) {
}
module.exports = {
+ Lexer: DocumentLexer,
parse,
- Lexer,
config,
render,
}
diff --git a/packages/marklet/package.json b/packages/marklet/package.json
index e9262d3..147cebe 100644
--- a/packages/marklet/package.json
+++ b/packages/marklet/package.json
@@ -1,12 +1,12 @@
{
"name": "markletjs",
- "version": "1.1.13",
+ "version": "1.2.1",
"description": "A markup language designed for API manual pages.",
"author": "jjyyxx <1449843302@qq.com>",
"contributors": [
"shigma <1700011071@pku.edu.cn>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/marklet",
"license": "MIT",
"main": "index.js",
"typings": "index.d.ts",
@@ -28,8 +28,8 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/cli": "^1.1.5",
- "@marklet/parser": "^1.1.0",
- "@marklet/renderer": "^1.1.2"
+ "@marklet/cli": "^1.1.15",
+ "@marklet/parser": "^1.5.1",
+ "@marklet/renderer": "^1.3.1"
}
}
\ No newline at end of file
diff --git a/packages/monaco/.eslintrc.yml b/packages/monaco/.eslintrc.yml
new file mode 100644
index 0000000..0ae919d
--- /dev/null
+++ b/packages/monaco/.eslintrc.yml
@@ -0,0 +1,2 @@
+env:
+ browser: true
\ No newline at end of file
diff --git a/packages/monaco/.npmignore b/packages/monaco/.npmignore
new file mode 100644
index 0000000..f649af7
--- /dev/null
+++ b/packages/monaco/.npmignore
@@ -0,0 +1 @@
+.eslintrc.yml
\ No newline at end of file
diff --git a/packages/monaco/README.md b/packages/monaco/README.md
new file mode 100644
index 0000000..faa16c5
--- /dev/null
+++ b/packages/monaco/README.md
@@ -0,0 +1,8 @@
+# @marklet/monaco
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fmonaco)](https://github.com/obstudio/Marklet/blob/master/packages/monaco/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/monaco.svg)](https://www.npmjs.com/package/@marklet/monaco)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/monaco.svg)](https://www.npmjs.com/package/@marklet/monaco)
+
+A monaco plugin for marklet.
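+
+The module exports a promise that resolves to the `monaco` namespace once the editor sources have loaded and the `marklet` language has been registered. A minimal sketch (the element id is illustrative):
+
+```js
+require('@marklet/monaco').then((monaco) => {
+  monaco.editor.create(document.getElementById('editor'), {
+    language: 'marklet',
+  })
+})
+```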
diff --git a/packages/monaco/index.js b/packages/monaco/index.js
new file mode 100644
index 0000000..93ac922
--- /dev/null
+++ b/packages/monaco/index.js
@@ -0,0 +1,50 @@
+const loader = require('monaco-editor/min/vs/loader')
+const isBrowser = typeof window === 'object' && window
+const amdRequire = isBrowser ? loader.require : loader
+const Marklet = require('./marklet')
+const path = require('path')
+const fs = require('fs')
+
+function resolveModulePath(filepath) {
+ for (const basepath of require.resolve.paths(__dirname)) {
+ const fullpath = path.resolve(basepath, filepath)
+ if (fs.existsSync(fullpath)) {
+ return 'file:///' + fullpath
+ .split(path.sep)
+ .map(name => name.replace(/#/g, '%23').replace(/ /g, '%20'))
+ .join('/')
+ }
+ }
+ throw new Error(`Cannot find module '${filepath}'`)
+}
+
+amdRequire.config({
+ baseUrl: resolveModulePath('monaco-editor/min')
+})
+
+// workaround monaco-css not understanding the environment
+if (isBrowser) window.module = undefined
+
+let monaco = null
+
+module.exports = new Promise((resolve, reject) => {
+ try {
+ amdRequire(['vs/editor/editor.main'], () => {
+ monaco = window.monaco
+ monaco.languages.register({
+ id: 'marklet',
+ extensions: ['mkl'],
+ })
+ monaco.languages.setMonarchTokensProvider('marklet', Marklet)
+ resolve(window.monaco)
+ })
+ } catch (error) {
+ reject(error)
+ }
+})
+
+module.exports.install = async function(Vue) {
+ Object.defineProperty(Vue.prototype, '$colorize', {
+ get: () => monaco ? (code, lang) => monaco.editor.colorize(code, lang) : undefined
+ })
+}
diff --git a/packages/monaco/marklet.js b/packages/monaco/marklet.js
new file mode 100644
index 0000000..37e2cc4
--- /dev/null
+++ b/packages/monaco/marklet.js
@@ -0,0 +1,15 @@
+module.exports = {
+ tokenizer: {
+ root: [
+ { include: 'topLevel' },
+ ],
+ topLevel: [
+ {
+ regex: /(#{1,4}) +([^\n]+?)( +#)?/,
+ action: {
+ token: 'heading'
+ }
+ }
+ ],
+ },
+}
diff --git a/packages/monaco/package.json b/packages/monaco/package.json
new file mode 100644
index 0000000..3de5bad
--- /dev/null
+++ b/packages/monaco/package.json
@@ -0,0 +1,22 @@
+{
+ "name": "@marklet/monaco",
+ "version": "1.2.1",
+ "description": "A monaco plugin for marklet.",
+ "author": "jjyyxx <1449843302@qq.com>",
+ "contributors": [
+ "shigma <1700011071@pku.edu.cn>"
+ ],
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/monaco",
+ "license": "MIT",
+ "main": "index.js",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/obstudio/Marklet.git"
+ },
+ "bugs": {
+ "url": "https://github.com/obstudio/Marklet/issues"
+ },
+ "dependencies": {
+ "monaco-editor": "^0.14.3"
+ }
+}
\ No newline at end of file
diff --git a/packages/parser/README.md b/packages/parser/README.md
new file mode 100644
index 0000000..62b32e0
--- /dev/null
+++ b/packages/parser/README.md
@@ -0,0 +1,8 @@
+# @marklet/parser
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fparser)](https://github.com/obstudio/Marklet/blob/master/packages/parser/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/parser.svg)](https://www.npmjs.com/package/@marklet/parser)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/parser.svg)](https://www.npmjs.com/package/@marklet/parser)
+
+A document lexer for marklet.
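+
+A minimal sketch of direct usage (configuration keys as defined by this package; the values here are illustrative):
+
+```js
+const { DocumentLexer } = require('@marklet/parser')
+// the constructor accepts a config object, e.g. the default code block language
+const lexer = new DocumentLexer({ default_language: 'js' })
+const tokens = lexer.parse('# Heading\n\nSome *inline* text.')
+```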
diff --git a/packages/parser/package.json b/packages/parser/package.json
index a6e8865..0a3149b 100644
--- a/packages/parser/package.json
+++ b/packages/parser/package.json
@@ -1,12 +1,12 @@
{
"name": "@marklet/parser",
- "version": "1.1.0",
+ "version": "1.5.1",
"description": "A document lexer for marklet.",
"author": "shigma <1700011071@pku.edu.cn>",
"contributors": [
"jjyyxx <1449843302@qq.com>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/parser",
"license": "MIT",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
@@ -21,8 +21,6 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/core": "^2.0.0",
- "@marklet/lexer": "^1.0.10",
- "@marklet/inline": "^1.0.0"
+ "@marklet/core": "^3.2.2"
}
}
\ No newline at end of file
diff --git a/packages/parser/src/document.ts b/packages/parser/src/document.ts
new file mode 100644
index 0000000..572c8e8
--- /dev/null
+++ b/packages/parser/src/document.ts
@@ -0,0 +1,198 @@
+import { DocumentLexer, LexerToken } from '@marklet/core'
+import { LexerConfig, Tokens } from './index'
+import MarkletInlineLexer from './inline'
+
+export default class MarkletDocumentLexer extends DocumentLexer {
+ constructor(config: LexerConfig = {}) {
+ super({
+ text: new MarkletInlineLexer(config),
+ main: [
+ {
+ type: 'newline',
+ regex: /\n+/,
+ token: null
+ },
+ {
+ type: 'heading',
+ regex: /(#{1,4}) +([^\n]+?)( +#)?/,
+ eol: true,
+ token([_, bullet, text, mark]) {
+ let center
+ if (this.config.header_align && mark) {
+ text = this.inline(text)
+ center = true
+ } else {
+ text = this.inline(text + (mark || ''))
+ center = false
+ }
+ return { level: bullet.length, text, center }
+ }
+ },
+ {
+ type: 'section',
+ test: 'allow_section',
+ regex: /(\^{1,4}) +([^\n]+?)( +\^)?/,
+ eol: true,
+ push: 'main',
+ prefix_regex(cap) {
+ const level = cap[1].length
+ return `(?=[#^]{1,${level}} +[^\\n])`
+ },
+ token(cap, content, config) {
+ const text = this.inline(cap[2])
+ const initial = !cap[3] === (config.section_default === 'open') ? 'open' : 'closed'
+ return { level: cap[1].length, text, initial, content }
+ }
+ },
+ {
+ type: 'quote',
+ regex: />([\w-]*) +/,
+ prefix_regex: /\n[ \t]*\n/,
+ push: 'main',
+ token: (cap, content) => ({
+ style: cap[1],
+ content,
+ })
+ },
+ {
+ type: 'separator',
+ regex: / *([-=])(\1|\.\1| \1)\2+/,
+ eol: true,
+ token: (cap) => ({
+ thick: cap[1] === '=',
+ style: cap[2].length === 1 ? 'normal'
+ : cap[2][0] === ' ' ? 'dashed' : 'dotted'
+ })
+ },
+ {
+ type: 'codeblock',
+ regex: / *(`{3,}) *([\w-]+)? *\n([\s\S]*?)\n? *\1/,
+ eol: true,
+ token: (cap, _, config) => ({
+ lang: cap[2] || config.default_language,
+ text: cap[3] || '',
+ })
+ },
+ {
+ type: 'usages',
+ regex: /(?= *\? +\S)/,
+ strict: true,
+ push: {
+ type: 'usage',
+ regex: / *\? +([^\n]+?)/,
+ prefix_regex: /(?= *\? )/,
+ eol: true,
+ push: 'text',
+ token(cap, [detail]) {
+ return {
+ text: this.inline(cap[1]),
+ detail,
+ }
+ }
+ },
+ },
+ {
+ type: 'list',
+ regex: /(?=[ \t]*{{bull}}[ \t]+)/,
+ strict: true,
+ push: {
+ type: 'list-item',
+ regex: /([ \t]*)({{bull}})[ \t]+/,
+ prefix_regex: /(?=[ \t]*{{bull}}[ \t]+)/,
+ push: 'text',
+ token([_, indent, bullet], [text], config) {
+ return {
+ text,
+ order: bullet.slice(0, -1),
+ indent: indent.replace(/\t/g, ' '.repeat(config.tab_indent)).length,
+ }
+ }
+ },
+ token(_, content: Tokens.ListItem[]) {
+ const indents: number[] = []
+ const root: Tokens.ListItem[] = []
+ content.forEach((item) => {
+ let id = indents.length - 1
+ let currentList = root
+ for (; id >= 0; id -= 1) {
+ if (indents[id] < item.indent) break
+ }
+ indents.splice(id + 1, Infinity, item.indent)
+ for (; id >= 0; id -= 1) {
+ const lastItem = currentList[currentList.length - 1]
+ currentList = lastItem.children = lastItem.children || []
+ }
+ delete item.indent
+ currentList.push(item)
+ })
+ return { children: root }
+ }
+ },
+ {
+ type: 'inlinelist',
+ regex: /(?=\+[ \t]+)/,
+ prefix_regex: /[ \t]*\n(?!\+)/,
+ push: {
+ type: 'inlinelist-item',
+ regex: /\+[ \t]+/,
+ prefix_regex: /\n|[ \t]+(?=\+)/,
+ push: 'text',
+ token: (_, [text]) => text.trim()
+ },
+ },
+ {
+ type: 'table',
+ test: 'marklet_table',
+ regex: /{{sign}}({{tab}}{{sign}})*{{eol}}/,
+ strict: true,
+ push: {
+ type: 'table-row',
+ regex: /(?=\S)/,
+ strict: true,
+ push: {
+ type: 'table-cell',
+ regex: /({{cell}})({{eol}}|{{tab}})/,
+ pop: (cap) => cap[2].includes('\n'),
+ token([_, text]) {
+ return this.inline(text)
+ }
+ },
+ },
+ token([header], content: LexerToken[]) {
+ const columns = header.match(/[*=<>]+/g).map((col) => ({
+ bold: col.includes('*'),
+ align: col.includes('<') ? 'left'
+ : col.includes('=') ? 'center'
+ : col.includes('>') ? 'right' : 'center'
+ }))
+ const data = content.map(row => row.content.slice(0, columns.length))
+ return { data, columns }
+ }
+ },
+ {
+ type: 'paragraph',
+ regex: /(?=\S)/,
+ push: 'text',
+ token: (_, [text]) => ({ text })
+ }
+ ],
+ }, {
+ macros: {
+ bull: /-|[a-zA-Zα-ωΑ-Ω\d]+\./,
+ sign: /[*=<>]+/,
+ tab: /\t+| {4,}/,
+ eol: /[ \t]*(?:\n|$)/,
+ cell: /\S(?: {0,3}\S)*/,
+ },
+ config: {
+ tab_indent: 2,
+ header_align: true,
+ allow_section: true,
+ marklet_table: true,
+ section_default: 'open',
+ default_language: '',
+ ...config,
+ }
+ })
+ }
+}
diff --git a/packages/parser/src/index.ts b/packages/parser/src/index.ts
index 488a4ef..b4d73b6 100644
--- a/packages/parser/src/index.ts
+++ b/packages/parser/src/index.ts
@@ -1,19 +1,6 @@
-import { LexerConfig, TokenLike } from '@marklet/core'
-import { InlineLexer } from '@marklet/inline'
-import { Lexer } from '@marklet/lexer'
-
-function escape(html: string): string {
- return html
- .replace(/&/g, '&amp;')
- .replace(/</g, '&lt;')
- .replace(/>/g, '&gt;')
- .replace(/"/g, '&quot;')
- .replace(/'/g, '&#39;')
-}
-
-function collect(content: TokenLike[]) {
- return content
-}
+import { LexerConfig, TokenLike, LexerToken } from '@marklet/core'
+import MarkletInlineLexer from './inline'
+import MarkletDocumentLexer from './document'
interface MarkletLexerConfig extends LexerConfig {
/** enable header to align at center */
@@ -24,263 +11,6 @@ interface MarkletLexerConfig extends LexerConfig {
default_language?: string
}
-class MarkletInlineLexer extends InlineLexer {
- constructor(config: MarkletLexerConfig = {}) {
- super([
- {
- regex: /(?=\n[ \t]*(\n|$))/,
- pop: true
- },
- {
- type: 'escape',
- regex: /\\([\s\S])/,
- token: (cap) => cap[1]
- },
- {
- type: 'newline',
- regex: /\n/,
- token: '<br>'
- },
- {
- type: 'code',
- regex: /(`+)\s*([\s\S]*?[^`]?)\s*\1(?!`)/,
- token: (cap) => `<code>${escape(cap[2])}</code>`
- },
- {
- type: 'strikeout',
- regex: /-(?=\S)([\s\S]*?\S)-(?!-)/,
- token: (cap) => `<del>${cap.inner}</del>`
- },
- {
- type: 'underline',
- regex: /_(?=\S)([\s\S]*?\S)_(?!_)/,
- token: (cap) => `<u>${cap.inner}</u>`
- },
- {
- type: 'bold',
- regex: /\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
- token: (cap) => `<strong>${cap.inner}</strong>`
- },
- {
- type: 'italic',
- regex: /\*(?=\S)([\s\S]*?\S)\*(?!\*)/,
- token: (cap) => `<em>${cap.inner}</em>`
- },
- {
- type: 'comment',
- regex: /\(\((?=\S)([\s\S]*?\S)\)\)(?!\))/,
- token: (cap) => `<!--${cap.inner}-->`
- },
- {
- type: 'package',
- regex: /{{(?=\S)([\s\S]*?\S)}}(?!})/,
- token: (cap) => `<code class="package">${cap.inner}</code>`
- },
- {
- type: 'link',
- regex: /\[(?:([^\]|]+)\|)?([^\]]+)\]/,
- token(cap) {
- let text, match
- if (cap[1]) {
- text = cap[1]
- } else if (match = cap[2].match(/^\$\w+(#\w+)$/)) {
- text = match[1]
- // } else if (this.resolve(cap[2]) in this.options.dictionary) { // FIXME: function not added yet
- // text = this.options.dictionary[this.resolve(cap[2])]
- } else if (cap[2].includes('#') || cap[2].includes('/')) {
- text = cap[2].match(/[#/]([^#/]+)$/)[1]
- } else {
- text = cap[2]
- }
- return cap[2][0] === '!' ?
- `` : // TODO: special treatment like necessary?
- `${text}`
- }
- }
- ], config)
- }
-}
-
-class MarkletLexer extends Lexer {
- constructor(config: MarkletLexerConfig = {}) {
- super({
- text: new MarkletInlineLexer(config),
- main: [
- {
- type: 'newline',
- regex: /\n+/,
- token: null
- },
- {
- type: 'heading',
- regex: /(#{1,4}) +([^\n]+?)( +#)?/,
- eol: true,
- token(cap) {
- let text, center
- if (this.config.header_align && cap[3]) {
- text = this.inline(cap[2])
- center = true
- } else {
- text = this.inline(cap[2] + (cap[3] || ''))
- center = false
- }
- return { level: cap[1].length, text, center }
- }
- },
- {
- type: 'section',
- test: 'allow_section',
- regex: /(\^{1,4}) +([^\n]+?)/,
- eol: true,
- push: 'main',
- token(cap) {
- return {
- level: cap[1].length,
- text: this.inline(cap[2]),
- }
- }
- },
- {
- type: 'quote',
- regex: />([\w-]*) +/,
- push: 'block',
- token: (cap, content) => ({ style: cap[1], content })
- },
- {
- type: 'separator',
- regex: / *([-=])(\1|\.\1| \1)\2+/,
- eol: true,
- token: (cap) => ({
- thick: cap[1] === '=',
- style: cap[2].length === 1 ? 'normal'
- : cap[2][0] === ' ' ? 'dashed' : 'dotted'
- })
- },
- {
- type: 'codeblock',
- regex: / *(`{3,}) *([\w-]+)? *\n([\s\S]*?)\n? *\1/,
- eol: true,
- token(cap) {
- return {
- lang: cap[2] || this.config.default_language,
- text: cap[3] || '',
- }
- }
- },
- {
- type: 'usages',
- regex: /(?= *\? +\S)/,
- strict: true,
- push: [
- {
- type: 'usage',
- regex: / *\? +([^\n]+?)/,
- eol: true,
- push: [
- {
- regex: /(?= *\? )/,
- pop: true
- },
- {
- include: 'text'
- }
- ],
- token(cap, cont) {
- return {
- text: this.inline(cap[1]),
- content: cont,
- }
- }
- }
- ]
- },
- {
- type: 'list',
- regex: / *(?={{bullet}} +[^\n]+)/,
- strict: true,
- push: [
- {
- type: 'item',
- regex: /( *)({{bullet}}) +(?=[^\n]+)/,
- push: [{
- regex: /\n? *(?={{bullet}} +[^\n]+)/,
- pop: true
- }, {
- include: 'text'
- }],
- token(cap, cont) {
- return {
- text: cont.join(''),
- ordered: cap[2].length > 1,
- indent: cap[1].length,
- }
- }
- }
- ],
- token: (_, cont) => collect(cont)
- },
- {
- type: 'inlinelist',
- regex: /(?=\+)/,
- push: [
- {
- type: 'item',
- regex: /\+/,
- push: [
- {
- regex: /\+?$|\+\n(?=\+)|\+?(?=\n)|(?=\+)/,
- pop: true
- },
- {
- include: 'text'
- }
- ],
- token(_, cont) {
- return cont.join('')
- }
- },
- {
- regex: /\n|$/,
- pop: true
- }
- ],
- token: (_, cont) => ({ content: cont })
- },
- {
- type: 'table',
- regex: /$^/, // FIXME: placeholder for syntax discussion
- push: [],
- token: (_, cont) => ({ content: cont })
- },
- {
- type: 'paragraph',
- push: 'text',
- token: (_, cont) => ({ text: cont.join('') })
- }
- ],
- block: [
- {
- regex: /\n[ \t]*\n/,
- pop: true
- },
- {
- include: 'main'
- }
- ],
- }, {
- macros: {
- bullet: /-|\d+\./,
- },
- config: {
- header_align: true,
- allow_section: true,
- default_language: '',
- ...config,
- }
- })
- }
-}
-
export interface ParseOptions {
input: string
config?: MarkletLexerConfig
@@ -293,11 +23,84 @@ export function parse(options: ParseOptions): TokenLike[] {
} else {
throw new Error("'input' option is required.")
}
- return new MarkletLexer(options.config).parse(source)
+ return new MarkletDocumentLexer(options.config).parse(source)
}
export {
- MarkletLexer as Lexer,
- MarkletInlineLexer as InlineLexer,
MarkletLexerConfig as LexerConfig,
+ MarkletInlineLexer as InlineLexer,
+ MarkletDocumentLexer as DocumentLexer,
+}
+
+export namespace Tokens {
+ export type Text = string
+
+ export interface Heading extends LexerToken {
+ type: 'heading'
+ level: number
+ center: boolean
+ }
+
+ export interface Section extends LexerToken {
+ type: 'section'
+ level: number
+ initial: 'open' | 'closed'
+ content: LexerToken[]
+ }
+
+ export interface CodeBlock extends LexerToken {
+ type: 'codeblock'
+ lang: string
+ }
+
+ export interface Separator extends LexerToken {
+ type: 'separator'
+ thick: boolean
+ style: 'normal' | 'dashed' | 'dotted'
+ }
+
+ export interface InlineList extends LexerToken {
+ type: 'inlinelist'
+ content: string[]
+ }
+
+ export interface ListItem extends LexerToken {
+ type: 'list-item'
+ order: string
+ children?: ListItem[]
+ }
+
+ export interface List extends LexerToken {
+ type: 'list'
+ children?: ListItem[]
+ }
+
+ export interface Paragraph extends LexerToken {
+ type: 'paragraph'
+ }
+
+ export interface Quote extends LexerToken {
+ type: 'quote'
+ style: string
+ content: LexerToken[]
+ }
+
+ export interface Table extends LexerToken {
+ type: 'table'
+ columns: {
+ align: 'left' | 'center' | 'right'
+ bold: boolean
+ }[]
+ data: string[][]
+ }
+
+ export interface Usage extends LexerToken {
+ type: 'usage'
+ content: LexerToken[]
+ }
+
+ export interface Usages extends LexerToken {
+ type: 'usages'
+ content: Usage[]
+ }
}
diff --git a/packages/parser/src/inline.ts b/packages/parser/src/inline.ts
new file mode 100644
index 0000000..62263af
--- /dev/null
+++ b/packages/parser/src/inline.ts
@@ -0,0 +1,88 @@
+import { InlineLexer } from '@marklet/core'
+import { LexerConfig } from './index'
+
+function escape(html: string): string {
+ return html
+ .replace(/&/g, '&amp;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;')
+ .replace(/"/g, '&quot;')
+ .replace(/'/g, '&#39;')
+}
+
+export default class MarkletInlineLexer extends InlineLexer {
+ constructor(config: LexerConfig = {}) {
+ super([
+ {
+ regex: /(?=\n[ \t]*(\n|$))/,
+ pop: true
+ },
+ {
+ type: 'escape',
+ regex: /\\([\s\S])/,
+ token: (cap) => cap[1]
+ },
+ {
+ type: 'newline',
+ regex: /\n/,
+ token: '<br>'
+ },
+ {
+ type: 'code',
+ regex: /(`+)\s*([\s\S]*?[^`]?)\s*\1(?!`)/,
+ token: (cap) => `<code>${escape(cap[2])}</code>`
+ },
+ {
+ type: 'strikeout',
+ regex: /-(?=\S)([\s\S]*?\S)-(?!-)/,
+ token: (cap) => `<del>${cap.inner}</del>`
+ },
+ {
+ type: 'underline',
+ regex: /_(?=\S)([\s\S]*?\S)_(?!_)/,
+ token: (cap) => `<u>${cap.inner}</u>`
+ },
+ {
+ type: 'bold',
+ regex: /\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,
+ token: (cap) => `<strong>${cap.inner}</strong>`
+ },
+ {
+ type: 'italic',
+ regex: /\*(?=\S)([\s\S]*?\S)\*(?!\*)/,
+ token: (cap) => `<em>${cap.inner}</em>`
+ },
+ {
+ type: 'comment',
+ regex: /\(\((?=\S)([\s\S]*?\S)\)\)(?!\))/,
+ token: (cap) => `<!--${cap.inner}-->`
+ },
+ {
+ type: 'package',
+ regex: /{{(?=\S)([\s\S]*?\S)}}(?!})/,
+ token: (cap) => `<code class="package">${cap.inner}</code>`
+ },
+ {
+ type: 'link',
+ regex: /\[(?:([^\]|]+)\|)?([^\]]+)\]/,
+ token(cap) {
+ let text, match
+ if (cap[1]) {
+ text = cap[1]
+ } else if (match = cap[2].match(/^\$\w+(#\w+)$/)) {
+ text = match[1]
+ // } else if (this.resolve(cap[2]) in this.options.dictionary) { // FIXME: function not added yet
+ // text = this.options.dictionary[this.resolve(cap[2])]
+ } else if (cap[2].includes('#') || cap[2].includes('/')) {
+ text = cap[2].match(/[#/]([^#/]+)$/)[1]
+ } else {
+ text = cap[2]
+ }
+ return cap[2][0] === '!' ?
+ `` : // TODO: special treatment like necessary?
+ `${text}`
+ }
+ }
+ ], config)
+ }
+}
diff --git a/packages/parser/tsconfig.json b/packages/parser/tsconfig.json
index 8a214b6..4ad9fb3 100644
--- a/packages/parser/tsconfig.json
+++ b/packages/parser/tsconfig.json
@@ -9,7 +9,5 @@
},
"references": [
{ "path": "../core" },
- { "path": "../inline" },
- { "path": "../lexer" }
]
}
\ No newline at end of file
diff --git a/packages/renderer/README.md b/packages/renderer/README.md
new file mode 100644
index 0000000..4b92087
--- /dev/null
+++ b/packages/renderer/README.md
@@ -0,0 +1,8 @@
+# @marklet/renderer
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Frenderer)](https://github.com/obstudio/Marklet/blob/master/packages/renderer/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/renderer.svg)](https://www.npmjs.com/package/@marklet/renderer)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/renderer.svg)](https://www.npmjs.com/package/@marklet/renderer)
+
+An HTML renderer for marklet.
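+
+A minimal usage sketch (assumes Vue 2 and a token array produced by `@marklet/parser`; the mount target is illustrative):
+
+```js
+const Vue = require('vue')
+const renderer = require('@marklet/renderer')
+const { parse } = require('@marklet/parser')
+
+Vue.use(renderer) // registers the mkl-* components
+renderer.embed('#app', parse({ input: '# Hello Marklet' }))
+```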
diff --git a/packages/renderer/comp/codeblock.vue b/packages/renderer/comp/codeblock.vue
new file mode 100644
index 0000000..ce07ae5
--- /dev/null
+++ b/packages/renderer/comp/codeblock.vue
@@ -0,0 +1,53 @@
+
+
+
+
+
+
+
diff --git a/packages/renderer/comp/collapse.vue b/packages/renderer/comp/collapse.vue
new file mode 100644
index 0000000..8557f5d
--- /dev/null
+++ b/packages/renderer/comp/collapse.vue
@@ -0,0 +1,99 @@
+
+
+
+
+
+
+
diff --git a/packages/renderer/comp/heading.vue b/packages/renderer/comp/heading.vue
index c7f8305..c91f26f 100644
--- a/packages/renderer/comp/heading.vue
+++ b/packages/renderer/comp/heading.vue
@@ -1,35 +1,42 @@
-
+
-
diff --git a/packages/renderer/comp/inlinelist.vue b/packages/renderer/comp/inlinelist.vue
index 61dcbcb..4aa1f31 100644
--- a/packages/renderer/comp/inlinelist.vue
+++ b/packages/renderer/comp/inlinelist.vue
@@ -1,13 +1,19 @@
-
-
diff --git a/packages/renderer/comp/list-item.vue b/packages/renderer/comp/list-item.vue
new file mode 100644
index 0000000..47900d6
--- /dev/null
+++ b/packages/renderer/comp/list-item.vue
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/packages/renderer/comp/list.vue b/packages/renderer/comp/list.vue
new file mode 100644
index 0000000..dc4717d
--- /dev/null
+++ b/packages/renderer/comp/list.vue
@@ -0,0 +1,107 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/packages/renderer/comp/nodes.vue b/packages/renderer/comp/nodes.vue
index 792767e..c0d4831 100644
--- a/packages/renderer/comp/nodes.vue
+++ b/packages/renderer/comp/nodes.vue
@@ -17,11 +17,15 @@ module.exports = {
-
-
-
-
+
+
diff --git a/packages/renderer/comp/quote.vue b/packages/renderer/comp/quote.vue
index 8a7ea77..b4c0dbc 100644
--- a/packages/renderer/comp/quote.vue
+++ b/packages/renderer/comp/quote.vue
@@ -1,5 +1,5 @@
-
+
diff --git a/packages/renderer/comp/section.vue b/packages/renderer/comp/section.vue
new file mode 100644
index 0000000..7cb89b8
--- /dev/null
+++ b/packages/renderer/comp/section.vue
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+
diff --git a/packages/renderer/comp/table.vue b/packages/renderer/comp/table.vue
new file mode 100644
index 0000000..1cea964
--- /dev/null
+++ b/packages/renderer/comp/table.vue
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/packages/renderer/comp/transitions/collapse-transition.vue b/packages/renderer/comp/transitions/collapse-transition.vue
new file mode 100644
index 0000000..31edc16
--- /dev/null
+++ b/packages/renderer/comp/transitions/collapse-transition.vue
@@ -0,0 +1,102 @@
+
\ No newline at end of file
diff --git a/packages/renderer/comp/transitions/utils.ts b/packages/renderer/comp/transitions/utils.ts
new file mode 100644
index 0000000..0d268b7
--- /dev/null
+++ b/packages/renderer/comp/transitions/utils.ts
@@ -0,0 +1,44 @@
+/** Turn camelCase into kebab-case. */
+function toKebab(string: string) {
+ return string.replace(/[A-Z]/g, char => '-' + char.toLowerCase())
+}
+
+/** Generate css transition styles. */
+export function style(
+ properties: string[],
+ duration: number,
+ timingFunction: string
+) {
+ return properties
+ .filter(name => !name.startsWith('transition'))
+ .map(name => `${duration}s ${timingFunction} ${toKebab(name)}`)
+ .join(',')
+}
+
+/** Emit events in functional components. */
+export function emit(
+ listeners: Record<string, Function | Function[]>,
+ eventName: string,
+ ...args: any[]
+) {
+ const listener = listeners[eventName]
+ if (listener instanceof Array) {
+ listener.forEach(func => func(...args))
+ } else if (listener instanceof Function) {
+ listener(...args)
+ }
+}
+
+/** Store properties into dataset. */
+export function store(element: HTMLElement, properties: string[]) {
+ properties.forEach((name: any) => {
+ element.dataset[name] = element.style[name]
+ })
+}
+
+/** Restore properties from dataset. */
+export function restore(element: HTMLElement, properties: string[]) {
+ properties.forEach((name: any) => {
+ element.style[name] = element.dataset[name]
+ })
+}
diff --git a/packages/renderer/comp/usages.vue b/packages/renderer/comp/usages.vue
index f1af879..6882885 100644
--- a/packages/renderer/comp/usages.vue
+++ b/packages/renderer/comp/usages.vue
@@ -1,10 +1,9 @@
@@ -13,7 +12,7 @@
blockquote {
border: 1px solid !important;
- margin-bottom: -1px;
+ margin: 0 0 -1px 0;
padding: 8px 1em;
&:first-child {
diff --git a/packages/renderer/package.json b/packages/renderer/package.json
index 15538f3..9a77942 100644
--- a/packages/renderer/package.json
+++ b/packages/renderer/package.json
@@ -1,12 +1,12 @@
{
"name": "@marklet/renderer",
- "version": "1.1.2",
+ "version": "1.3.1",
"description": "A html renderer for marklet.",
"author": "jjyyxx <1449843302@qq.com>",
"contributors": [
"shigma <1700011071@pku.edu.cn>"
],
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/renderer",
"license": "MIT",
"main": "dist/renderer.min.js",
"typings": "index.d.ts",
@@ -21,9 +21,10 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/core": "^2.0.0-beta.0"
+ "@marklet/core": "^3.2.2"
},
"peerDependencies": {
+ "neat-scroll": "^2.0.1",
"vue": "^2.5.17"
}
}
\ No newline at end of file
diff --git a/packages/renderer/src/index.js b/packages/renderer/src/index.js
index d6238d2..9ce956e 100644
--- a/packages/renderer/src/index.js
+++ b/packages/renderer/src/index.js
@@ -1,23 +1,23 @@
-const HeadingComponent = require('@/heading.vue')
-const InlinelistComponent = require('@/inlinelist.vue')
-const NodesComponent = require('@/nodes.vue')
-const ParagraphComponent = require('@/paragraph.vue')
-const QuoteComponent = require('@/quote.vue')
-const SeparatorComponent = require('@/separator.vue')
-const UsagesComponent = require('@/usages.vue')
-
let _Vue = null, ASTNodes = null
const Renderer = {
install(Vue) {
_Vue = Vue
- Vue.component('ml-heading', HeadingComponent)
- Vue.component('ml-inlinelist', InlinelistComponent)
- Vue.component('ml-nodes', NodesComponent)
- Vue.component('ml-paragraph', ParagraphComponent)
- Vue.component('ml-quote', QuoteComponent)
- Vue.component('ml-separator', SeparatorComponent)
- Vue.component('ml-usages', UsagesComponent)
+ Vue.component('collapse-transition', require('../temp/transitions/collapse-transition.vue'))
+ Vue.component('mkl-codeblock', require('../temp/codeblock.vue'))
+ Vue.component('mkl-collapse', require('../temp/collapse.vue'))
+ Vue.component('mkl-heading', require('../temp/heading.vue'))
+ Vue.component('mkl-inlinelist', require('../temp/inlinelist.vue'))
+ Vue.component('mkl-list-item', require('../temp/list-item.vue'))
+ Vue.component('mkl-list', require('../temp/list.vue'))
+ Vue.component('mkl-nodes', require('../temp/nodes.vue'))
+ Vue.component('mkl-paragraph', require('../temp/paragraph.vue'))
+ Vue.component('mkl-quote', require('../temp/quote.vue'))
+ Vue.component('mkl-scroll', require('../temp/scroll.vue'))
+ Vue.component('mkl-section', require('../temp/section.vue'))
+ Vue.component('mkl-separator', require('../temp/separator.vue'))
+ Vue.component('mkl-table', require('../temp/table.vue'))
+ Vue.component('mkl-usages', require('../temp/usages.vue'))
},
embed(element, content = []) {
if (!_Vue) {
@@ -27,7 +27,7 @@ const Renderer = {
throw new Error('No vue constructor was found.')
}
}
- if (!ASTNodes) ASTNodes = _Vue.extend(NodesComponent)
+ if (!ASTNodes) ASTNodes = _Vue.extend(require('../temp/nodes.vue'))
new ASTNodes({ propsData: { content } }).$mount(element)
}
}
diff --git a/packages/renderer/tsconfig.json b/packages/renderer/tsconfig.json
new file mode 100644
index 0000000..05cce1d
--- /dev/null
+++ b/packages/renderer/tsconfig.json
@@ -0,0 +1,10 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "include": [
+ "comp"
+ ],
+ "compilerOptions": {
+ "outDir": "temp",
+ "rootDir": "comp"
+ }
+}
\ No newline at end of file
diff --git a/packages/syntax/README.md b/packages/syntax/README.md
new file mode 100644
index 0000000..e32e048
--- /dev/null
+++ b/packages/syntax/README.md
@@ -0,0 +1,8 @@
+# @marklet/syntax
+
+[![Build Status](https://travis-ci.com/obstudio/Marklet.svg?branch=dev)](https://travis-ci.com/obstudio/Marklet)
+[![dependency](https://img.shields.io/david/obstudio/Marklet.svg?path=packages%2Fsyntax)](https://github.com/obstudio/Marklet/blob/master/packages/syntax/package.json)
+[![npm](https://img.shields.io/npm/v/@marklet/syntax.svg)](https://www.npmjs.com/package/@marklet/syntax)
+[![npm bundle size (minified)](https://img.shields.io/bundlephobia/min/@marklet/syntax.svg)](https://www.npmjs.com/package/@marklet/syntax)
+
+A common language lexer for marklet.
diff --git a/packages/syntax/package.json b/packages/syntax/package.json
index 9000d29..03caee6 100644
--- a/packages/syntax/package.json
+++ b/packages/syntax/package.json
@@ -1,9 +1,9 @@
{
"name": "@marklet/syntax",
- "version": "1.0.10",
+ "version": "1.0.16",
"description": "A common language lexer for marklet.",
"author": "shigma <1700011071@pku.edu.cn>",
- "homepage": "https://github.com/obstudio/Marklet",
+ "homepage": "https://github.com/obstudio/Marklet/tree/dev/packages/syntax",
"license": "MIT",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
@@ -18,7 +18,7 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/lexer": "^1.0.10",
+ "@marklet/core": "^3.2.2",
"js-yaml": "^3.12.0"
}
}
\ No newline at end of file
diff --git a/packages/syntax/src/index.ts b/packages/syntax/src/index.ts
deleted file mode 100644
index 7ee1b73..0000000
--- a/packages/syntax/src/index.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-import { Lexer, LexerContexts } from '@marklet/lexer'
-
-type SyntaxRule = SyntaxMetaRule | SyntaxIncludeRule | SyntaxRegexRule
-interface SyntaxToken { scope: string, text: string }
-interface SyntaxMetaRule { meta: string }
-interface SyntaxIncludeRule { include: string }
-interface SyntaxRegexRule {
- regex?: string
- flags?: string
- scope?: string
- capture?: { [key: number]: string }
- push?: string | SyntaxRule[]
- pop?: boolean
- eol?: boolean
- context_begins?: boolean
- top_level?: boolean
- ignore_case?: boolean
- token?: (capture: RegExpExecArray, content: SyntaxToken[]) => SyntaxToken | SyntaxToken[]
-}
-
-export class SyntaxLexer extends Lexer {
- constructor(contexts: Record<string, SyntaxRule[]>, macros: Record<string, string> = {}) {
- function traverse(context: SyntaxRule[]): void {
- let meta = '', firstRule = context[0]
- if ('meta' in firstRule) {
- meta = firstRule.meta
- context.splice(0, 1).push({ regex: '[\s\S]', scope: meta })
- }
- context.forEach((rule) => {
- if ('meta' in rule) {
- throw new Error("'meta' can only be the first rule.")
- } else if (!('include' in rule)) {
- if (!rule.capture) rule.capture = {}
- if (rule.scope) {
- rule.capture[0] = rule.scope
- delete rule.scope
- }
- rule.token = (cap, cont) => {
- const result: SyntaxToken[] = []
- for (let i = 0; i < cap.length; i += 1) {
- result.push({ scope: rule.capture[i] || rule.capture[0] || meta, text: cap[i] })
- }
- return result.concat(cont)
- }
- delete rule.capture
- if (rule.push instanceof Array) traverse(rule.push)
- }
- })
- }
- for (const key in contexts) traverse(contexts[key])
- super(contexts as LexerContexts, { macros })
- }
-}
diff --git a/packages/syntax/tsconfig.json b/packages/syntax/tsconfig.json
deleted file mode 100644
index 004deda..0000000
--- a/packages/syntax/tsconfig.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "extends": "../../tsconfig.base.json",
- "include": [
- "src"
- ],
- "compilerOptions": {
- "outDir": "dist",
- "rootDir": "src"
- },
- "references": [
- { "path": "../lexer" }
- ]
-}
\ No newline at end of file
diff --git a/packages/test/README.md b/packages/test/README.md
new file mode 100644
index 0000000..6d75974
--- /dev/null
+++ b/packages/test/README.md
@@ -0,0 +1,3 @@
+# @marklet/test
+
+Tests for marklet.
diff --git a/packages/test/benchmark.js b/packages/test/benchmark.js
deleted file mode 100644
index 33ddd27..0000000
--- a/packages/test/benchmark.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const { performance } = require('perf_hooks')
-
-const Marklet = require('markletjs')
-
-module.exports = ({name, content}) => {
- const start1 = performance.now()
- Marklet.parse({ input: content })
- const end1 = performance.now()
- const start2 = performance.now()
- for(let i = 0; i < 1000; ++i) {
- Marklet.parse({ input: content })
- }
- const end2 = performance.now()
- console.log(name, 'initial:', end1 - start1, 'ms')
- console.log(name, 'average:', (end2 - start2) / 1000, 'ms')
-}
diff --git a/packages/test/data/codeblock.mkl b/packages/test/data/codeblock.mkl
new file mode 100644
index 0000000..7b0913a
--- /dev/null
+++ b/packages/test/data/codeblock.mkl
@@ -0,0 +1,14 @@
+# Code Block Test
+
+```
+some code
+```
+
+```c
+int code = 2;
+```
+
+````
+```some code
+````
+
diff --git a/packages/test/data/test1.mkl b/packages/test/data/demo.mkl
similarity index 100%
rename from packages/test/data/test1.mkl
rename to packages/test/data/demo.mkl
diff --git a/packages/test/data/inlinelist.mkl b/packages/test/data/inlinelist.mkl
index cdf1661..56366da 100644
--- a/packages/test/data/inlinelist.mkl
+++ b/packages/test/data/inlinelist.mkl
@@ -1,3 +1 @@
-+ 123 + 123 +
-+ 123 + 123 + 123
-+ 123 +
++ 123 + 123 + 123 + 123 + 123 + 123
diff --git a/packages/test/data/list.mkl b/packages/test/data/list.mkl
new file mode 100644
index 0000000..58e00d0
--- /dev/null
+++ b/packages/test/data/list.mkl
@@ -0,0 +1,13 @@
+# List Test
+
+- item 1
+- item 2
+ - item 2.1
+ - item 2.1.1
+- item 3
+ 1. item 3.1
+ 2. item 3.2
+
+- item from another list
+
+- item from another list
diff --git a/packages/test/data/section.mkl b/packages/test/data/section.mkl
new file mode 100644
index 0000000..0f10ae4
--- /dev/null
+++ b/packages/test/data/section.mkl
@@ -0,0 +1,30 @@
+# Section Test
+
+^ Section 1
+
+Some text
+
+^^ Section 1.1
+
+Section 1.1 end
+
+^^ Section 1.2
+
+Section 1.2 end
+
+---
+
+> Blockquote
+^^ Not a section
+
+^^ Section 1.3
+
+Section 1.3 end
+
+^ Section 2
+
+Section 2 end
+
+===
+
+Some text
diff --git a/packages/test/data/table.mkl b/packages/test/data/table.mkl
new file mode 100644
index 0000000..586cbed
--- /dev/null
+++ b/packages/test/data/table.mkl
@@ -0,0 +1,8 @@
+# Table Test
+
+Below is a table:
+
+= *< >
+*centered* left-aligned _right-aligned_
+looks -very- correct
+OH YEAH ♪ can't see me
\ No newline at end of file
diff --git a/packages/test/data/text.mkl b/packages/test/data/text.mkl
new file mode 100644
index 0000000..fcbe44d
--- /dev/null
+++ b/packages/test/data/text.mkl
@@ -0,0 +1,20 @@
+# Inline Test
+
+**bo*ld**
+
+`co`de`
+
+*ita*lic*
+
+_under_line_
+
+-strike-out-
+
+new
+line
+
+*esc\*ape*
+
+[marklet-link|#inline-test]
+
+[markdown-link](#inline-test)
diff --git a/packages/test/index.js b/packages/test/index.js
new file mode 100644
index 0000000..6309ba3
--- /dev/null
+++ b/packages/test/index.js
@@ -0,0 +1,65 @@
+const { DocumentLexer } = require('@marklet/parser')
+const chalk = require('chalk')
+const path = require('path')
+const fs = require('fs')
+
+const lexer = new DocumentLexer()
+
+const testFiles = fs.readdirSync(path.join(__dirname, 'data')).map((file) => ({
+ name: file.slice(0, -4),
+ content: fs.readFileSync(path.join(__dirname, 'data', file)).toString()
+}))
+
+const parsedFiles = testFiles.map((file) => ({
+ name: file.name,
+ content: lexer.parse(file.content)
+}))
+
+class TaskManager {
+ constructor() {
+ this.tasks = []
+ }
+
+ add(task) {
+ this.tasks.push(task)
+ return this
+ }
+
+ runTask({ title, test, raw }) {
+ console.log(chalk.blueBright(`Test Started: ${title}`))
+ const testData = raw ? testFiles : parsedFiles
+ let failed = false
+ for (const testUnit of testData) {
+ let result
+ try {
+ result = test(testUnit)
+ } catch (error) {
+ console.log(error)
+ result = true
+ }
+ failed |= result
+ if (result) {
+ console.log(chalk.redBright(`Test '${title}' on ${testUnit.name} failed.`))
+ } else if (result !== false) {
+ console.log(`Test '${title}' on ${testUnit.name} succeeded.`)
+ }
+ }
+ if (!failed) {
+ console.log(chalk.greenBright(`Test '${title}' succeeded.`))
+ }
+ console.log()
+ return failed
+ }
+
+ run() {
+ if (this.tasks.some(task => this.runTask(task))) {
+ process.exit(1)
+ }
+ }
+}
+
+new TaskManager()
+ .add(require('./tasks/schema'))
+ .add(require('./tasks/detok'))
+ .add(require('./tasks/benchmark'))
+ .run()
diff --git a/packages/test/package.json b/packages/test/package.json
index 8a6ecc3..faccb99 100644
--- a/packages/test/package.json
+++ b/packages/test/package.json
@@ -1,8 +1,12 @@
{
"name": "@marklet/test",
- "version": "1.0.10",
+ "version": "2.1.2",
"private": true,
+ "main": "index.js",
"author": "jjyyxx <1449843302@qq.com>",
+ "contributors": [
+ "shigma <1700011071@pku.edu.cn>"
+ ],
"homepage": "https://github.com/obstudio/Marklet",
"license": "MIT",
"repository": {
@@ -16,7 +20,8 @@
"url": "https://github.com/obstudio/Marklet/issues"
},
"dependencies": {
- "@marklet/detok": "^1.0.4",
- "markletjs": "^1.1.13"
+ "@marklet/detok": "^1.1.3",
+ "@marklet/parser": "^1.5.1",
+ "chalk": "^2.4.1"
}
}
\ No newline at end of file
diff --git a/packages/test/runner.js b/packages/test/runner.js
deleted file mode 100644
index fa83d43..0000000
--- a/packages/test/runner.js
+++ /dev/null
@@ -1,53 +0,0 @@
-const path = require('path')
-const fs = require('fs')
-const equal = require('fast-deep-equal')
-const detok = require('@marklet/detok')
-const Marklet = require('markletjs')
-const benchmark = require('./benchmark')
-const schema = require('./schema')
-
-const testFiles = fs.readdirSync(path.join(__dirname, 'data')).map((file) => ({
- name: file,
- content: fs.readFileSync(path.join(__dirname, 'data', file), 'utf8')
-}))
-const parsedFiles = testFiles.map((file) => ({name: file.name, content: Marklet.parse({ input: file.content })}))
-
-const testProcess = {
- tests: [],
- add(title, test, raw = false) {
- this.tests.push({ title, test, raw })
- return this
- },
- run() {
- for (const { title, test, raw } of this.tests) {
- console.log('Test:', title)
- const testData = raw ? testFiles : parsedFiles
- let res = true
- for (const testUnit of testData) {
- if (!test(testUnit)) {
- res = false
- console.log(`Test on ${testUnit.name} failed`)
- break
- }
- }
- if (res) {
- console.log(`Test ${title} succeeded`)
- } else {
- console.log(`Test ${title} failed\n`)
- }
- }
- }
-}
-
-testProcess
- .add('Shape correctness', (res) => {
- return schema(res.content)
- })
- .add('Detok', (res) => {
- return equal(Marklet.parse({ input: detok.detokenize(res.content) }), res.content)
- })
- .add('Benchmark', (res) => {
- benchmark(res)
- return true
- }, true)
- .run()
diff --git a/packages/test/schema.js b/packages/test/schema.js
deleted file mode 100644
index 1f17463..0000000
--- a/packages/test/schema.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const cheerio = require('cheerio')
-const Ajv = require('ajv')
-
-const validator = new Ajv({
-})
-
-const textTags = ['br', 'code', 'em', 'strong', 'del', 'span', 'a']
-validator.addFormat('text', (str) =>
- cheerio.load(str)('body *').toArray().every((element) =>
- element.type === 'text' || element.type === 'tag' && textTags.includes(element.tagName)))
-
-const validate = validator.compile(require('./schema.json'))
-
-module.exports = (res) => {
- return validate(res)
-}
\ No newline at end of file
diff --git a/packages/test/schema.json b/packages/test/schema.json
deleted file mode 100644
index ccc1763..0000000
--- a/packages/test/schema.json
+++ /dev/null
@@ -1,299 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-07/schema#",
- "definitions": {
- "text": {
- "type": "string",
- "format": "text"
- },
- "Element": {
- "anyOf": [
- {
- "$ref": "#/definitions/CodeBlock"
- },
- {
- "$ref": "#/definitions/Heading"
- },
- {
- "$ref": "#/definitions/InlineList"
- },
- {
- "$ref": "#/definitions/List"
- },
- {
- "$ref": "#/definitions/Paragraph"
- },
- {
- "$ref": "#/definitions/Quote"
- },
- {
- "$ref": "#/definitions/Section"
- },
- {
- "$ref": "#/definitions/Separator"
- },
- {
- "$ref": "#/definitions/Table"
- },
- {
- "$ref": "#/definitions/Usages"
- }
- ]
- },
- "Elements": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/Element"
- }
- },
- "CodeBlock": {
- "type": "object",
- "properties": {
- "type": {
- "const": "codeblock"
- },
- "lang": {
- "type": "string"
- },
- "text": {
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "text",
- "lang"
- ]
- },
- "Heading": {
- "type": "object",
- "properties": {
- "type": {
- "const": "heading"
- },
- "level": {
- "type": "integer",
- "maximum": 4,
- "minimum": 1
- },
- "text": {
- "$ref": "#/definitions/text"
- },
- "center": {
- "type": "boolean"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "level",
- "text",
- "center"
- ]
- },
- "InlineList": {
- "type": "object",
- "properties": {
- "type": {
- "const": "inlinelist"
- },
- "content": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/text"
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "content"
- ]
- },
- "List": {
- "type": "object",
- "properties": {
- "type": {
- "const": "list"
- },
- "content": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "type": {
- "const": "item"
- },
- "ordered": {
- "type": "boolean"
- },
- "indent": {
- "type": "integer",
- "minimum": 0
- },
- "text": {
- "$ref": "#/definitions/text"
- }
- },
- "required": [
- "type",
- "ordered",
- "indent",
- "content"
- ],
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "content"
- ]
- },
- "Paragraph": {
- "type": "object",
- "properties": {
- "type": {
- "const": "paragraph"
- },
- "text": {
- "$ref": "#/definitions/text"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "text"
- ]
- },
- "Quote": {
- "type": "object",
- "properties": {
- "type": {
- "const": "quote"
- },
- "style": {
- "type": "string"
- },
- "content": {
- "$ref": "#/definitions/Elements"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "style",
- "content"
- ]
- },
- "Section": {
- "type": "object",
- "properties": {
- "type": {
- "const": "section"
- },
- "level": {
- "type": "integer",
- "maximum": 4,
- "minimum": 1
- },
- "text": {
- "$ref": "#/definitions/text"
- },
- "content": {
- "$ref": "#/definitions/Elements"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "level",
- "text"
- ]
- },
- "Separator": {
- "type": "object",
- "properties": {
- "type": {
- "const": "separator"
- },
- "style": {
- "enum": [
- "normal",
- "dashed",
- "dotted"
- ]
- },
- "thick": {
- "type": "boolean"
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "style",
- "thick"
- ]
- },
- "Table": {
- "type": "object",
- "properties": {
- "type": {
- "const": "table"
- },
- "content": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/Elements"
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "content"
- ]
- },
- "Usages": {
- "type": "object",
- "properties": {
- "type": {
- "const": "usages"
- },
- "content": {
- "type": "array",
- "items": {
- "type": "object",
- "properties": {
- "type": {
- "const": "usage"
- },
- "text": {
- "$ref": "#/definitions/text"
- },
- "content": {
- "$ref": "#/definitions/Elements"
- }
- },
- "required": [
- "type",
- "text",
- "content"
- ],
- "additionalProperties": false
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "type",
- "content"
- ]
- }
- },
- "type": "array",
- "items": {
- "$ref": "#/definitions/Element"
- }
-}
\ No newline at end of file
diff --git a/packages/test/tasks/benchmark.js b/packages/test/tasks/benchmark.js
new file mode 100644
index 0000000..6f88e07
--- /dev/null
+++ b/packages/test/tasks/benchmark.js
@@ -0,0 +1,29 @@
+const { performance } = require('perf_hooks')
+const { DocumentLexer } = require('@marklet/parser')
+
+function timing(callback) {
+ const start = performance.now()
+ callback()
+ return performance.now() - start
+}
+
+module.exports = {
+ title: 'Benchmark',
+ test({ name, content }) {
+ name = name.padEnd(12)
+ let lexer
+ const initial = timing(() => {
+ lexer = new DocumentLexer()
+ lexer.parse(content)
+ })
+ const average = timing(() => {
+ for (let i = 0; i < 1000; ++i) {
+ lexer.parse(content)
+ }
+ })
+ console.log(`${name}initial: ${initial.toFixed(6)} ms.`)
+ console.log(`${name}average: ${(average / 1000).toFixed(6)} ms.`)
+ return false
+ },
+ raw: true
+}
diff --git a/packages/test/tasks/detok.js b/packages/test/tasks/detok.js
new file mode 100644
index 0000000..2ecd7dc
--- /dev/null
+++ b/packages/test/tasks/detok.js
@@ -0,0 +1,27 @@
+const { DocumentLexer } = require('@marklet/parser')
+const equal = require('fast-deep-equal')
+const detok = require('@marklet/detok')
+const util = require('util')
+
+const lexer = new DocumentLexer()
+
+const inspectOptions = {
+ showHidden: false,
+ depth: null,
+ colors: true,
+ breakLength: 50,
+}
+
+module.exports = {
+ title: 'Detokenize',
+ test(result) {
+ const content = result.content
+ const parseResult = lexer.parse(detok(result.content))
+ result = !equal(parseResult, content)
+ if (result) {
+ console.log(util.inspect(content, inspectOptions))
+ console.log(util.inspect(parseResult, inspectOptions))
+ return true
+ }
+ }
+}
diff --git a/packages/test/tasks/schema.js b/packages/test/tasks/schema.js
new file mode 100644
index 0000000..51aa2c9
--- /dev/null
+++ b/packages/test/tasks/schema.js
@@ -0,0 +1,28 @@
+const cheerio = require('cheerio')
+const yaml = require('js-yaml')
+const path = require('path')
+const Ajv = require('ajv')
+const fs = require('fs')
+
+const textTags = ['br', 'code', 'em', 'strong', 'del', 'span', 'a']
+
+const validator = new Ajv()
+ .addFormat('text', (source) => {
+ return cheerio.load(source)('body *').toArray().every((element) => {
+ return element.type === 'text' || element.type === 'tag' && textTags.includes(element.tagName)
+ })
+ })
+
+const schema = yaml.safeLoad(fs.readFileSync(path.resolve(__dirname, 'schema.yaml')))
+const validate = validator.compile(schema)
+
+module.exports = {
+ title: 'Shape Correctness',
+ test(data) {
+ const result = !validate(data.content)
+ if (result) {
+ console.log(JSON.stringify(data, null, 2))
+ return true
+ }
+ }
+}
diff --git a/packages/test/tasks/schema.yaml b/packages/test/tasks/schema.yaml
new file mode 100644
index 0000000..63cf2c3
--- /dev/null
+++ b/packages/test/tasks/schema.yaml
@@ -0,0 +1,238 @@
+$schema: http://json-schema.org/draft-07/schema#
+
+$ref: '#/definitions/nodes'
+
+definitions:
+ text:
+ type: string
+ format: text
+
+ nodes:
+ type: array
+ items:
+ anyOf:
+ - $ref: '#/definitions/codeblock'
+ - $ref: '#/definitions/heading'
+ - $ref: '#/definitions/inlinelist'
+ - $ref: '#/definitions/list'
+ - $ref: '#/definitions/paragraph'
+ - $ref: '#/definitions/quote'
+ - $ref: '#/definitions/section'
+ - $ref: '#/definitions/separator'
+ - $ref: '#/definitions/table'
+ - $ref: '#/definitions/usages'
+
+ codeblock:
+ type: object
+ properties:
+ type:
+ const: codeblock
+ lang:
+ type: string
+ text:
+ type: string
+ additionalProperties: false
+ required:
+ - type
+ - text
+ - lang
+
+ heading:
+ type: object
+ properties:
+ type:
+ const: heading
+ center:
+ type: boolean
+ level:
+ type: integer
+ maximum: 4
+ minimum: 1
+ text:
+ $ref: '#/definitions/text'
+ additionalProperties: false
+ required:
+ - type
+ - level
+ - text
+ - center
+
+ inlinelist:
+ type: object
+ properties:
+ type:
+ const: inlinelist
+ content:
+ type: array
+ items:
+ $ref: '#/definitions/text'
+ additionalProperties: false
+ required:
+ - type
+ - content
+
+ list:
+ type: object
+ properties:
+ type:
+ const: list
+ children:
+ type: array
+ items:
+ $ref: '#/definitions/list-item'
+ additionalProperties: false
+ required:
+ - type
+ - children
+
+ list-item:
+ type: object
+ properties:
+ type:
+ const: list-item
+ text:
+ $ref: '#/definitions/text'
+ order:
+ type: string
+ pattern: '[a-zA-Z\d]*'
+ children:
+ type: array
+ items:
+ $ref: '#/definitions/list-item'
+ additionalProperties: false
+ required:
+ - type
+ - text
+ - order
+
+ paragraph:
+ type: object
+ properties:
+ type:
+ const: paragraph
+ text:
+ $ref: '#/definitions/text'
+ additionalProperties: false
+ required:
+ - type
+ - text
+
+ quote:
+ type: object
+ properties:
+ type:
+ const: quote
+ style:
+ type: string
+ content:
+ $ref: '#/definitions/nodes'
+ additionalProperties: false
+ required:
+ - type
+ - style
+ - content
+
+ section:
+ type: object
+ properties:
+ type:
+ const: section
+ initial:
+ enum:
+ - open
+ - closed
+ level:
+ type: integer
+ maximum: 4
+ minimum: 1
+ text:
+ $ref: '#/definitions/text'
+ content:
+ $ref: '#/definitions/nodes'
+ additionalProperties: false
+ required:
+ - type
+ - level
+ - text
+ - initial
+ - content
+
+ separator:
+ type: object
+ properties:
+ type:
+ const: separator
+ style:
+ enum:
+ - normal
+ - dashed
+ - dotted
+ thick:
+ type: boolean
+ additionalProperties: false
+ required:
+ - type
+ - style
+ - thick
+
+ table:
+ type: object
+ properties:
+ type:
+ const: table
+ columns:
+ type: array
+ items:
+ type: object
+ properties:
+ bold:
+ type: boolean
+ align:
+ enum:
+ - left
+ - center
+ - right
+ additionalProperties: false
+ required:
+ - bold
+ - align
+ data:
+ type: array
+ items:
+ type: array
+ items:
+ $ref: '#/definitions/text'
+ additionalProperties: false
+ required:
+ - type
+ - data
+
+ usage:
+ type: object
+ properties:
+ type:
+ const: usage
+ text:
+ $ref: '#/definitions/text'
+ content:
+ $ref: '#/definitions/nodes'
+ additionalProperties: false
+ required:
+ - type
+ - text
+ - content
+
+ usages:
+ type: object
+ properties:
+ type:
+ const: usages
+ content:
+ type: array
+ items:
+ $ref: '#/definitions/usage'
+ additionalProperties: false
+ required:
+ - type
+ - content
+
\ No newline at end of file
diff --git a/tsconfig.json b/tsconfig.json
index 6dc9dbf..0314820 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -3,10 +3,8 @@
{ "path": "./packages/core" },
{ "path": "./packages/dev-server" },
{ "path": "./packages/detok" },
- { "path": "./packages/inline" },
- { "path": "./packages/lexer" },
{ "path": "./packages/parser" },
- { "path": "./packages/syntax" }
+ { "path": "./packages/renderer" },
],
"files": []
}
\ No newline at end of file