From 01c197151a3ef0c22d5a71e668fc72b31f9f4acc Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 5 Oct 2021 09:43:47 +0200 Subject: [PATCH 001/170] :seedling: Update rocksdb --- elements/lisk-db/package.json | 4 ++-- elements/lisk-db/src/kv_store.ts | 11 ++++++--- yarn.lock | 40 +++++++++++++------------------- 3 files changed, 26 insertions(+), 29 deletions(-) diff --git a/elements/lisk-db/package.json b/elements/lisk-db/package.json index 3f60154b120..f9453524dde 100644 --- a/elements/lisk-db/package.json +++ b/elements/lisk-db/package.json @@ -38,7 +38,7 @@ "dependencies": { "debug": "4.3.1", "levelup": "4.4.0", - "rocksdb": "4.1.0" + "rocksdb": "5.1.1" }, "devDependencies": { "@types/debug": "4.1.5", @@ -47,7 +47,7 @@ "@types/jest-when": "2.7.2", "@types/levelup": "4.3.0", "@types/node": "12.20.6", - "@types/rocksdb": "3.0.0", + "@types/rocksdb": "3.0.1", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-db/src/kv_store.ts b/elements/lisk-db/src/kv_store.ts index fbe0e4dda65..c5749ec44d4 100644 --- a/elements/lisk-db/src/kv_store.ts +++ b/elements/lisk-db/src/kv_store.ts @@ -15,9 +15,13 @@ import * as fs from 'fs'; import * as path from 'path'; import { debug } from 'debug'; import levelup, { LevelUp } from 'levelup'; -import rocksDB from 'rocksdb'; import { NotFoundError } from './errors'; +// rocksdb removed the default export. However, @types/rocksdb still only exposes default. +// Therefore, temporally requiree with below syntax. 
+// eslint-disable-next-line import/order +import rocksDB = require('rocksdb'); + const logger = debug('db'); export interface Options { @@ -44,7 +48,7 @@ export interface ReadStreamOptions extends Options { } export class KVStore { - private readonly _db: LevelUp; + private readonly _db: LevelUp; public constructor(filePath: string) { logger('opening file', { filePath }); @@ -52,7 +56,8 @@ export class KVStore { if (!fs.existsSync(parentDir)) { throw new Error(`${parentDir} does not exist`); } - this._db = levelup(rocksDB(filePath)); + // eslint-disable-next-line @typescript-eslint/no-unsafe-call,@typescript-eslint/no-explicit-any + this._db = levelup((rocksDB as any)(filePath)); } public async close(): Promise { diff --git a/yarn.lock b/yarn.lock index 0839a091916..639d9ce9b6d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3891,10 +3891,10 @@ dependencies: "@types/node" "*" -"@types/rocksdb@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/rocksdb/-/rocksdb-3.0.0.tgz#6d2cacb6809a53c7eeb09c078699e3009b2d8fe2" - integrity sha512-idP99dvZ0P8Q1cGXkUQhcvJrKdFJIfYQ46plfgoe8RB3/OkE88n8RmgL/LE8+gbe6mTRyvb6MOaeRZu59qL4cQ== +"@types/rocksdb@3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/rocksdb/-/rocksdb-3.0.1.tgz#bb1e43ca3bfb5d7969211525979160ada09c00bc" + integrity sha512-fQhnc9CeRAi9dnDXlaaItYtm3FhqE8KZYhvj3zJve2pT57pdbySah3uELxrFt15jVcSoKsLHBuwUhU5TqQgnVw== dependencies: "@types/abstract-leveldown" "*" "@types/node" "*" @@ -4517,14 +4517,6 @@ abstract-leveldown@^7.0.0: level-supports "^2.0.0" queue-microtask "^1.2.3" -abstract-leveldown@~6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.1.1.tgz#f44bad5862d71c7b418110d7698ac25bedf24396" - integrity sha512-7fK/KySVqzKIomdhkSWzX4YBQhzkzEMbWSiaB6mSN9e+ZdV3KEeKxia/8xQzCkATA5xnnukdP88cFR0D2FsFXw== - dependencies: - level-concat-iterator "~2.0.0" - xtend "~4.0.0" - abstract-leveldown@~6.2.1: version "6.2.3" resolved 
"https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz#036543d87e3710f2528e47040bc3261b77a9a8eb" @@ -12890,7 +12882,7 @@ nanomatch@^1.2.9: snapdragon "^0.8.1" to-regex "^3.0.1" -napi-macros@~2.0.0: +napi-macros@^2.0.0, napi-macros@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg== @@ -12965,10 +12957,10 @@ node-gyp-build@^4.2.0, node-gyp-build@~4.2.1: resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.3.tgz#ce6277f853835f718829efb47db20f3e4d9c4739" integrity sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg== -node-gyp-build@~4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" - integrity sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ== +node-gyp-build@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.3.0.tgz#9f256b03e5826150be39c764bf51e993946d71a3" + integrity sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q== node-gyp@^5.0.2: version "5.1.1" @@ -15959,14 +15951,14 @@ ripemd160@^2.0.0, ripemd160@^2.0.1: hash-base "^3.0.0" inherits "^2.0.1" -rocksdb@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/rocksdb/-/rocksdb-4.1.0.tgz#79c2cb716ffb9b317bdb48f80d1985410179ecac" - integrity sha512-NtjezNMuxk9NsC1kGybZ8Y9eWKqAohug88xbtxCwb5n7MhDDgtKJuL7VYUk8s8Q9U4Mzolt026k3hD8eHtoUvQ== +rocksdb@5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/rocksdb/-/rocksdb-5.1.1.tgz#ab5a996d4a4f24f84f609a53e1e2c332ee193f9d" + integrity sha512-eHQKJCa1gsvzK31nYJIwZvscIx/MSPbC4ipted2zdSv99OoJSzYCyaEbhujQFo7m+WVR0XC3xTT/parjBn2Uzw== 
dependencies: - abstract-leveldown "~6.1.1" - napi-macros "~2.0.0" - node-gyp-build "~4.1.0" + abstract-leveldown "^7.0.0" + napi-macros "^2.0.0" + node-gyp-build "^4.3.0" rollup-plugin-babel@^4.3.3: version "4.4.0" From c2f4277b7969892db631f87432369d228cf84f76 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 17 Mar 2022 16:41:08 +0100 Subject: [PATCH 002/170] :arrow_up: Bump nodejs version --- commander/package.json | 6 +- .../templates/init/package-template.json | 6 +- .../templates/init_plugin/package.json | 6 +- elements/lisk-api-client/package.json | 6 +- elements/lisk-bft/package.json | 6 +- elements/lisk-chain/package.json | 6 +- elements/lisk-client/package.json | 6 +- elements/lisk-codec/package.json | 6 +- elements/lisk-cryptography/package.json | 6 +- elements/lisk-db/package.json | 6 +- elements/lisk-elements/package.json | 6 +- elements/lisk-genesis/package.json | 6 +- elements/lisk-p2p/package.json | 6 +- elements/lisk-passphrase/package.json | 6 +- elements/lisk-transaction-pool/package.json | 6 +- elements/lisk-transactions/package.json | 6 +- elements/lisk-tree/package.json | 6 +- elements/lisk-utils/package.json | 6 +- elements/lisk-validator/package.json | 6 +- .../package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 6 +- .../lisk-framework-forger-plugin/package.json | 6 +- .../package.json | 6 +- .../package.json | 6 +- .../package.json | 6 +- framework/package.json | 6 +- package.json | 6 +- protocol-specs/package.json | 14 +- sdk/package.json | 6 +- templates/package.json.tmpl | 4 +- yarn.lock | 185 ++---------------- 31 files changed, 109 insertions(+), 262 deletions(-) diff --git a/commander/package.json b/commander/package.json index a052ae00e69..b36980c5155 100644 --- a/commander/package.json +++ b/commander/package.json @@ -20,8 +20,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist/index.js", 
"bin": { @@ -147,7 +147,7 @@ "@typescript-eslint/parser": "4.19.0", "copyfiles": "2.4.1", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 978b6886ee0..527268d1827 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -14,8 +14,8 @@ "homepage": "", "repository": {}, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist/index.js", "scripts": { @@ -122,7 +122,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "globby": "10.0.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 0cc2ac411f1..44b1a6c2428 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -11,8 +11,8 @@ "url": "" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -38,7 +38,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", 
"eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index f92aa2b5b48..74132a4af19 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -52,7 +52,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index b46512d6622..0d51fc1421d 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -53,7 +53,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "faker": "4.1.0", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 04e2b990ba8..457d574e4cf 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -55,7 +55,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", 
"@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "faker": "4.1.0", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 74dd9cc43e4..6952b3d037f 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "exports": { @@ -75,7 +75,7 @@ "cypress": "5.6.0", "cypress-jest-adapter": "0.1.1", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 9ac9e06ddc7..cc60ea9b58a 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -47,7 +47,7 @@ "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index aef079773f7..5a828876a41 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": 
"dist-node/index.js", "scripts": { @@ -53,7 +53,7 @@ "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-db/package.json b/elements/lisk-db/package.json index f9453524dde..71645b06db6 100644 --- a/elements/lisk-db/package.json +++ b/elements/lisk-db/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -52,7 +52,7 @@ "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 4b1b217daaa..0bd2ce4611a 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -58,7 +58,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 4e5549a423c..e4594265c99 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", 
- "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -50,7 +50,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index b8b9ed5d279..a8763b7ca82 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -18,8 +18,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -64,7 +64,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 41eb99e07c1..e5e8f026f0c 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -46,7 +46,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 499af3c7dcb..d93f6306747 100644 --- 
a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "typings": "dist-node/index.d.ts", @@ -49,7 +49,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index e55c4cd4ddd..dd8e515dcae 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -47,7 +47,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index e293afbd319..5413943dc2f 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -47,7 +47,7 @@ "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", 
"eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index 949e15a4238..115b1215f0a 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -46,7 +46,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index fd09183ec05..b78ae0869b0 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -18,8 +18,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -53,7 +53,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index fa1a559a9a7..693cbee9788 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -18,8 +18,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": 
"dist-node/index.js", "scripts": { @@ -80,7 +80,7 @@ "dotenv": "8.2.0", "dotenv-expand": "5.1.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-config-react-app": "^6.0.0", "eslint-plugin-flowtype": "^5.2.0", "eslint-plugin-import": "2.22.1", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 7cc4946ef51..ae9625600c9 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -18,8 +18,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -82,7 +82,7 @@ "dotenv": "8.2.0", "dotenv-expand": "5.1.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-config-react-app": "^6.0.0", "eslint-plugin-flowtype": "^5.2.0", "eslint-plugin-import": "2.22.1", diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index a6be69ffdf8..b2f5c65abe5 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -18,8 +18,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -69,7 +69,7 @@ "@typescript-eslint/parser": "4.19.0", "axios": "0.21.1", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "fs-extra": "9.0.0", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json 
b/framework-plugins/lisk-framework-http-api-plugin/package.json index 5f0c2e598be..a08d5b50c2b 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -60,7 +60,7 @@ "@typescript-eslint/parser": "4.19.0", "axios": "0.21.1", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "fs-extra": "9.1.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index fa2a890fec9..13480fb7b32 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -59,7 +59,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 9e17ff138e9..c8d8e03d2b2 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 
<=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -62,7 +62,7 @@ "@typescript-eslint/parser": "4.19.0", "axios": "0.21.1", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/framework/package.json b/framework/package.json index 67fdb4e818b..8231c635700 100644 --- a/framework/package.json +++ b/framework/package.json @@ -20,8 +20,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -73,7 +73,7 @@ "@typescript-eslint/parser": "4.19.0", "copyfiles": "2.2.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/package.json b/package.json index 55361153b13..5d5a8150a87 100644 --- a/package.json +++ b/package.json @@ -18,8 +18,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "workspaces": { "packages": [ @@ -62,7 +62,7 @@ "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "husky": "6.0.0", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index cc00e8c922c..79fc83a71df 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -14,20 +14,20 @@ "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": 
">=8.1.0" }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.1.0", - "@liskhq/lisk-cryptography": "2.4.2", - "@liskhq/lisk-passphrase": "3.0.0", - "@liskhq/lisk-validator": "0.5.1", + "@liskhq/lisk-codec": "0.2.1", + "@liskhq/lisk-cryptography": "3.2.0", + "@liskhq/lisk-passphrase": "3.1.0", + "@liskhq/lisk-validator": "0.6.1", "protobufjs": "6.9.0" }, "devDependencies": { "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "prettier": "2.2.1" diff --git a/sdk/package.json b/sdk/package.json index 239265019d3..8af89410639 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { @@ -51,7 +51,7 @@ }, "devDependencies": { "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.0", + "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", "eslint-plugin-jest": "24.3.2", "jest": "26.6.3", diff --git a/templates/package.json.tmpl b/templates/package.json.tmpl index c23ec9245c8..c5ee8fe837e 100644 --- a/templates/package.json.tmpl +++ b/templates/package.json.tmpl @@ -17,8 +17,8 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=12.13.0 <=12", - "npm": ">=6.12.0" + "node": ">=16.14.1 <=16", + "npm": ">=8.1.0" }, "main": "dist-node/index.js", "scripts": { diff --git a/yarn.lock b/yarn.lock index 639d9ce9b6d..723cff4c0a1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2515,73 +2515,6 @@ dependencies: "@types/node" "11.11.2" -"@liskhq/lisk-codec@0.1.0": - version "0.1.0" - resolved "https://registry.yarnpkg.com/@liskhq/lisk-codec/-/lisk-codec-0.1.0.tgz#deea355d1dcbf83432f213a57c4df087b45f091d" - integrity 
sha512-VM5aF23JoLDuRI6pJ76QN3UymcxT6isgfRTtdD0DkUzoYkB1VrM2TvrmJenF7FRApJc0bCh/cGBtVKDTLyto5g== - dependencies: - "@liskhq/lisk-utils" "^0.1.0" - "@liskhq/lisk-validator" "^0.5.0" - -"@liskhq/lisk-cryptography@2.4.2": - version "2.4.2" - resolved "https://registry.yarnpkg.com/@liskhq/lisk-cryptography/-/lisk-cryptography-2.4.2.tgz#77eecaa2d36bfa00bb43ef92bdc4683ecaadff8b" - integrity sha512-y3T/Jg/OvSIhieqY7NKpmnkajpbz+JSxWUx6DyebW/jAXZAUVDoh5n64NPNe78op5TXsMIwqPFao6GE+D3MKsA== - dependencies: - "@liskhq/bignum" "1.3.1" - "@types/ed2curve" "0.2.2" - "@types/node" "12.12.11" - buffer-reverse "1.0.1" - ed2curve "0.2.1" - tweetnacl "1.0.1" - varuint-bitcoin "1.1.0" - optionalDependencies: - sodium-native "2.4.6" - -"@liskhq/lisk-passphrase@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@liskhq/lisk-passphrase/-/lisk-passphrase-3.0.0.tgz#14e5034446d28008a966b90e906bd5ca8ebee28e" - integrity sha512-j/+vSCw6UrYb0PTOLJGbdHCHW/oGNWkHqVKzvUMWiQjLUwIF738GwM1l66eppsx2v7RDm+P7hKTTkGuVtqcSGQ== - dependencies: - "@types/bip39" "2.4.1" - "@types/node" "12.12.11" - bip39 "2.5.0" - -"@liskhq/lisk-utils@^0.1.0": - version "0.1.0" - resolved "https://registry.yarnpkg.com/@liskhq/lisk-utils/-/lisk-utils-0.1.0.tgz#b8f31e0bde5a8d0bf2bee9f6f7e6b9bfe2cd45b0" - integrity sha512-PR36Rxk6Nhg8Z6vvEIOpbeTuISaw23It6WhVyxEibH2RN2UPpUwDWR60BcIqZtR1FCK5vEcDMTvBXu1FgawbdA== - dependencies: - lodash.clonedeep "4.5.0" - -"@liskhq/lisk-validator@0.5.1": - version "0.5.1" - resolved "https://registry.yarnpkg.com/@liskhq/lisk-validator/-/lisk-validator-0.5.1.tgz#3cb2a83684dfd3c0d729400579e7f015f8e8527d" - integrity sha512-mIFRetkasdU7QoaoOmRWxHZaUXvKXWjAgA3wwn8wnSANArSVtWyM23xvfDVBLzPFRnMH9LfvSt66U8uYkGdIPg== - dependencies: - "@liskhq/lisk-cryptography" "^3.0.1" - "@types/node" "12.12.11" - "@types/semver" "7.1.0" - "@types/validator" "12.0.1" - ajv "6.12.0" - debug "4.1.1" - semver "7.1.3" - validator "12.2.0" - -"@liskhq/lisk-validator@^0.5.0": - version "0.5.2" - resolved 
"https://registry.yarnpkg.com/@liskhq/lisk-validator/-/lisk-validator-0.5.2.tgz#64240b6cc51fe1a2e7a1fc29b7f1cad0c43f8c16" - integrity sha512-NiIq7vxHBNWcuOyhzImwtGxqstOBKdcFhckkW+ZMlRFZbyEC1eCFbeVd0S7TivBWS3JfHWUeTEbOczavGo669A== - dependencies: - "@liskhq/lisk-cryptography" "^3.0.2" - "@types/node" "12.12.11" - "@types/semver" "7.1.0" - "@types/validator" "12.0.1" - ajv "6.12.0" - debug "4.1.1" - semver "7.1.3" - validator "12.2.0" - "@nodelib/fs.scandir@2.1.3": version "2.1.3" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz#3a582bdb53804c6ba6d146579c46e52130cf4a3b" @@ -3344,13 +3277,6 @@ dependencies: "@babel/types" "^7.3.0" -"@types/bip39@2.4.1": - version "2.4.1" - resolved "https://registry.yarnpkg.com/@types/bip39/-/bip39-2.4.1.tgz#1a47b453b59a50d7b5856819b834c74798915eb3" - integrity sha512-QHx0qI6JaTIW/S3zxE/bXrwOWu6Boos+LZ4438xmFAHY5k+qHkExMdAnb/DENEt2RBnOdZ6c5J+SHrnLEhUohQ== - dependencies: - "@types/node" "*" - "@types/bip39@3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/bip39/-/bip39-3.0.0.tgz#4b5b9e89196e0c6c3793f1950724b197018daf70" @@ -3766,11 +3692,6 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-11.11.6.tgz#df929d1bb2eee5afdda598a41930fe50b43eaa6a" integrity sha512-Exw4yUWMBXM3X+8oqzJNRqZSwUAaS4+7NdvHqQuFi/d+synz++xmX3QIf+BFqneW8N31R8Ky+sikfZUXq07ggQ== -"@types/node@12.12.11": - version "12.12.11" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.11.tgz#bec2961975888d964196bf0016a2f984d793d3ce" - integrity sha512-O+x6uIpa6oMNTkPuHDa9MhMMehlxLAd5QcOvKRjAFsBVpeFWTOPnXbDvILvFgFFZfQ1xh1EZi1FbXxUix+zpsQ== - "@types/node@12.20.6": version "12.20.6" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.6.tgz#7b73cce37352936e628c5ba40326193443cfba25" @@ -4057,11 +3978,6 @@ dependencies: source-map "^0.6.1" -"@types/validator@12.0.1": - version "12.0.1" - resolved 
"https://registry.yarnpkg.com/@types/validator/-/validator-12.0.1.tgz#73dbc7f5f730ff7131754bca682824eb3c260b79" - integrity sha512-l57fIANZLMe8DArz+SDb+7ATXnDm15P7u2wHBw5mb0aSMd+UuvmvhouBF2hdLgQPDMJ39sh9g2MJO4GkZ0VAdQ== - "@types/validator@13.1.3": version "13.1.3" resolved "https://registry.yarnpkg.com/@types/validator/-/validator-13.1.3.tgz#366b394aa3fbeed2392bf0a20ded606fa4a3d35e" @@ -4657,16 +4573,6 @@ ajv-keywords@^3.1.0, ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@6.12.0: - version "6.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.0.tgz#06d60b96d87b8454a5adaba86e7854da629db4b7" - integrity sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - ajv@8.1.0, ajv@^8.0.0: version "8.1.0" resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.1.0.tgz#45d5d3d36c7cdd808930cc3e603cf6200dbeb736" @@ -5416,17 +5322,6 @@ bip39@*: pbkdf2 "^3.0.9" randombytes "^2.0.1" -bip39@2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/bip39/-/bip39-2.5.0.tgz#51cbd5179460504a63ea3c000db3f787ca051235" - integrity sha512-xwIx/8JKoT2+IPJpFEfXoWdYwP7UVAoUxxLNfGCfVowaJE7yg1Y5B1BVPqlUNsBq5/nGwmFkwRJ8xDW4sX8OdA== - dependencies: - create-hash "^1.1.0" - pbkdf2 "^3.0.9" - randombytes "^2.0.1" - safe-buffer "^5.0.1" - unorm "^1.3.3" - bip39@3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/bip39/-/bip39-3.0.3.tgz#4a8b79067d6ed2e74f9199ac994a2ab61b176760" @@ -7312,13 +7207,6 @@ debug@4, debug@4.3.1, debug@^4.0.0, debug@^4.2.0, debug@^4.3.1: dependencies: ms "2.1.2" -debug@4.1.1, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: - version "4.1.1" - resolved 
"https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" - integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== - dependencies: - ms "^2.1.1" - debug@=3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" @@ -7333,6 +7221,13 @@ debug@^3.1.0, debug@^3.1.1, debug@^3.2.5: dependencies: ms "^2.1.1" +debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" + integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== + dependencies: + ms "^2.1.1" + debuglog@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" @@ -7770,13 +7665,6 @@ ecdsa-sig-formatter@1.0.11: dependencies: safe-buffer "^5.0.1" -ed2curve@0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/ed2curve/-/ed2curve-0.2.1.tgz#22e6aaa3569e3c4dbf4eefa29612ec329e58190c" - integrity sha1-Iuaqo1aePE2/Tu+ilhLsMp5YGQw= - dependencies: - tweetnacl "0.x.x" - ed2curve@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/ed2curve/-/ed2curve-0.3.0.tgz#322b575152a45305429d546b071823a93129a05d" @@ -8099,10 +7987,10 @@ eslint-config-airbnb-base@14.2.1: object.assign "^4.1.2" object.entries "^1.1.2" -eslint-config-lisk-base@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/eslint-config-lisk-base/-/eslint-config-lisk-base-2.0.0.tgz#4241a599466ec0345e3447cf492fecaf9837e596" - integrity sha512-Y24PBz8jXzqmR/Z20yTWTrzL14qj9CzvLzYlbaOYkxiDxIjhLhnjINuG16jmJOSj77kPdoAZz8sMPgb5X2ZmdQ== +eslint-config-lisk-base@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/eslint-config-lisk-base/-/eslint-config-lisk-base-2.0.1.tgz#f9886032bafd4643bc3940c129c3fcfe5d684ff1" + integrity 
sha512-A6fK9qOChiRc/7r0QyDIXCStxKQd/TmhOJlZnN/IK5viW5TAs9XBhlFcG1cXVNPYtRBhhNX2nosBTWPwjLfyiQ== dependencies: "@typescript-eslint/eslint-plugin" "4.19.0" "@typescript-eslint/parser" "4.19.0" @@ -12947,11 +12835,6 @@ node-forge@^0.10.0: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== -node-gyp-build@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.0.tgz#2c2b05f461f4178641a6ce2d7159f04094e9376d" - integrity sha512-4oiumOLhCDU9Rronz8PZ5S4IvT39H5+JEv/hps9V8s7RSLhsac0TCP78ulnHXOo8X1wdpPiTayGlM1jr4IbnaQ== - node-gyp-build@^4.2.0, node-gyp-build@~4.2.1: version "4.2.3" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.3.tgz#ce6277f853835f718829efb47db20f3e4d9c4739" @@ -16236,11 +16119,6 @@ semver@7.0.0: resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== -semver@7.1.3: - version "7.1.3" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.1.3.tgz#e4345ce73071c53f336445cfc19efb1c311df2a6" - integrity sha512-ekM0zfiA9SCBlsKa2X1hxyxiI4L3B6EbVJkkdgQXnSEEaHlGdvyodMruTiulSRWMMB4NeIuYNMC9rTKTz97GxA== - semver@7.3.5, semver@^7.1.1, semver@^7.3.4, semver@^7.3.5: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" @@ -16615,15 +16493,6 @@ socks@^2.6.1: ip "^1.1.5" smart-buffer "^4.1.0" -sodium-native@2.4.6: - version "2.4.6" - resolved "https://registry.yarnpkg.com/sodium-native/-/sodium-native-2.4.6.tgz#8a8173095e8cf4f997de393a2ba106c34870cac2" - integrity sha512-Ro9lhTjot8M01nwKLXiqLSmjR7B8o+Wg4HmJUjEShw/q6XPlNMzjPkA1VJKaMH8SO8fJ/sggAKVwreTaFszS2Q== - dependencies: - ini "^1.3.5" - nan "^2.14.0" - 
node-gyp-build "^4.1.0" - sodium-native@3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/sodium-native/-/sodium-native-3.2.0.tgz#68a9469b96edadffef320cbce51294ad5f72a37f" @@ -17864,21 +17733,16 @@ tunnel-agent@^0.6.0: dependencies: safe-buffer "^5.0.1" -tweetnacl@0.x.x, tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= - -tweetnacl@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.1.tgz#2594d42da73cd036bd0d2a54683dd35a6b55ca17" - integrity sha512-kcoMoKTPYnoeS50tzoqjPY3Uv9axeuuFAZY9M/9zFnhoVvRfxz9K29IMPD7jGmt2c8SW7i3gT9WqDl2+nV7p4A== - tweetnacl@1.0.3, tweetnacl@1.x.x: version "1.0.3" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.3.tgz#ac0af71680458d8a6378d0d0d050ab1407d35596" integrity sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw== +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + tweetnacl@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.2.tgz#39fba8d1f7a34706a3fc1fd8c04afcad7bb9e689" @@ -18108,11 +17972,6 @@ universalify@^2.0.0: resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== -unorm@^1.3.3: - version "1.6.0" - resolved "https://registry.yarnpkg.com/unorm/-/unorm-1.6.0.tgz#029b289661fba714f1a9af439eb51d9b16c205af" - integrity sha512-b2/KCUlYZUeA7JFUuRJZPUtr4gZvBh7tavtv4fvk4+KV9pfGiR6CQAQAWl49ZpR3ts2dk4FYkP7EIgDJoiOLDA== - unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" @@ -18293,11 +18152,6 @@ validate-npm-package-name@^3.0.0: dependencies: builtins "^1.0.3" -validator@12.2.0: - version "12.2.0" - resolved "https://registry.yarnpkg.com/validator/-/validator-12.2.0.tgz#660d47e96267033fd070096c3b1a6f2db4380a0a" - integrity sha512-jJfE/DW6tIK1Ek8nCfNFqt8Wb3nzMoAbocBF6/Icgg1ZFSBpObdnwVY2jQj6qUqzhx5jc71fpvBWyLGO7Xl+nQ== - validator@13.5.2: version "13.5.2" resolved "https://registry.yarnpkg.com/validator/-/validator-13.5.2.tgz#c97ae63ed4224999fb6f42c91eaca9567fe69a46" @@ -18308,13 +18162,6 @@ value-equal@^1.0.1: resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" integrity sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw== -varuint-bitcoin@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/varuint-bitcoin/-/varuint-bitcoin-1.1.0.tgz#7a343f50537607af6a3059312b9782a170894540" - integrity sha512-jCEPG+COU/1Rp84neKTyDJQr478/hAfVp5xxYn09QEH0yBjbmPeMfuuQIrp+BUD83hybtYZKhr5elV3bvdV1bA== - dependencies: - safe-buffer "^5.1.1" - varuint-bitcoin@1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/varuint-bitcoin/-/varuint-bitcoin-1.1.2.tgz#e76c138249d06138b480d4c5b40ef53693e24e92" From abb0146339bb44ec12f1a152450dda6dba28a251 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 17 Mar 2022 19:07:58 +0100 Subject: [PATCH 003/170] :arrow_up: Bump dependencies --- commander/package.json | 20 +- commander/src/base.ts | 3 +- commander/src/base_bootstrap_command.ts | 2 +- .../src/bootstrapping/commands/account/get.ts | 2 +- .../commands/account/validate.ts | 2 +- .../bootstrapping/commands/base_forging.ts | 4 +- .../src/bootstrapping/commands/block/get.ts | 2 +- .../commands/blockchain/import.ts | 2 +- .../commands/blockchain/reset.ts | 2 +- .../src/bootstrapping/commands/config/show.ts | 4 +- 
.../commands/forger-info/import.ts | 2 +- .../bootstrapping/commands/forging/config.ts | 6 +- .../bootstrapping/commands/forging/disable.ts | 1 - .../bootstrapping/commands/forging/enable.ts | 1 - .../commands/genesis-block/create.ts | 14 +- .../src/bootstrapping/commands/hash-onion.ts | 6 +- .../src/bootstrapping/commands/node/info.ts | 2 +- .../commands/passphrase/encrypt.ts | 4 +- commander/src/bootstrapping/commands/start.ts | 20 +- .../commands/transaction/create.ts | 17 +- .../bootstrapping/commands/transaction/get.ts | 2 +- .../commands/transaction/sign.ts | 12 +- commander/src/commands/generate/plugin.ts | 6 +- commander/src/commands/init.ts | 2 +- commander/src/utils/flags.ts | 12 +- .../external_types/pm2-axon/index.d.ts | 3 + elements/lisk-api-client/package.json | 4 +- elements/lisk-bft/package.json | 8 +- elements/lisk-chain/package.json | 8 +- elements/lisk-client/package.json | 2 +- elements/lisk-codec/package.json | 2 +- elements/lisk-cryptography/package.json | 2 +- elements/lisk-db/package.json | 6 +- elements/lisk-db/src/utils.ts | 2 +- elements/lisk-elements/package.json | 2 +- elements/lisk-genesis/package.json | 2 +- elements/lisk-p2p/package.json | 2 +- elements/lisk-passphrase/package.json | 4 +- elements/lisk-transaction-pool/package.json | 6 +- elements/lisk-transactions/package.json | 2 +- elements/lisk-tree/package.json | 2 +- elements/lisk-tree/src/merkle_tree.ts | 2 +- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 6 +- .../package.json | 8 +- .../lisk-framework-faucet-plugin/package.json | 8 +- .../lisk-framework-forger-plugin/package.json | 8 +- .../package.json | 6 +- .../src/http_api_plugin.ts | 2 +- .../package.json | 4 +- .../src/monitor_plugin.ts | 2 +- .../package.json | 6 +- framework/external_types/pm2-axon/index.d.ts | 3 + framework/package.json | 8 +- package.json | 2 +- tsconfig.json | 1 + yarn.lock | 809 ++++++++++++------ 57 files changed, 703 insertions(+), 381 deletions(-) diff --git 
a/commander/package.json b/commander/package.json index b36980c5155..fa89b30ff5f 100644 --- a/commander/package.json +++ b/commander/package.json @@ -102,14 +102,14 @@ "@liskhq/lisk-transactions": "^5.2.1", "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", - "@oclif/command": "1.8.0", - "@oclif/config": "1.17.0", - "@oclif/errors": "1.2.2", - "@oclif/parser": "3.8.5", - "@oclif/plugin-autocomplete": "0.3.0", - "@oclif/plugin-help": "3.2.2", + "@oclif/command": "1.8.16", + "@oclif/config": "1.18.3", + "@oclif/errors": "1.3.5", + "@oclif/parser": "3.8.7", + "@oclif/plugin-autocomplete": "1.2.0", + "@oclif/plugin-help": "5.1.12", "axios": "0.21.1", - "bip39": "3.0.3", + "bip39": "3.0.4", "chalk": "4.1.0", "cli-table3": "0.6.0", "fs-extra": "9.1.0", @@ -119,14 +119,14 @@ "progress": "2.0.3", "semver": "7.3.5", "strip-ansi": "6.0.0", - "tar": "6.0.1", + "tar": "6.1.11", "ts-morph": "9.1.0", "tslib": "1.14.1", "yeoman-environment": "3.3.0", "yeoman-generator": "5.2.0" }, "devDependencies": { - "@oclif/dev-cli": "1.26.0", + "@oclif/dev-cli": "1.26.10", "@oclif/test": "1.2.8", "@types/bip39": "3.0.0", "@types/expect": "24.3.0", @@ -136,7 +136,7 @@ "@types/jest-when": "2.7.2", "@types/jquery": "3.3.33", "@types/listr": "0.14.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/progress": "2.0.3", "@types/semver": "7.1.0", "@types/strip-ansi": "5.2.1", diff --git a/commander/src/base.ts b/commander/src/base.ts index 4375aa3c150..5c14dd568ba 100644 --- a/commander/src/base.ts +++ b/commander/src/base.ts @@ -30,7 +30,8 @@ interface PrintFlags { } export default abstract class BaseCommand extends Command { - static flags = { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + static flags: flagParser.Input = { json: flagParser.boolean({ char: 'j', description: jsonDescription, diff --git a/commander/src/base_bootstrap_command.ts b/commander/src/base_bootstrap_command.ts index 6db49417f69..dfbb8d7faed 100644 --- 
a/commander/src/base_bootstrap_command.ts +++ b/commander/src/base_bootstrap_command.ts @@ -21,7 +21,7 @@ interface BootstrapFlags { } export default abstract class BaseBootstrapCommand extends Command { - static flags = { + static flags: flagParser.Input = { template: flagParser.string({ char: 't', description: diff --git a/commander/src/bootstrapping/commands/account/get.ts b/commander/src/bootstrapping/commands/account/get.ts index 91253e3c8bf..b76e6b910ce 100644 --- a/commander/src/bootstrapping/commands/account/get.ts +++ b/commander/src/bootstrapping/commands/account/get.ts @@ -56,7 +56,7 @@ export abstract class GetCommand extends BaseIPCClientCommand { if (/^Specified key accounts:address:(.*)does not exist/.test((errors as Error).message)) { this.error(`Account with address '${address}' was not found.`); } else { - this.error(errorMessage); + this.error(errorMessage as string); } } } diff --git a/commander/src/bootstrapping/commands/account/validate.ts b/commander/src/bootstrapping/commands/account/validate.ts index c09fbcdd434..1fed86d149e 100644 --- a/commander/src/bootstrapping/commands/account/validate.ts +++ b/commander/src/bootstrapping/commands/account/validate.ts @@ -49,7 +49,7 @@ export class ValidateCommand extends Command { ); } catch (error) { // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - this.error(error.message); + this.error((error as Error).message); } } } diff --git a/commander/src/bootstrapping/commands/base_forging.ts b/commander/src/bootstrapping/commands/base_forging.ts index 9d94c34e600..e67de141e96 100644 --- a/commander/src/bootstrapping/commands/base_forging.ts +++ b/commander/src/bootstrapping/commands/base_forging.ts @@ -43,7 +43,7 @@ export abstract class BaseForgingCommand extends BaseIPCClientCommand { overwrite: flagParser.boolean({ description: 'Overwrites the forger info', default: false, - }), + }) as flagParser.IFlag, }; protected forging!: boolean; @@ -96,7 +96,7 @@ export abstract class 
BaseForgingCommand extends BaseIPCClientCommand { this.log('Forging status:'); this.printJSON(result); } catch (error) { - this.error(error); + this.error(error as Error); } } } diff --git a/commander/src/bootstrapping/commands/block/get.ts b/commander/src/bootstrapping/commands/block/get.ts index 97aec79c1c8..3c998e80cf4 100644 --- a/commander/src/bootstrapping/commands/block/get.ts +++ b/commander/src/bootstrapping/commands/block/get.ts @@ -77,7 +77,7 @@ export abstract class GetCommand extends BaseIPCClientCommand { this.error('Block with given id or height was not found.'); } } else { - this.error(errorMessage); + this.error(errorMessage as string); } } } diff --git a/commander/src/bootstrapping/commands/blockchain/import.ts b/commander/src/bootstrapping/commands/blockchain/import.ts index 67db4717ca9..3c07ab4b3e5 100644 --- a/commander/src/bootstrapping/commands/blockchain/import.ts +++ b/commander/src/bootstrapping/commands/blockchain/import.ts @@ -43,7 +43,7 @@ export class ImportCommand extends Command { char: 'f', description: 'Delete and overwrite existing blockchain data', default: false, - }), + }) as flagParser.IFlag, }; async run(): Promise { diff --git a/commander/src/bootstrapping/commands/blockchain/reset.ts b/commander/src/bootstrapping/commands/blockchain/reset.ts index 7e14b2393a2..854e7cdd72f 100644 --- a/commander/src/bootstrapping/commands/blockchain/reset.ts +++ b/commander/src/bootstrapping/commands/blockchain/reset.ts @@ -33,7 +33,7 @@ export class ResetCommand extends Command { yes: flagParser.boolean({ char: 'y', description: 'Skip confirmation prompt.', - }), + }) as flagParser.IFlag, }; async run(): Promise { diff --git a/commander/src/bootstrapping/commands/config/show.ts b/commander/src/bootstrapping/commands/config/show.ts index 1bd28480e26..05586ef82d9 100644 --- a/commander/src/bootstrapping/commands/config/show.ts +++ b/commander/src/bootstrapping/commands/config/show.ts @@ -33,7 +33,7 @@ export class ShowCommand extends Command 
{ 'config:show --config ./custom-config.json --data-path ./data', ]; - static flags = { + static flag = { 'data-path': flagsWithParser.dataPath, config: flagsWithParser.config, pretty: flagsWithParser.pretty, @@ -41,6 +41,7 @@ export class ShowCommand extends Command { async run(): Promise { const { flags } = this.parse(ShowCommand); + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment const dataPath = flags['data-path'] ? flags['data-path'] : getDefaultPath(this.config.pjson.name); @@ -50,6 +51,7 @@ export class ShowCommand extends Command { const configDir = getConfigDirs(dataPath); // If config file exist, do not copy unless overwrite-config is specified if (configDir.length !== 1) { + // eslint-disable-next-line @typescript-eslint/restrict-template-expressions this.error(`Folder in ${dataPath} does not contain valid config`); } // If genesis block file exist, do not copy unless overwrite-config is specified diff --git a/commander/src/bootstrapping/commands/forger-info/import.ts b/commander/src/bootstrapping/commands/forger-info/import.ts index fad900444f6..a6b84c839ee 100644 --- a/commander/src/bootstrapping/commands/forger-info/import.ts +++ b/commander/src/bootstrapping/commands/forger-info/import.ts @@ -44,7 +44,7 @@ export abstract class ImportCommand extends Command { force: flagParser.boolean({ char: 'f', description: 'To overwrite the existing data if present.', - }), + }) as flagParser.IFlag, }; async run(): Promise { diff --git a/commander/src/bootstrapping/commands/forging/config.ts b/commander/src/bootstrapping/commands/forging/config.ts index afa7eebd3ab..fd88e0f6f65 100644 --- a/commander/src/bootstrapping/commands/forging/config.ts +++ b/commander/src/bootstrapping/commands/forging/config.ts @@ -33,18 +33,18 @@ export class ConfigCommand extends Command { ]; static flags = { - password: flagsWithParser.password, + password: flagsWithParser.password as flagParser.IFlag, passphrase: flagsWithParser.passphrase, count: 
flagParser.integer({ char: 'c', description: 'Total number of hashes to produce', default: 1000000, - }), + }) as flagParser.IFlag, distance: flagParser.integer({ char: 'd', description: 'Distance between each hashes', default: 1000, - }), + }) as flagParser.IFlag, output: flagsWithParser.output, pretty: flagsWithParser.pretty, }; diff --git a/commander/src/bootstrapping/commands/forging/disable.ts b/commander/src/bootstrapping/commands/forging/disable.ts index a595e0a92b5..9070a126006 100644 --- a/commander/src/bootstrapping/commands/forging/disable.ts +++ b/commander/src/bootstrapping/commands/forging/disable.ts @@ -12,7 +12,6 @@ * Removal or modification of this copyright notice is prohibited. * */ - import { BaseForgingCommand } from '../base_forging'; export abstract class DisableCommand extends BaseForgingCommand { diff --git a/commander/src/bootstrapping/commands/forging/enable.ts b/commander/src/bootstrapping/commands/forging/enable.ts index eb4d8dbcb07..4e570f54fbc 100644 --- a/commander/src/bootstrapping/commands/forging/enable.ts +++ b/commander/src/bootstrapping/commands/forging/enable.ts @@ -12,7 +12,6 @@ * Removal or modification of this copyright notice is prohibited. 
* */ - import { BaseForgingCommand } from '../base_forging'; export abstract class EnableCommand extends BaseForgingCommand { diff --git a/commander/src/bootstrapping/commands/genesis-block/create.ts b/commander/src/bootstrapping/commands/genesis-block/create.ts index c0fa995177e..152e7c5f9f6 100644 --- a/commander/src/bootstrapping/commands/genesis-block/create.ts +++ b/commander/src/bootstrapping/commands/genesis-block/create.ts @@ -65,34 +65,34 @@ export abstract class BaseGenesisBlockCommand extends Command { char: 'o', description: 'Output folder path of the generated genesis block', default: 'config', - }), + }) as flagParser.IFlag, accounts: flagParser.integer({ char: 'a', description: 'Number of non-validator accounts to generate', default: 10, - }), + }) as flagParser.IFlag, validators: flagParser.integer({ char: 'v', description: 'Number of validator accounts to generate', default: 103, - }), + }) as flagParser.IFlag, 'token-distribution': flagParser.integer({ char: 't', description: 'Amount of tokens distributed to each account', default: 100000000000, - }), + }) as flagParser.IFlag, 'validators-passphrase-encryption-iterations': flagParser.integer({ description: 'Number of iterations to use for passphrase encryption', default: 1000000, - }), + }) as flagParser.IFlag, 'validators-hash-onion-count': flagParser.integer({ description: 'Number of hashes to produce for each hash-onion', default: 100000, - }), + }) as flagParser.IFlag, 'validators-hash-onion-distance': flagParser.integer({ description: 'Distance between each hashes for hash-onion', default: 1000, - }), + }) as flagParser.IFlag, }; async run(): Promise { diff --git a/commander/src/bootstrapping/commands/hash-onion.ts b/commander/src/bootstrapping/commands/hash-onion.ts index ba66e53a9c9..ecdcff776ab 100644 --- a/commander/src/bootstrapping/commands/hash-onion.ts +++ b/commander/src/bootstrapping/commands/hash-onion.ts @@ -32,17 +32,17 @@ export class HashOnionCommand extends Command { output: 
flagParser.string({ char: 'o', description: 'Output file path', - }), + }) as flagParser.IFlag, count: flagParser.integer({ char: 'c', description: 'Total number of hashes to produce', default: 1000000, - }), + }) as flagParser.IFlag, distance: flagParser.integer({ char: 'd', description: 'Distance between each hashes', default: 1000, - }), + }) as flagParser.IFlag, pretty: flagsWithParser.pretty, }; diff --git a/commander/src/bootstrapping/commands/node/info.ts b/commander/src/bootstrapping/commands/node/info.ts index 0ef88c6bdab..0e4e908df44 100644 --- a/commander/src/bootstrapping/commands/node/info.ts +++ b/commander/src/bootstrapping/commands/node/info.ts @@ -36,7 +36,7 @@ export abstract class InfoCommand extends BaseIPCClientCommand { ? errors.map(err => (err as Error).message).join(',') : errors; - this.error(errorMessage); + this.error(errorMessage as string); } } } diff --git a/commander/src/bootstrapping/commands/passphrase/encrypt.ts b/commander/src/bootstrapping/commands/passphrase/encrypt.ts index 4251e038bcc..73cd96ad147 100644 --- a/commander/src/bootstrapping/commands/passphrase/encrypt.ts +++ b/commander/src/bootstrapping/commands/passphrase/encrypt.ts @@ -37,7 +37,7 @@ export class EncryptCommand extends Command { passphrase: flagsWithParser.passphrase, 'output-public-key': flagParser.boolean({ description: outputPublicKeyOptionDescription, - }), + }) as flagParser.IFlag, pretty: flagsWithParser.pretty, }; @@ -53,7 +53,7 @@ export class EncryptCommand extends Command { const passphrase = passphraseSource ?? (await getPassphraseFromPrompt('passphrase', true)); const password = passwordSource ?? 
(await getPasswordFromPrompt('password', true)); - const result = encryptPassphrase(passphrase, password, outputPublicKey); + const result = encryptPassphrase(passphrase, password, !!outputPublicKey); this.printJSON(result, pretty); } diff --git a/commander/src/bootstrapping/commands/start.ts b/commander/src/bootstrapping/commands/start.ts index 14c951120d2..eaeda49a4db 100644 --- a/commander/src/bootstrapping/commands/start.ts +++ b/commander/src/bootstrapping/commands/start.ts @@ -53,50 +53,50 @@ export abstract class StartCommand extends Command { 'overwrite-config': flagParser.boolean({ description: 'Overwrite network configs if they exist already', default: false, - }), + }) as flagParser.IFlag, port: flagParser.integer({ char: 'p', description: 'Open port for the peer to peer incoming connections. Environment variable "LISK_PORT" can also be used.', env: 'LISK_PORT', - }), + }) as flagParser.IFlag, 'api-ipc': flagParser.boolean({ description: 'Enable IPC communication. This will load plugins as a child process and communicate over IPC. Environment variable "LISK_API_IPC" can also be used.', env: 'LISK_API_IPC', default: false, exclusive: ['api-ws'], - }), + }) as flagParser.IFlag, 'api-ws': flagParser.boolean({ description: 'Enable websocket communication for api-client. Environment variable "LISK_API_WS" can also be used.', env: 'LISK_API_WS', default: false, exclusive: ['api-ipc'], - }), + }) as flagParser.IFlag, 'api-ws-port': flagParser.integer({ description: 'Port to be used for api-client websocket. Environment variable "LISK_API_WS_PORT" can also be used.', env: 'LISK_API_WS_PORT', dependsOn: ['api-ws'], - }), + }) as flagParser.IFlag, 'console-log': flagParser.string({ description: 'Console log level. Environment variable "LISK_CONSOLE_LOG_LEVEL" can also be used.', env: 'LISK_CONSOLE_LOG_LEVEL', options: LOG_OPTIONS, - }), + }) as flagParser.IFlag, log: flagParser.string({ char: 'l', description: 'File log level. 
Environment variable "LISK_FILE_LOG_LEVEL" can also be used.', env: 'LISK_FILE_LOG_LEVEL', options: LOG_OPTIONS, - }), + }) as flagParser.IFlag, 'seed-peers': flagParser.string({ env: 'LISK_SEED_PEERS', description: 'Seed peers to initially connect to in format of comma separated "ip:port". IP can be DNS name or IPV4 format. Environment variable "LISK_SEED_PEERS" can also be used.', - }), + }) as flagParser.IFlag, }; async run(): Promise { @@ -223,7 +223,9 @@ export abstract class StartCommand extends Command { await app.run(); } catch (errors) { this.error( - Array.isArray(errors) ? errors.map(err => (err as Error).message).join(',') : errors, + Array.isArray(errors) + ? errors.map(err => (err as Error).message).join(',') + : (errors as string), ); } } diff --git a/commander/src/bootstrapping/commands/transaction/create.ts b/commander/src/bootstrapping/commands/transaction/create.ts index d9be13e48b9..f22b6656f84 100644 --- a/commander/src/bootstrapping/commands/transaction/create.ts +++ b/commander/src/bootstrapping/commands/transaction/create.ts @@ -23,7 +23,7 @@ import * as validator from '@liskhq/lisk-validator'; import Command, { flags as flagParser } from '@oclif/command'; import { Application, PartialApplicationConfig, RegisteredSchema } from 'lisk-framework'; import { PromiseResolvedType } from '../../../types'; -import { flagsWithParser } from '../../../utils/flags'; +import { flags as defaultFlags, flagsWithParser } from '../../../utils/flags'; import { getDefaultPath } from '../../../utils/path'; import { getAssetFromPrompt, getPassphraseFromPrompt } from '../../../utils/reader'; import { @@ -43,12 +43,13 @@ interface CreateFlags { 'network-identifier'?: string; passphrase?: string; asset?: string; - pretty: boolean; + pretty?: boolean; offline: boolean; 'data-path'?: string; 'no-signature': boolean; 'sender-public-key'?: string; nonce?: string; + json?: boolean; } interface Transaction { @@ -150,7 +151,7 @@ const createTransactionOffline = async ( ) => 
{ const asset = await getAssetObject(registeredSchema, flags, args); const { passphrase, publicKey } = await getPassphraseAddressAndPublicKey(flags); - transaction.nonce = BigInt(flags.nonce); + transaction.nonce = BigInt(flags.nonce ?? 0); transaction.asset = asset; transaction.senderPublicKey = publicKey || Buffer.from(flags['sender-public-key'] as string, 'hex'); @@ -237,7 +238,7 @@ export abstract class CreateCommand extends Command { 'transaction:create 2 0 100000000 --offline --network mainnet --network-identifier 873da85a2cee70da631d90b0f17fada8c3ac9b83b2613f4ca5fddd374d1034b3 --nonce 1 --asset=\'{"amount":100000000,"recipientAddress":"ab0041a7d3f7b2c290b5b834d46bdc7b7eb85815","data":"send token"}\'', ]; - static flags = { + static flags: flagParser.Input = { passphrase: flagsWithParser.passphrase, asset: flagParser.string({ char: 'a', @@ -246,7 +247,7 @@ export abstract class CreateCommand extends Command { json: flagsWithParser.json, // We can't specify default value with `dependsOn` https://github.com/oclif/oclif/issues/211 offline: flagParser.boolean({ - ...flagsWithParser.offline, + description: defaultFlags.offline.description, dependsOn: ['network-identifier', 'nonce'], exclusive: ['data-path'], }), @@ -312,16 +313,16 @@ export abstract class CreateCommand extends Command { } if (flags.json) { - this.printJSON(flags.pretty, { + this.printJSON(!!flags.pretty, { transaction: encodeTransaction(this._schema, transactionObject, this._client).toString( 'hex', ), }); - this.printJSON(flags.pretty, { + this.printJSON(!!flags.pretty, { transaction: transactionToJSON(this._schema, transactionObject, this._client), }); } else { - this.printJSON(flags.pretty, { + this.printJSON(!!flags.pretty, { transaction: encodeTransaction(this._schema, transactionObject, this._client).toString( 'hex', ), diff --git a/commander/src/bootstrapping/commands/transaction/get.ts b/commander/src/bootstrapping/commands/transaction/get.ts index b3e7339e029..1aac1e932bb 100644 --- 
a/commander/src/bootstrapping/commands/transaction/get.ts +++ b/commander/src/bootstrapping/commands/transaction/get.ts @@ -57,7 +57,7 @@ export abstract class GetCommand extends BaseIPCClientCommand { if (/^Specified key transactions:id:(.*)does not exist/.test((errors as Error).message)) { this.error(`Transaction with id '${transactionId}' was not found.`); } else { - this.error(errorMessage); + this.error(errorMessage as string); } } } diff --git a/commander/src/bootstrapping/commands/transaction/sign.ts b/commander/src/bootstrapping/commands/transaction/sign.ts index 9ad96129fdd..55e40c54dae 100644 --- a/commander/src/bootstrapping/commands/transaction/sign.ts +++ b/commander/src/bootstrapping/commands/transaction/sign.ts @@ -45,7 +45,9 @@ interface SignFlags { 'network-identifier': string | undefined; passphrase: string | undefined; 'include-sender': boolean; - offline: boolean; + offline?: boolean; + json?: boolean; + pretty?: boolean; 'data-path': string | undefined; 'sender-public-key': string | undefined; 'mandatory-keys': string[]; @@ -178,7 +180,7 @@ export abstract class SignCommand extends Command { }, ]; - static flags = { + static flags: flagParser.Input = { passphrase: flagsWithParser.passphrase, json: flagsWithParser.json, offline: { @@ -240,16 +242,16 @@ export abstract class SignCommand extends Command { } if (flags.json) { - this.printJSON(flags.pretty, { + this.printJSON(!!flags.pretty, { transaction: encodeTransaction(this._schema, signedTransaction, this._client).toString( 'hex', ), }); - this.printJSON(flags.pretty, { + this.printJSON(!!flags.pretty, { transaction: transactionToJSON(this._schema, signedTransaction, this._client), }); } else { - this.printJSON(flags.pretty, { + this.printJSON(!!flags.pretty, { transaction: encodeTransaction(this._schema, signedTransaction, this._client).toString( 'hex', ), diff --git a/commander/src/commands/generate/plugin.ts b/commander/src/commands/generate/plugin.ts index e8ccf754c82..06b0a40afc3 100644 
--- a/commander/src/commands/generate/plugin.ts +++ b/commander/src/commands/generate/plugin.ts @@ -35,16 +35,16 @@ export default class PluginCommand extends BaseBootstrapCommand { ...BaseBootstrapCommand.flags, standalone: flagParser.boolean({ description: 'Create a standalone plugin package.', - }), + }) as flagParser.IFlag, output: flagParser.string({ description: 'Path to create the plugin.', char: 'o', dependsOn: ['standalone'], - }), + }) as flagParser.IFlag, registry: flagParser.string({ description: 'URL of a registry to download dependencies from.', dependsOn: ['standalone'], - }), + }) as flagParser.IFlag, }; async run(): Promise { diff --git a/commander/src/commands/init.ts b/commander/src/commands/init.ts index f9b437d16a5..9244927f023 100644 --- a/commander/src/commands/init.ts +++ b/commander/src/commands/init.ts @@ -27,7 +27,7 @@ export default class InitCommand extends BaseBootstrapCommand { 'init /project/path --template lisk-ts', ]; - static flags = { + static flags: flagParser.Input = { ...BaseBootstrapCommand.flags, registry: flagParser.string({ description: 'URL of a registry to download dependencies from.', diff --git a/commander/src/utils/flags.ts b/commander/src/utils/flags.ts index fca04e6491c..45acae6e2e8 100644 --- a/commander/src/utils/flags.ts +++ b/commander/src/utils/flags.ts @@ -144,24 +144,24 @@ export const flagsWithParser = { dataPath: flagParser.string({ ...flags.dataPath, env: 'LISK_DATA_PATH', - }), + }) as flagParser.IFlag, network: flagParser.string({ ...flags.network, env: 'LISK_NETWORK', default: DEFAULT_NETWORK, - }), + }) as flagParser.IFlag, config: flagParser.string({ ...flags.config, env: 'LISK_CONFIG_FILE', - }), - pretty: flagParser.boolean(flags.pretty), + }) as flagParser.IFlag, + pretty: flagParser.boolean(flags.pretty) as flagParser.IFlag, passphrase: flagParser.string(flags.passphrase), output: flagParser.string(flags.output), password: flagParser.string(flags.password), offline: flagParser.boolean({ 
...flags.offline, - }), - json: flagParser.boolean(flags.json), + }) as flagParser.IFlag, + json: flagParser.boolean(flags.json) as flagParser.IFlag, senderPublicKey: flagParser.string(flags.senderPublicKey), networkIdentifier: flagParser.string(flags.networkIdentifier), }; diff --git a/elements/lisk-api-client/external_types/pm2-axon/index.d.ts b/elements/lisk-api-client/external_types/pm2-axon/index.d.ts index da32591f8e3..462d129116d 100644 --- a/elements/lisk-api-client/external_types/pm2-axon/index.d.ts +++ b/elements/lisk-api-client/external_types/pm2-axon/index.d.ts @@ -6,6 +6,9 @@ declare module 'pm2-axon' { import NetSocket = NodeJS.Socket; export class Socket extends EventEmitter { + public on(name: string, val: any): void; + public removeAllListeners(name?: string): void; + public set(name: string, val: any): Socket; public get(name: string): any; diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 74132a4af19..726d100949b 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -42,13 +42,13 @@ "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", - "ws": "7.4.6" + "ws": "8.5.0" }, "devDependencies": { "@liskhq/lisk-chain": "^0.3.3", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index 0d51fc1421d..53143b5ac3c 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -41,11 +41,11 @@ "@liskhq/lisk-cryptography": "^3.2.0", "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", - "@types/node": "12.20.6", - "debug": "4.3.1" + "@types/node": "16.11.26", + "debug": "4.3.4" }, "devDependencies": { - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/faker": "4.1.10", 
"@types/jest": "26.0.21", "@types/jest-when": "2.7.2", @@ -63,7 +63,7 @@ "prettier": "2.2.1", "randomstring": "1.1.5", "source-map-support": "0.5.19", - "stampit": "4.3.1", + "stampit": "4.3.2", "ts-jest": "26.5.4", "ts-node": "9.1.1", "tsconfig-paths": "3.9.0", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 457d574e4cf..53d0839b2b2 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -42,15 +42,15 @@ "@liskhq/lisk-tree": "^0.2.1", "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", - "debug": "4.3.1" + "debug": "4.3.4" }, "devDependencies": { "@liskhq/lisk-passphrase": "^3.1.0", - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/faker": "4.1.10", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/randomstring": "1.1.6", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", @@ -66,7 +66,7 @@ "prettier": "2.2.1", "randomstring": "1.1.5", "source-map-support": "0.5.19", - "stampit": "4.3.1", + "stampit": "4.3.2", "ts-jest": "26.5.4", "ts-node": "9.1.1", "tsconfig-paths": "3.9.0", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 6952b3d037f..27ca9b5c9b8 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -68,7 +68,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "browserify": "16.5.0", diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index cc60ea9b58a..d5a378c43ea 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -42,7 +42,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": 
"16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 5a828876a41..9dcab3982a0 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -48,7 +48,7 @@ "@types/ed2curve": "0.2.2", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-db/package.json b/elements/lisk-db/package.json index 71645b06db6..14ad3eef88d 100644 --- a/elements/lisk-db/package.json +++ b/elements/lisk-db/package.json @@ -36,17 +36,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "debug": "4.3.1", + "debug": "4.3.4", "levelup": "4.4.0", "rocksdb": "5.1.1" }, "devDependencies": { - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/encoding-down": "5.0.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/levelup": "4.3.0", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/rocksdb": "3.0.1", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", diff --git a/elements/lisk-db/src/utils.ts b/elements/lisk-db/src/utils.ts index d0396d60da6..47694841466 100644 --- a/elements/lisk-db/src/utils.ts +++ b/elements/lisk-db/src/utils.ts @@ -42,6 +42,6 @@ export const smartConvert = (message: string, delimiter: string, format: string) if (isASCIIChar(s)) { return s; } - return Buffer.from(s, 'binary').toString(format); + return Buffer.from(s, 'binary').toString(format as BufferEncoding); }) .join(delimiter); diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 0bd2ce4611a..e6d0f876468 100644 --- a/elements/lisk-elements/package.json +++ 
b/elements/lisk-elements/package.json @@ -54,7 +54,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index e4594265c99..80ce265086a 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -46,7 +46,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index a8763b7ca82..31d83e62885 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -56,7 +56,7 @@ "@types/jest-when": "2.7.2", "@types/jsonwebtoken": "8.3.8", "@types/lodash.shuffle": "4.2.6", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/semver": "7.1.0", "@types/socketcluster-client": "13.0.0", "@types/socketcluster-server": "14.2.2", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index e5e8f026f0c..67ec73c9f7f 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -36,13 +36,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "bip39": "3.0.3" + "bip39": "3.0.4" }, "devDependencies": { "@types/bip39": "3.0.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 
d93f6306747..fc9a47e0ceb 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -39,13 +39,13 @@ "dependencies": { "@liskhq/lisk-cryptography": "^3.2.0", "@liskhq/lisk-utils": "^0.2.0", - "debug": "4.3.1" + "debug": "4.3.4" }, "devDependencies": { - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index dd8e515dcae..2167e1a33ae 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -43,7 +43,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 5413943dc2f..39fa916d5ac 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -42,7 +42,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-tree/src/merkle_tree.ts b/elements/lisk-tree/src/merkle_tree.ts index ca6c3507a72..8c03e3569b4 100644 --- a/elements/lisk-tree/src/merkle_tree.ts +++ b/elements/lisk-tree/src/merkle_tree.ts @@ -241,7 +241,7 @@ export class MerkleTree { public clear(): void { this._width = 0; this._root = EMPTY_HASH; - this._hashToValueMap = { [this._root.toString('2')]: Buffer.alloc(0) }; + this._hashToValueMap = { [this._root.toString('binary')]: Buffer.alloc(0) }; } public 
toString(): string { diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index 115b1215f0a..aeb6e4d1fbe 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -42,7 +42,7 @@ "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/lodash.clonedeep": "4.5.6", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index b78ae0869b0..6298d64813b 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -40,14 +40,14 @@ "@liskhq/lisk-cryptography": "^3.2.0", "ajv": "8.1.0", "ajv-formats": "2.0.2", - "debug": "4.3.1", + "debug": "4.3.4", "semver": "7.3.5", - "validator": "13.5.2" + "validator": "13.7.0" }, "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/semver": "7.3.4", "@types/validator": "13.1.3", "@typescript-eslint/eslint-plugin": "4.19.0", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 693cbee9788..d58dffaf57b 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -43,7 +43,7 @@ "@liskhq/lisk-client": "^5.2.1", "@liskhq/lisk-cryptography": "^3.2.0", "@liskhq/lisk-utils": "^0.2.0", - "express": "4.17.1", + "express": "4.17.3", "json-format-highlight": "1.0.4", "lisk-framework": "^0.9.1", "react": "^17.0.1", @@ -56,13 +56,13 @@ "@pmmmwh/react-refresh-webpack-plugin": "0.4.3", "@svgr/webpack": "5.5.0", "@types/cors": "2.8.6", - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", "@types/ip": "1.1.0", "@types/jest": "26.0.21", 
"@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/react": "^17.0.0", "@types/react-dom": "^17.0.0", "@types/react-router-dom": "5.1.7", @@ -105,7 +105,7 @@ "postcss-preset-env": "6.7.0", "postcss-safe-parser": "5.0.2", "prettier": "2.2.1", - "prompts": "2.4.0", + "prompts": "2.4.2", "react-app-polyfill": "^2.0.0", "react-dev-utils": "^11.0.2", "react-refresh": "^0.8.3", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index ae9625600c9..3e4d53095c2 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -48,7 +48,7 @@ "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", "axios": "0.21.1", - "express": "4.17.1", + "express": "4.17.3", "lisk-framework": "^0.9.1", "react": "^17.0.1", "react-dom": "^17.0.1", @@ -59,13 +59,13 @@ "@pmmmwh/react-refresh-webpack-plugin": "0.4.3", "@svgr/webpack": "5.5.0", "@types/cors": "2.8.6", - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@types/react": "^17.0.0", "@types/react-dom": "^17.0.0", "@types/react-router-dom": "5.1.7", @@ -107,7 +107,7 @@ "postcss-preset-env": "6.7.0", "postcss-safe-parser": "5.0.2", "prettier": "2.2.1", - "prompts": "2.4.0", + "prompts": "2.4.2", "react-app-polyfill": "^2.0.0", "react-dev-utils": "^11.0.2", "react-refresh": "^0.8.3", diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index b2f5c65abe5..139157fb30a 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -47,8 +47,8 @@ "@liskhq/lisk-validator": 
"^0.6.1", "axios": "0.21.1", "cors": "2.8.5", - "debug": "4.3.1", - "express": "4.17.1", + "debug": "4.3.4", + "express": "4.17.3", "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", @@ -58,13 +58,13 @@ "@liskhq/lisk-api-client": "^5.1.5", "@liskhq/lisk-genesis": "^0.2.3", "@types/cors": "2.8.6", - "@types/debug": "4.1.5", + "@types/debug": "4.1.7", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "axios": "0.21.1", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index a08d5b50c2b..96eb0a22fb8 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -41,7 +41,7 @@ "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", "cors": "2.8.5", - "express": "4.17.1", + "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", "lisk-framework": "^0.9.1" @@ -55,10 +55,10 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", - "axios": "0.21.1", + "axios": "0.26.1", "eslint": "7.22.0", "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", diff --git a/framework-plugins/lisk-framework-http-api-plugin/src/http_api_plugin.ts b/framework-plugins/lisk-framework-http-api-plugin/src/http_api_plugin.ts index 09eab295657..a12ee9a5613 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/src/http_api_plugin.ts +++ b/framework-plugins/lisk-framework-http-api-plugin/src/http_api_plugin.ts @@ -90,7 +90,7 @@ export class HTTPAPIPlugin extends BasePlugin { 
private _registerMiddlewares(options: Options): void { // Register middlewares this._app.use(cors(options.cors)); - this._app.use(express.json()); + this._app.use(express.json() as express.RequestHandler); this._app.use(rateLimit(options.limits)); this._app.use(middlewares.whiteListMiddleware(options)); } diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 13480fb7b32..aaa0b834fa6 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -43,7 +43,7 @@ "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", "cors": "2.8.5", - "express": "4.17.1", + "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", "lisk-framework": "^0.9.1" @@ -55,7 +55,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/src/monitor_plugin.ts b/framework-plugins/lisk-framework-monitor-plugin/src/monitor_plugin.ts index 18fed473a37..cf19071fb65 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/src/monitor_plugin.ts +++ b/framework-plugins/lisk-framework-monitor-plugin/src/monitor_plugin.ts @@ -125,7 +125,7 @@ export class MonitorPlugin extends BasePlugin { private _registerMiddlewares(options: Options): void { // Register middlewares this._app.use(cors(options.cors)); - this._app.use(express.json()); + this._app.use(express.json() as express.RequestHandler); this._app.use(rateLimit(options.limits)); this._app.use(middlewares.whiteListMiddleware(options)); } diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 
c8d8e03d2b2..3393fb8c2a8 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -46,7 +46,7 @@ "@liskhq/lisk-transactions": "^5.2.1", "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", - "debug": "4.3.1", + "debug": "4.3.4", "fs-extra": "9.1.0", "lisk-framework": "^0.9.1" }, @@ -57,10 +57,10 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", - "axios": "0.21.1", + "axios": "0.26.1", "eslint": "7.22.0", "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", diff --git a/framework/external_types/pm2-axon/index.d.ts b/framework/external_types/pm2-axon/index.d.ts index da32591f8e3..462d129116d 100644 --- a/framework/external_types/pm2-axon/index.d.ts +++ b/framework/external_types/pm2-axon/index.d.ts @@ -6,6 +6,9 @@ declare module 'pm2-axon' { import NetSocket = NodeJS.Socket; export class Socket extends EventEmitter { + public on(name: string, val: any): void; + public removeAllListeners(name?: string): void; + public set(name: string, val: any): Socket; public get(name: string): any; diff --git a/framework/package.json b/framework/package.json index 8231c635700..34fdcc25069 100644 --- a/framework/package.json +++ b/framework/package.json @@ -54,21 +54,21 @@ "@liskhq/lisk-utils": "^0.2.0", "@liskhq/lisk-validator": "^0.6.1", "bunyan": "1.8.15", - "debug": "4.3.1", - "eventemitter2": "6.4.4", + "debug": "4.3.4", + "eventemitter2": "6.4.5", "fs-extra": "9.1.0", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", "ps-list": "7.0.0", "sodium-native": "3.2.0", - "ws": "7.4.6" + "ws": "8.5.0" }, "devDependencies": { "@liskhq/lisk-passphrase": "^3.1.0", "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + 
"@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "copyfiles": "2.2.0", diff --git a/package.json b/package.json index 5d5a8150a87..06c5bbc888e 100644 --- a/package.json +++ b/package.json @@ -73,6 +73,6 @@ "lint-staged": "10.5.4", "prettier": "2.2.1", "typescript": "4.2.3", - "yarn": "1.22.10" + "yarn": "1.22.17" } } diff --git a/tsconfig.json b/tsconfig.json index 44a3f4b848f..d025be796b0 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -10,6 +10,7 @@ "noImplicitReturns": true, "noUnusedLocals": true, "noUnusedParameters": true, + "skipLibCheck": true, "pretty": true, "removeComments": true, "resolveJsonModule": true, diff --git a/yarn.lock b/yarn.lock index 723cff4c0a1..12b18c1b666 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2671,34 +2671,22 @@ node-gyp "^7.1.0" read-package-json-fast "^2.0.1" -"@oclif/command@1.8.0", "@oclif/command@^1.5.20", "@oclif/command@^1.8.0": - version "1.8.0" - resolved "https://registry.yarnpkg.com/@oclif/command/-/command-1.8.0.tgz#c1a499b10d26e9d1a611190a81005589accbb339" - integrity sha512-5vwpq6kbvwkQwKqAoOU3L72GZ3Ta8RRrewKj9OJRolx28KLJJ8Dg9Rf7obRwt5jQA9bkYd8gqzMTrI7H3xLfaw== - dependencies: - "@oclif/config" "^1.15.1" - "@oclif/errors" "^1.3.3" - "@oclif/parser" "^3.8.3" - "@oclif/plugin-help" "^3" +"@oclif/command@1.8.16", "@oclif/command@^1.8.14", "@oclif/command@^1.8.15": + version "1.8.16" + resolved "https://registry.yarnpkg.com/@oclif/command/-/command-1.8.16.tgz#bea46f81b2061b47e1cda318a0b923e62ca4cc0c" + integrity sha512-rmVKYEsKzurfRU0xJz+iHelbi1LGlihIWZ7Qvmb/CBz1EkhL7nOkW4SVXmG2dA5Ce0si2gr88i6q4eBOMRNJ1w== + dependencies: + "@oclif/config" "^1.18.2" + "@oclif/errors" "^1.3.5" + "@oclif/help" "^1.0.1" + "@oclif/parser" "^3.8.6" debug "^4.1.1" semver "^7.3.2" -"@oclif/command@^1.5.1", "@oclif/command@^1.5.13": - version "1.5.19" - resolved "https://registry.yarnpkg.com/@oclif/command/-/command-1.5.19.tgz#13f472450eb83bd6c6871a164c03eadb5e1a07ed" - integrity 
sha512-6+iaCMh/JXJaB2QWikqvGE9//wLEVYYwZd5sud8aLoLKog1Q75naZh2vlGVtg5Mq/NqpqGQvdIjJb3Bm+64AUQ== - dependencies: - "@oclif/config" "^1" - "@oclif/errors" "^1.2.2" - "@oclif/parser" "^3.8.3" - "@oclif/plugin-help" "^2" - debug "^4.1.1" - semver "^5.6.0" - -"@oclif/config@1.17.0", "@oclif/config@^1.13.0", "@oclif/config@^1.15.1", "@oclif/config@^1.17.0": - version "1.17.0" - resolved "https://registry.yarnpkg.com/@oclif/config/-/config-1.17.0.tgz#ba8639118633102a7e481760c50054623d09fcab" - integrity sha512-Lmfuf6ubjQ4ifC/9bz1fSCHc6F6E653oyaRXxg+lgT4+bYf9bk+nqrUpAbrXyABkCqgIBiFr3J4zR/kiFdE1PA== +"@oclif/config@1.18.2": + version "1.18.2" + resolved "https://registry.yarnpkg.com/@oclif/config/-/config-1.18.2.tgz#5bfe74a9ba6a8ca3dceb314a81bd9ce2e15ebbfe" + integrity sha512-cE3qfHWv8hGRCP31j7fIS7BfCflm/BNZ2HNqHexH+fDrdF2f1D5S8VmXWLC77ffv3oDvWyvE9AZeR0RfmHCCaA== dependencies: "@oclif/errors" "^1.3.3" "@oclif/parser" "^3.8.0" @@ -2707,26 +2695,63 @@ is-wsl "^2.1.1" tslib "^2.0.0" -"@oclif/config@^1": - version "1.14.0" - resolved "https://registry.yarnpkg.com/@oclif/config/-/config-1.14.0.tgz#0af93facd5c5087f804489f1603c4f3bc0c45014" - integrity sha512-KsOP/mx9lzTah+EtGqLUXN3PDL0J3zb9/dTneFyiUK2K6T7vFEGhV6OasmqTh4uMZHGYTGrNPV8x/Yw6qZNL6A== +"@oclif/config@1.18.3", "@oclif/config@^1.18.2": + version "1.18.3" + resolved "https://registry.yarnpkg.com/@oclif/config/-/config-1.18.3.tgz#ddfc144fdab66b1658c2f1b3478fa7fbfd317e79" + integrity sha512-sBpko86IrTscc39EvHUhL+c++81BVTsIZ3ETu/vG+cCdi0N6vb2DoahR67A9FI2CGnxRRHjnTfa3m6LulwNATA== dependencies: - "@oclif/errors" "^1.0.0" + "@oclif/errors" "^1.3.5" "@oclif/parser" "^3.8.0" debug "^4.1.1" - tslib "^1.9.3" + globby "^11.0.1" + is-wsl "^2.1.1" + tslib "^2.3.1" -"@oclif/dev-cli@1.26.0": - version "1.26.0" - resolved "https://registry.yarnpkg.com/@oclif/dev-cli/-/dev-cli-1.26.0.tgz#e3ec294b362c010ffc8948003d3770955c7951fd" - integrity sha512-272udZP+bG4qahoAcpWcMTJKiA+V42kRMqQM7n4tgW35brYb2UP5kK+p08PpF8sgSfRTV8MoJVJG9ax5kY82PA== 
+"@oclif/core@^1.2.0", "@oclif/core@^1.3.6": + version "1.6.0" + resolved "https://registry.yarnpkg.com/@oclif/core/-/core-1.6.0.tgz#a91333275cd43a49097158f4ae8e15ccf718bd48" + integrity sha512-JHerjgRd29EtUVpDIrzohq2XdxJfgmZVGHAFlf75QVhLGFaleopZAQNBXkHkxG//kGib0LhyVGW7azcFKzr1eQ== dependencies: - "@oclif/command" "^1.8.0" - "@oclif/config" "^1.17.0" - "@oclif/errors" "^1.3.3" - "@oclif/plugin-help" "^3.2.0" - cli-ux "^5.2.1" + "@oclif/linewrap" "^1.0.0" + "@oclif/screen" "^3.0.2" + ansi-escapes "^4.3.2" + ansi-styles "^4.3.0" + cardinal "^2.1.1" + chalk "^4.1.2" + clean-stack "^3.0.1" + cli-progress "^3.10.0" + debug "^4.3.3" + ejs "^3.1.6" + fs-extra "^9.1.0" + get-package-type "^0.1.0" + globby "^11.1.0" + hyperlinker "^1.0.0" + indent-string "^4.0.0" + is-wsl "^2.2.0" + js-yaml "^3.14.1" + lodash "^4.17.21" + natural-orderby "^2.0.3" + object-treeify "^1.1.33" + password-prompt "^1.1.2" + semver "^7.3.5" + string-width "^4.2.3" + strip-ansi "^6.0.1" + supports-color "^8.1.1" + supports-hyperlinks "^2.2.0" + tslib "^2.3.1" + widest-line "^3.1.0" + wrap-ansi "^7.0.0" + +"@oclif/dev-cli@1.26.10": + version "1.26.10" + resolved "https://registry.yarnpkg.com/@oclif/dev-cli/-/dev-cli-1.26.10.tgz#d8df3a79009b68552f5e7f249d1d19ca52278382" + integrity sha512-dJ+II9rVXckzFvG+82PbfphMTnoqiHvsuAAbcHrLdZWPBnFAiDKhNYE0iHnA/knAC4VGXhogsrAJ3ERT5d5r2g== + dependencies: + "@oclif/command" "^1.8.15" + "@oclif/config" "^1.18.2" + "@oclif/errors" "^1.3.5" + "@oclif/plugin-help" "3.2.18" + cli-ux "5.6.7" debug "^4.1.1" find-yarn-workspace-root "^2.0.0" fs-extra "^8.1" @@ -2736,16 +2761,16 @@ qqjs "^0.3.10" tslib "^2.0.3" -"@oclif/errors@1.2.2", "@oclif/errors@^1.0.0", "@oclif/errors@^1.2.1", "@oclif/errors@^1.2.2": - version "1.2.2" - resolved "https://registry.yarnpkg.com/@oclif/errors/-/errors-1.2.2.tgz#9d8f269b15f13d70aa93316fed7bebc24688edc2" - integrity sha512-Eq8BFuJUQcbAPVofDxwdE0bL14inIiwt5EaKRVY9ZDIG11jwdXZqiQEECJx0VfnLyUZdYfRd/znDI/MytdJoKg== +"@oclif/errors@1.3.5", 
"@oclif/errors@^1.3.5": + version "1.3.5" + resolved "https://registry.yarnpkg.com/@oclif/errors/-/errors-1.3.5.tgz#a1e9694dbeccab10fe2fe15acb7113991bed636c" + integrity sha512-OivucXPH/eLLlOT7FkCMoZXiaVYf8I/w1eTAM1+gKzfhALwWTusxEx7wBmW0uzvkSg/9ovWLycPaBgJbM3LOCQ== dependencies: - clean-stack "^1.3.0" - fs-extra "^7.0.0" - indent-string "^3.2.0" - strip-ansi "^5.0.0" - wrap-ansi "^4.0.0" + clean-stack "^3.0.0" + fs-extra "^8.1" + indent-string "^4.0.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" "@oclif/errors@^1.3.3": version "1.3.4" @@ -2758,22 +2783,37 @@ strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +"@oclif/help@^1.0.0", "@oclif/help@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@oclif/help/-/help-1.0.1.tgz#fd96a3dd9fb2314479e6c8584c91b63754a7dff5" + integrity sha512-8rsl4RHL5+vBUAKBL6PFI3mj58hjPCp2VYyXD4TAa7IMStikFfOH2gtWmqLzIlxAED2EpD0dfYwo9JJxYsH7Aw== + dependencies: + "@oclif/config" "1.18.2" + "@oclif/errors" "1.3.5" + chalk "^4.1.2" + indent-string "^4.0.0" + lodash "^4.17.21" + string-width "^4.2.0" + strip-ansi "^6.0.0" + widest-line "^3.1.0" + wrap-ansi "^6.2.0" + "@oclif/linewrap@^1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@oclif/linewrap/-/linewrap-1.0.0.tgz#aedcb64b479d4db7be24196384897b5000901d91" integrity sha512-Ups2dShK52xXa8w6iBWLgcjPJWjais6KPJQq3gQ/88AY6BXoTX+MIGFPrWQO1KLMiQfoTpcLnUwloN4brrVUHw== -"@oclif/parser@3.8.5": - version "3.8.5" - resolved "https://registry.yarnpkg.com/@oclif/parser/-/parser-3.8.5.tgz#c5161766a1efca7343e1f25d769efbefe09f639b" - integrity sha512-yojzeEfmSxjjkAvMRj0KzspXlMjCfBzNRPkWw8ZwOSoNWoJn+OCS/m/S+yfV6BvAM4u2lTzX9Y5rCbrFIgkJLg== +"@oclif/parser@3.8.7", "@oclif/parser@^3.8.6": + version "3.8.7" + resolved "https://registry.yarnpkg.com/@oclif/parser/-/parser-3.8.7.tgz#236d48db05d0b00157d3b42d31f9dac7550d2a7c" + integrity sha512-b11xBmIUK+LuuwVGJpFs4LwQN2xj2cBWj2c4z1FtiXGrJ85h9xV6q+k136Hw0tGg1jQoRXuvuBnqQ7es7vO9/Q== dependencies: - "@oclif/errors" "^1.2.2" + "@oclif/errors" "^1.3.5" 
"@oclif/linewrap" "^1.0.0" - chalk "^2.4.2" - tslib "^1.9.3" + chalk "^4.1.0" + tslib "^2.3.1" -"@oclif/parser@^3.8.0", "@oclif/parser@^3.8.3": +"@oclif/parser@^3.8.0": version "3.8.4" resolved "https://registry.yarnpkg.com/@oclif/parser/-/parser-3.8.4.tgz#1a90fc770a42792e574fb896325618aebbe8c9e4" integrity sha512-cyP1at3l42kQHZtqDS3KfTeyMvxITGwXwH1qk9ktBYvqgMp5h4vHT+cOD74ld3RqJUOZY/+Zi9lb4Tbza3BtuA== @@ -2782,54 +2822,50 @@ chalk "^2.4.2" tslib "^1.9.3" -"@oclif/plugin-autocomplete@0.3.0": - version "0.3.0" - resolved "https://registry.yarnpkg.com/@oclif/plugin-autocomplete/-/plugin-autocomplete-0.3.0.tgz#eec788596a88a4ca5170a9103b6c2835119a8fbd" - integrity sha512-gCuIUCswvoU1BxDDvHSUGxW8rFagiacle8jHqE49+WnuniXD/N8NmJvnzmlNyc8qLE192CnKK+qYyAF+vaFQBg== +"@oclif/plugin-autocomplete@1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@oclif/plugin-autocomplete/-/plugin-autocomplete-1.2.0.tgz#c807d4ee0fd745296ea745c0c8ca28a0c6233bf3" + integrity sha512-Y64uhbhQLcLms2N6kvoIb40s2czOECeMzGs0ATf/3kNojY2nsYaQ0mI6PghQs/JgpVg4DnZOJivleYBr+XPn7Q== dependencies: - "@oclif/command" "^1.5.13" - "@oclif/config" "^1.13.0" + "@oclif/core" "^1.2.0" chalk "^4.1.0" - cli-ux "^5.2.1" debug "^4.0.0" fs-extra "^9.0.1" - moment "^2.22.1" -"@oclif/plugin-help@3.2.2", "@oclif/plugin-help@^3", "@oclif/plugin-help@^3.2.0": - version "3.2.2" - resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-3.2.2.tgz#063ee08cee556573a5198fbdfdaa32796deba0ed" - integrity sha512-SPZ8U8PBYK0n4srFjCLedk0jWU4QlxgEYLCXIBShJgOwPhTTQknkUlsEwaMIevvCU4iCQZhfMX+D8Pz5GZjFgA== +"@oclif/plugin-help@3.2.18": + version "3.2.18" + resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-3.2.18.tgz#f2bf6ba86719c174fc0e4c2149f73b46006bfdbd" + integrity sha512-5n5Pkz4L0duknIvFwx2Ko9Xda3miT6RZP8bgaaK3Q/9fzVBrhi4bOM0u05/OThI6V+3NsSdxYS2o1NLcXToWDg== dependencies: - "@oclif/command" "^1.5.20" - "@oclif/config" "^1.15.1" - "@oclif/errors" "^1.2.2" - chalk "^4.1.0" + 
"@oclif/command" "^1.8.14" + "@oclif/config" "1.18.2" + "@oclif/errors" "1.3.5" + "@oclif/help" "^1.0.0" + chalk "^4.1.2" indent-string "^4.0.0" - lodash.template "^4.4.0" + lodash "^4.17.21" string-width "^4.2.0" strip-ansi "^6.0.0" widest-line "^3.1.0" - wrap-ansi "^4.0.0" + wrap-ansi "^6.2.0" -"@oclif/plugin-help@^2": - version "2.2.3" - resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-2.2.3.tgz#b993041e92047f0e1762668aab04d6738ac06767" - integrity sha512-bGHUdo5e7DjPJ0vTeRBMIrfqTRDBfyR5w0MP41u0n3r7YG5p14lvMmiCXxi6WDaP2Hw5nqx3PnkAIntCKZZN7g== +"@oclif/plugin-help@5.1.12": + version "5.1.12" + resolved "https://registry.yarnpkg.com/@oclif/plugin-help/-/plugin-help-5.1.12.tgz#24a18631eb9b22cb55e1a3b8e4f6039fd42727e6" + integrity sha512-HvH/RubJxqCinP0vUWQLTOboT+SfjfL8h40s+PymkWaldIcXlpoRaJX50vz+SjZIs7uewZwEk8fzLqpF/BWXlg== dependencies: - "@oclif/command" "^1.5.13" - chalk "^2.4.1" - indent-string "^4.0.0" - lodash.template "^4.4.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - widest-line "^2.0.1" - wrap-ansi "^4.0.0" + "@oclif/core" "^1.3.6" -"@oclif/screen@^1.0.3": +"@oclif/screen@^1.0.4": version "1.0.4" resolved "https://registry.yarnpkg.com/@oclif/screen/-/screen-1.0.4.tgz#b740f68609dfae8aa71c3a6cab15d816407ba493" integrity sha512-60CHpq+eqnTxLZQ4PGHYNwUX572hgpMHGPtTWMjdTMsAvlm69lZV/4ly6O3sAYkomo4NggGcomrDpBe34rxUqw== +"@oclif/screen@^3.0.2": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@oclif/screen/-/screen-3.0.2.tgz#969054308fe98d130c02844a45cc792199b75670" + integrity sha512-S/SF/XYJeevwIgHFmVDAFRUvM3m+OjhvCAYMk78ZJQCYCQ5wS7j+LTt1ZEv2jpEEGg2tx/F6TYYWxddNAYHrFQ== + "@oclif/test@1.2.8": version "1.2.8" resolved "https://registry.yarnpkg.com/@oclif/test/-/test-1.2.8.tgz#a5b2ebd747832217d9af65ac30b58780c4c17c5e" @@ -3304,11 +3340,6 @@ resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.8.tgz#c8d645506db0d15f4aafd4dfa873f443ad87ea59" integrity 
sha512-U1bQiWbln41Yo6EeHMr+34aUhvrMVyrhn9lYfPSpLTCrZlGxU4Rtn1bocX+0p2Fc/Jkd2FanCEXdw0WNfHHM0w== -"@types/color-name@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0" - integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ== - "@types/component-emitter@*": version "1.2.7" resolved "https://registry.yarnpkg.com/@types/component-emitter/-/component-emitter-1.2.7.tgz#d49a2c65a89c8b594e7355a92e43cf1d278b577c" @@ -3333,11 +3364,18 @@ dependencies: "@types/express" "*" -"@types/debug@*", "@types/debug@4.1.5": +"@types/debug@*": version "4.1.5" resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.5.tgz#b14efa8852b7768d898906613c23f688713e02cd" integrity sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ== +"@types/debug@4.1.7": + version "4.1.7" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82" + integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg== + dependencies: + "@types/ms" "*" + "@types/diff@*": version "5.0.0" resolved "https://registry.yarnpkg.com/@types/diff/-/diff-5.0.0.tgz#eb71e94feae62548282c4889308a3dfb57e36020" @@ -3677,6 +3715,11 @@ resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-7.0.1.tgz#5d7ec2a789a1f77c59b7ad071b9d50bf1abbfc9e" integrity sha512-L/Nw/2e5KUaprNJoRA33oly+M8X8n0K+FwLTbYqwTcR14wdPWeRkigBLfSFpN/Asf9ENZTMZwLxjtjeYucAA4Q== +"@types/ms@*": + version "0.7.31" + resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197" + integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== + "@types/node@*": version "14.14.35" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.35.tgz#42c953a4e2b18ab931f72477e7012172f4ffa313" 
@@ -3697,6 +3740,11 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.6.tgz#7b73cce37352936e628c5ba40326193443cfba25" integrity sha512-sRVq8d+ApGslmkE9e3i+D3gFGk7aZHAT+G4cIpIEdLJYPsWiSPwcAnJEjddLQQDqV3Ra2jOclX/Sv6YrvGYiWA== +"@types/node@16.11.26": + version "16.11.26" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.26.tgz#63d204d136c9916fb4dcd1b50f9740fe86884e47" + integrity sha512-GZ7bu5A6+4DtG7q9GsoHXy3ALcgeIHP4NnL0Vv2wu0uUB/yQex26v0tf6/na1mm0+bS9Uw+0DFex7aaKr2qawQ== + "@types/node@^13.7.0": version "13.13.9" resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.9.tgz#79df4ae965fb76d31943b54a6419599307a21394" @@ -4452,6 +4500,14 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: mime-types "~2.1.24" negotiator "0.6.2" +accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + acorn-globals@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" @@ -4583,7 +4639,7 @@ ajv@8.1.0, ajv@^8.0.0: require-from-string "^2.0.2" uri-js "^4.2.2" -ajv@^6.1.0, ajv@^6.12.3, ajv@^6.12.5: +ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.5: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -4593,16 +4649,6 @@ ajv@^6.1.0, ajv@^6.12.3, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^6.10.0, ajv@^6.10.2: - version "6.12.2" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.2.tgz#c629c5eced17baf314437918d2da88c99d5958cd" - integrity 
sha512-k+V+hzjm5q/Mr8ef/1Y9goCmlsK4I6Sm74teeyGvFk1XrOsbsKLjEdrvny42CZ+a8sXbk8KWpY/bDwS+FLL2UQ== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - ajv@^6.12.4: version "6.12.4" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.4.tgz#0614facc4522127fa713445c6bfd3ebd376e2234" @@ -4662,6 +4708,13 @@ ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: dependencies: type-fest "^0.11.0" +ansi-escapes@^4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + ansi-html@0.0.7, ansi-html@^0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e" @@ -4682,7 +4735,7 @@ ansi-regex@^4.0.0, ansi-regex@^4.1.0: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== -ansi-regex@^5.0.0: +ansi-regex@^5.0.0, ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== @@ -4699,15 +4752,7 @@ ansi-styles@^3.2.0, ansi-styles@^3.2.1: dependencies: color-convert "^1.9.0" -ansi-styles@^4.0.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359" - integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA== - dependencies: - "@types/color-name" "^1.1.1" - color-convert "^2.0.1" - -ansi-styles@^4.1.0: +ansi-styles@^4.0.0, 
ansi-styles@^4.1.0, ansi-styles@^4.2.0, ansi-styles@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== @@ -5029,6 +5074,13 @@ axios@0.21.1: dependencies: follow-redirects "^1.10.0" +axios@0.26.1: + version "0.26.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9" + integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA== + dependencies: + follow-redirects "^1.14.8" + axios@^0.19.2: version "0.19.2" resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.2.tgz#3ea36c5d8818d0d5f8a8a97a6d36b86cdc00cb27" @@ -5322,10 +5374,10 @@ bip39@*: pbkdf2 "^3.0.9" randombytes "^2.0.1" -bip39@3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/bip39/-/bip39-3.0.3.tgz#4a8b79067d6ed2e74f9199ac994a2ab61b176760" - integrity sha512-P0dKrz4g0V0BjXfx7d9QNkJ/Txcz/k+hM9TnjqjUaXtuOfAvxXSw2rJw8DX0e3ZPwnK/IgDxoRqf0bvoVCqbMg== +bip39@3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/bip39/-/bip39-3.0.4.tgz#5b11fed966840b5e1b8539f0f54ab6392969b2a0" + integrity sha512-YZKQlb752TrUWqHWj7XAwCSjYEgGAk+/Aas3V7NyjQeZYsztO8JnQUaCWhcnL4T+jL8nvB8typ2jRPzTlgugNw== dependencies: "@types/node" "11.11.6" create-hash "^1.1.0" @@ -5370,6 +5422,22 @@ body-parser@1.19.0: raw-body "2.4.0" type-is "~1.6.17" +body-parser@1.19.2: + version "1.19.2" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.2.tgz#4714ccd9c157d44797b8b5607d72c0b89952f26e" + integrity sha512-SAAwOxgoCKMGs9uUAUFHygfLAyaniaoun6I8mFY9pRAJL9+Kec34aU+oIjDhTycub1jozEfEwx1W1IuOYxVSFw== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "~1.1.2" + http-errors "1.8.1" + iconv-lite "0.4.24" + on-finished "~2.3.0" + qs "6.9.7" + raw-body "2.4.3" + type-is "~1.6.18" + 
bonjour@^3.5.0: version "3.5.0" resolved "https://registry.yarnpkg.com/bonjour/-/bonjour-3.5.0.tgz#8e890a183d8ee9a2393b3844c691a42bcf7bc9f5" @@ -5712,6 +5780,11 @@ bytes@3.1.0: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + cacache@^12.0.2: version "12.0.4" resolved "https://registry.yarnpkg.com/cacache/-/cacache-12.0.4.tgz#668bcbd105aeb5f1d92fe25570ec9525c8faa40c" @@ -5981,6 +6054,14 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + char-regex@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" @@ -6035,7 +6116,7 @@ chokidar@^2.1.8: optionalDependencies: fsevents "^1.2.7" -chownr@^1.1.1, chownr@^1.1.3, chownr@^1.1.4: +chownr@^1.1.1, chownr@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== @@ -6087,17 +6168,12 @@ clean-css@^4.2.3: dependencies: source-map "~0.6.0" -clean-stack@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-1.3.0.tgz#9e821501ae979986c46b1d66d2d432db2fd4ae31" - integrity sha1-noIVAa6XmYbEax1m0tQy2y/UrjE= - 
clean-stack@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -clean-stack@^3.0.0: +clean-stack@^3.0.0, clean-stack@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-3.0.1.tgz#155bf0b2221bf5f4fba89528d24c5953f17fe3a8" integrity sha512-lR9wNiMRcVQjSB3a7xXGLuz4cr4wJuuXlaAEbRutGowQTmlp7R72/DOgN21e8jdwblMWl9UOJMJXarX94pzKdg== @@ -6125,6 +6201,13 @@ cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" +cli-progress@^3.10.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/cli-progress/-/cli-progress-3.10.0.tgz#63fd9d6343c598c93542fdfa3563a8b59887d78a" + integrity sha512-kLORQrhYCAtUPLZxqsAt2YJGOvRdt34+O6jl5cQGb7iF3dM55FQZlTR+rQyIK9JUcO9bBMwZsTlND+3dmFU2Cw== + dependencies: + string-width "^4.2.0" + cli-progress@^3.4.0: version "3.6.0" resolved "https://registry.yarnpkg.com/cli-progress/-/cli-progress-3.6.0.tgz#20317e6a653c3e5636fb5f03a7d67cd48ebc215a" @@ -6167,37 +6250,37 @@ cli-truncate@^2.1.0: slice-ansi "^3.0.0" string-width "^4.2.0" -cli-ux@^5.2.1: - version "5.4.5" - resolved "https://registry.yarnpkg.com/cli-ux/-/cli-ux-5.4.5.tgz#1b9e6648754307a1fa59a0c5a9c6be0ed899c2cd" - integrity sha512-5A6FuU0wPUlfCWUjtizUvNIbXElp6jN9QUJsDibs6F9cVX1kTgaMR3m6KT0R3iriEXpMrmPKV6yYS8XICNuQ6Q== +cli-ux@5.6.7: + version "5.6.7" + resolved "https://registry.yarnpkg.com/cli-ux/-/cli-ux-5.6.7.tgz#32ef9e6cb2b457be834280cc799028a11c8235a8" + integrity sha512-dsKAurMNyFDnO6X1TiiRNiVbL90XReLKcvIq4H777NMqXGBxBws23ag8ubCJE97vVZEgWG2eSUhsyLf63Jv8+g== dependencies: - "@oclif/command" "^1.5.1" - "@oclif/errors" "^1.2.1" + "@oclif/command" "^1.8.15" + "@oclif/errors" "^1.3.5" "@oclif/linewrap" "^1.0.0" - "@oclif/screen" "^1.0.3" - ansi-escapes "^3.1.0" - ansi-styles "^3.2.1" + "@oclif/screen" "^1.0.4" + ansi-escapes "^4.3.0" + ansi-styles "^4.2.0" 
cardinal "^2.1.1" - chalk "^2.4.1" - clean-stack "^2.0.0" + chalk "^4.1.0" + clean-stack "^3.0.0" cli-progress "^3.4.0" - extract-stack "^1.0.0" - fs-extra "^7.0.1" + extract-stack "^2.0.0" + fs-extra "^8.1" hyperlinker "^1.0.0" indent-string "^4.0.0" - is-wsl "^1.1.0" + is-wsl "^2.2.0" js-yaml "^3.13.1" - lodash "^4.17.11" + lodash "^4.17.21" natural-orderby "^2.0.1" + object-treeify "^1.1.4" password-prompt "^1.1.2" - semver "^5.6.0" - string-width "^3.1.0" - strip-ansi "^5.1.0" - supports-color "^5.5.0" - supports-hyperlinks "^1.0.1" - treeify "^1.1.0" - tslib "^1.9.3" + semver "^7.3.2" + string-width "^4.2.0" + strip-ansi "^6.0.0" + supports-color "^8.1.0" + supports-hyperlinks "^2.1.0" + tslib "^2.0.0" cli-width@^3.0.0: version "3.0.0" @@ -6548,6 +6631,13 @@ content-disposition@0.5.3: dependencies: safe-buffer "5.1.2" +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + content-type@^1.0.4, content-type@~1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" @@ -6665,6 +6755,11 @@ cookie@0.4.0: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== +cookie@0.4.2: + version "0.4.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== + copy-concurrently@^1.0.0: version "1.0.5" resolved 
"https://registry.yarnpkg.com/copy-concurrently/-/copy-concurrently-1.0.5.tgz#92297398cae34937fcafd6ec8139c18051f0b5e0" @@ -7207,6 +7302,13 @@ debug@4, debug@4.3.1, debug@^4.0.0, debug@^4.2.0, debug@^4.3.1: dependencies: ms "2.1.2" +debug@4.3.4, debug@^4.3.3: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + debug@=3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" @@ -8295,10 +8397,10 @@ event-stream@=3.3.4: stream-combiner "~0.0.4" through "~2.3.1" -eventemitter2@6.4.4: - version "6.4.4" - resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-6.4.4.tgz#aa96e8275c4dbeb017a5d0e03780c65612a1202b" - integrity sha512-HLU3NDY6wARrLCEwyGKRBvuWYyvW6mHYv72SJJAH3iJN3a6eVUvkjFkcxah1bcTgGVBBrFdIopBJPhCQFMLyXw== +eventemitter2@6.4.5: + version "6.4.5" + resolved "https://registry.yarnpkg.com/eventemitter2/-/eventemitter2-6.4.5.tgz#97380f758ae24ac15df8353e0cc27f8b95644655" + integrity sha512-bXE7Dyc1i6oQElDG0jMRZJrRAn9QR2xyyFGmBdZleNmyQX0FqGYmhZIrIrpPfm/w//LTo4tVQGOGQcGCb5q9uw== eventemitter2@^6.4.2: version "6.4.3" @@ -8498,7 +8600,43 @@ express-rate-limit@5.1.3: resolved "https://registry.yarnpkg.com/express-rate-limit/-/express-rate-limit-5.1.3.tgz#656bacce3f093034976346958a0f0199902c9174" integrity sha512-TINcxve5510pXj4n9/1AMupkj3iWxl3JuZaWhCdYDlZeoCPqweGZrxbrlqTCFb1CT5wli7s8e2SH/Qz2c9GorA== -express@4.17.1, express@^4.17.1: +express@4.17.3: + version "4.17.3" + resolved "https://registry.yarnpkg.com/express/-/express-4.17.3.tgz#f6c7302194a4fb54271b73a1fe7a06478c8f85a1" + integrity sha512-yuSQpz5I+Ch7gFrPCk4/c+dIBKlQUxtgwqzph132bsT6qhuzss6I8cLJQz7B3rFblzd6wtcI0ZbGltH/C4LjUg== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.19.2" + 
content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.4.2" + cookie-signature "1.0.6" + debug "2.6.9" + depd "~1.1.2" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "~1.1.2" + fresh "0.5.2" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "~2.3.0" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.9.7" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.17.2" + serve-static "1.14.2" + setprototypeof "1.2.0" + statuses "~1.5.0" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== @@ -8584,10 +8722,10 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -extract-stack@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/extract-stack/-/extract-stack-1.0.0.tgz#b97acaf9441eea2332529624b732fc5a1c8165fa" - integrity sha1-uXrK+UQe6iMyUpYktzL8WhyBZfo= +extract-stack@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/extract-stack/-/extract-stack-2.0.0.tgz#11367bc865bfcd9bc0db3123e5edb57786f11f9b" + integrity sha512-AEo4zm+TenK7zQorGK1f9mJ8L14hnTDi2ZQPR+Mub1NX8zimka1mXpV5LpH8x9HoUmFSHZCfLHqWvp0Y4FxxzQ== extract-zip@^1.7.0: version "1.7.0" @@ -8668,6 +8806,17 @@ fast-glob@^3.2.4: micromatch "^4.0.2" picomatch "^2.2.1" +fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, 
fast-json-stable-stringify@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" @@ -8923,10 +9072,10 @@ follow-redirects@1.5.10: dependencies: debug "=3.1.0" -follow-redirects@^1.0.0, follow-redirects@^1.10.0: - version "1.14.7" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.7.tgz#2004c02eb9436eee9a21446a6477debf17e81685" - integrity "sha1-IATALrlDbu6aIURqZHfevxfoFoU= sha512-+hbxoLbFMbRKDwohX8GkTataGqO6Jb7jGwpAlwgy2bIz25XtRm7KEzJM76R1WiNT5SwZkX4Y75SwBolkpmE7iQ==" +follow-redirects@^1.0.0, follow-redirects@^1.10.0, follow-redirects@^1.14.8: + version "1.14.9" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7" + integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w== for-in@^1.0.2: version "1.0.2" @@ -8960,6 +9109,11 @@ form-data@~2.3.2: combined-stream "^1.0.6" mime-types "^2.1.12" +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + forwarded@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" @@ -9014,7 +9168,7 @@ fs-extra@^6.0.1: jsonfile "^4.0.0" universalify "^0.1.0" -fs-extra@^7.0.0, fs-extra@^7.0.1: +fs-extra@^7.0.0: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== @@ -9142,6 +9296,11 @@ get-own-enumerable-property-symbols@^3.0.0: resolved 
"https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + get-pkg-repo@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-1.4.0.tgz#c73b489c06d80cc5536c2c853f9e05232056972d" @@ -9277,7 +9436,7 @@ glob-parent@^3.1.0: is-glob "^3.1.0" path-dirname "^1.0.0" -glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@~5.1.0: +glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -9387,6 +9546,18 @@ globby@^11.0.2: merge2 "^1.3.0" slash "^3.0.0" +globby@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + globby@^6.1.0: version "6.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" @@ -9398,11 +9569,16 @@ globby@^6.1.0: pify "^2.0.0" pinkie-promise "^2.0.0" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.5, graceful-fs@^4.1.6, graceful-fs@^4.2.0, 
graceful-fs@^4.2.2: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.5, graceful-fs@^4.2.2: version "4.2.6" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== +graceful-fs@^4.1.6, graceful-fs@^4.2.0: + version "4.2.9" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.9.tgz#041b05df45755e587a24942279b9d113146e1c96" + integrity sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ== + graceful-fs@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" @@ -9483,11 +9659,6 @@ has-bigints@^1.0.0: resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== -has-flag@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" - integrity sha1-6CB68cx7MNRGzHC3NLXovhj4jVE= - has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" @@ -9737,6 +9908,17 @@ http-errors@1.7.2: statuses ">= 1.5.0 < 2" toidentifier "1.0.0" +http-errors@1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.8.1.tgz#7c3f28577cbc8a207388455dbd62295ed07bd68c" + integrity sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g== + dependencies: + depd "~1.1.2" + inherits "2.0.4" + setprototypeof "1.2.0" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.1" + http-errors@~1.6.2: version "1.6.3" resolved 
"https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" @@ -9905,6 +10087,11 @@ ignore@^5.1.4: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + immediate@^3.2.3: version "3.2.3" resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.2.3.tgz#d140fa8f614659bd6541233097ddaac25cdd991c" @@ -9981,7 +10168,7 @@ indent-string@^2.1.0: dependencies: repeating "^2.0.0" -indent-string@^3.0.0, indent-string@^3.2.0: +indent-string@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" integrity sha1-Sl/W0nzDMvN+VBmlBNu4NxBckok= @@ -11297,7 +11484,7 @@ jquery@^3.4.0: resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@^3.13.0, js-yaml@^3.13.1: +js-yaml@^3.13.0, js-yaml@^3.13.1, js-yaml@^3.14.1: version "3.14.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== @@ -11986,7 +12173,7 @@ lodash.sortby@^4.7.0: resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= -lodash.template@^4.4.0, lodash.template@^4.5.0: +lodash.template@^4.5.0: version "4.5.0" resolved 
"https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab" integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A== @@ -12317,7 +12504,7 @@ merge-stream@^2.0.0: resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -merge2@^1.2.3: +merge2@^1.2.3, merge2@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== @@ -12364,6 +12551,14 @@ micromatch@^4.0.2: braces "^3.0.1" picomatch "^2.0.5" +micromatch@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" + integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== + dependencies: + braces "^3.0.1" + picomatch "^2.2.3" + miller-rabin@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" @@ -12382,6 +12577,11 @@ mime-db@1.46.0, "mime-db@>= 1.43.0 < 2": resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.46.0.tgz#6267748a7f799594de3cbc8cde91def349661cee" integrity sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ== +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + mime-types@^2.1.12, mime-types@^2.1.27, mime-types@~2.1.17, mime-types@~2.1.19: version "2.1.29" resolved 
"https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.29.tgz#1d4ab77da64b91f5f72489df29236563754bb1b2" @@ -12396,6 +12596,13 @@ mime-types@~2.1.24: dependencies: mime-db "1.44.0" +mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + mime@1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" @@ -12557,14 +12764,6 @@ minizlib@^2.0.0, minizlib@^2.1.1: minipass "^3.0.0" yallist "^4.0.0" -minizlib@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.0.tgz#fd52c645301ef09a63a2c209697c294c6ce02cf3" - integrity sha512-EzTZN/fjSvifSX0SlqUERCN39o6T40AMarPbv0MrarSFtIITCBh7bi+dU8nxGFHuqs9jdIAeoYoKuQAAASsPPA== - dependencies: - minipass "^3.0.0" - yallist "^4.0.0" - mississippi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022" @@ -12663,7 +12862,7 @@ moment@^2.19.3: resolved "https://registry.yarnpkg.com/moment/-/moment-2.27.0.tgz#8bff4e3e26a236220dfe3e36de756b6ebaa0105d" integrity sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ== -moment@^2.22.1, moment@^2.27.0: +moment@^2.27.0: version "2.29.1" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== @@ -12695,7 +12894,7 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.0.0, ms@^2.1.1: +ms@2.1.3, ms@^2.0.0, 
ms@^2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -12787,7 +12986,7 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= -natural-orderby@^2.0.1: +natural-orderby@^2.0.1, natural-orderby@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/natural-orderby/-/natural-orderby-2.0.3.tgz#8623bc518ba162f8ff1cdb8941d74deb0fdcc016" integrity sha512-p7KTHxU0CUrcOXe62Zfrb5Z13nLvPhSWR/so3kFulUQU0sgUll2Z0LwpsLN351eOOD+hRGu/F1g+6xDfPeD++Q== @@ -12802,6 +13001,11 @@ negotiator@0.6.2, negotiator@^0.6.2: resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + neo-async@^2.5.0, neo-async@^2.6.0, neo-async@^2.6.1, neo-async@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" @@ -12826,9 +13030,11 @@ no-case@^3.0.4: tslib "^2.0.3" node-fetch@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" - integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity 
sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" node-forge@^0.10.0: version "0.10.0" @@ -13189,6 +13395,11 @@ object-keys@^1.0.12, object-keys@^1.1.1: resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== +object-treeify@^1.1.33, object-treeify@^1.1.4: + version "1.1.33" + resolved "https://registry.yarnpkg.com/object-treeify/-/object-treeify-1.1.33.tgz#f06fece986830a3cba78ddd32d4c11d1f76cdf40" + integrity sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A== + object-visit@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" @@ -13864,6 +14075,11 @@ picomatch@^2.2.1, picomatch@^2.2.2: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== +picomatch@^2.2.3: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + pify@^2.0.0, pify@^2.2.0, pify@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" @@ -14796,6 +15012,14 @@ prompts@2.4.0: kleur "^3.0.3" sisteransi "^1.0.5" +prompts@2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi 
"^1.0.5" + prompts@^2.0.1: version "2.3.1" resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.3.1.tgz#b63a9ce2809f106fa9ae1277c275b167af46ea05" @@ -14857,6 +15081,14 @@ proxy-addr@~2.0.5: forwarded "~0.1.2" ipaddr.js "1.9.1" +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -14960,6 +15192,11 @@ qs@6.7.0: resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== +qs@6.9.7: + version "6.9.7" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.7.tgz#4610846871485e1e048f44ae3b94033f0e675afe" + integrity sha512-IhMFgUmuNpyRfxA90umL7ByLlgRXu6tIfKPpF5TmcfRLlLCckfP/g3IQmju6jjpu+Hh8rA+2p6A27ZSPOOHdKw== + qs@^6.9.4: version "6.10.1" resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.1.tgz#4931482fa8d647a5aab799c5271d2133b981fb6a" @@ -15078,6 +15315,16 @@ raw-body@2.4.0: iconv-lite "0.4.24" unpipe "1.0.0" +raw-body@2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.3.tgz#8f80305d11c2a0a545c2d9d89d7a0286fcead43c" + integrity sha512-UlTNLIcu0uzb4D2f4WltY6cVjLi+/jEN4lgEUj3E04tpMDpUlkBo/eSn6zou9hum2VMNpCCUone0O0WeJim07g== + dependencies: + bytes "3.1.2" + http-errors "1.8.1" + iconv-lite "0.4.24" + unpipe "1.0.0" + react-app-polyfill@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-2.0.0.tgz#a0bea50f078b8a082970a9d853dc34b6dcc6a3cf" @@ -15938,7 +16185,7 @@ safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: resolved 
"https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -16157,6 +16404,25 @@ send@0.17.1: range-parser "~1.2.1" statuses "~1.5.0" +send@0.17.2: + version "0.17.2" + resolved "https://registry.yarnpkg.com/send/-/send-0.17.2.tgz#926622f76601c41808012c8bf1688fe3906f7820" + integrity sha512-UJYB6wFSJE3G00nEivR5rgWp8c2xXvJ3OPWPhmuteU0IKj8nKbG3DrjiOmLwpnHGYWAVwA69zmTm++YG0Hmwww== + dependencies: + debug "2.6.9" + depd "~1.1.2" + destroy "~1.0.4" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "1.8.1" + mime "1.6.0" + ms "2.1.3" + on-finished "~2.3.0" + range-parser "~1.2.1" + statuses "~1.5.0" + serialize-javascript@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" @@ -16194,6 +16460,16 @@ serve-static@1.14.1: parseurl "~1.3.3" send "0.17.1" +serve-static@1.14.2: + version "1.14.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.2.tgz#722d6294b1d62626d41b43a013ece4598d292bfa" + integrity sha512-+TMNA9AFxUEGuC0z2mevogSnn9MXKb4fa7ngeRMJaaGv8vTwnIEkKi+QGvPt33HSnf8pRS+WGM0EbMtCJLKMBQ== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.17.2" + set-blocking@^2.0.0, set-blocking@~2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" @@ -16224,6 +16500,11 @@ setprototypeof@1.1.1: resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + sha.js@^2.4.0, sha.js@^2.4.8, sha.js@~2.4.4: version "2.4.11" resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" @@ -16725,10 +17006,10 @@ stackframe@^1.1.1: resolved "https://registry.yarnpkg.com/stackframe/-/stackframe-1.2.0.tgz#52429492d63c62eb989804c11552e3d22e779303" integrity sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA== -stampit@4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/stampit/-/stampit-4.3.1.tgz#90d813671af18f9fc9dcd6816085d7f99174a454" - integrity sha512-pcCXPy7VUXsxKE+oN2xroBmaQRGIIaRSIeD+HICoZb8w9CS5ojGmKPp3OfkXnO4D9UklcKCm9WmkAvAdrrlfZg== +stampit@4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/stampit/-/stampit-4.3.2.tgz#cfd3f607dd628a161ce6305621597994b4d56573" + integrity sha512-pE2org1+ZWQBnIxRPrBM2gVupkuDD0TTNIo1H6GdT/vO82NXli2z8lRE8cu/nBIHrcOCXFBAHpb9ZldrB2/qOA== start-server-and-test@1.11.6: version "1.11.6" @@ -16897,7 +17178,16 @@ string-width@^4.0.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" -string-width@^4.1.0, string-width@^4.2.0: +string-width@^4.1.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5" integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg== @@ -17013,7 +17303,7 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -strip-ansi@*, strip-ansi@6.0.0, strip-ansi@^6.0.0: +strip-ansi@*, strip-ansi@6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== @@ -17041,6 +17331,13 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-bom-buf@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-bom-buf/-/strip-bom-buf-1.0.0.tgz#1cb45aaf57530f4caf86c7f75179d2c9a51dd572" @@ -17158,7 +17455,7 @@ supports-color@^2.0.0: resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= -supports-color@^5.0.0, supports-color@^5.3.0, supports-color@^5.5.0: +supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity 
sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== @@ -17186,13 +17483,12 @@ supports-color@^7.1.0, supports-color@^7.2.0: dependencies: has-flag "^4.0.0" -supports-hyperlinks@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-1.0.1.tgz#71daedf36cc1060ac5100c351bb3da48c29c0ef7" - integrity sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw== +supports-color@^8.1.0, supports-color@^8.1.1: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: - has-flag "^2.0.0" - supports-color "^5.0.0" + has-flag "^4.0.0" supports-hyperlinks@^2.0.0: version "2.1.0" @@ -17202,6 +17498,14 @@ supports-hyperlinks@^2.0.0: has-flag "^4.0.0" supports-color "^7.0.0" +supports-hyperlinks@^2.1.0, supports-hyperlinks@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + svg-parser@^2.0.2: version "2.0.4" resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" @@ -17279,15 +17583,15 @@ tar-stream@^2.0.0: inherits "^2.0.3" readable-stream "^3.1.1" -tar@6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.0.1.tgz#7b3bd6c313cb6e0153770108f8d70ac298607efa" - integrity sha512-bKhKrrz2FJJj5s7wynxy/fyxpE0CmCjmOQ1KV4KkgXFWOgoIT/NbTMnB1n+LFNrNk0SSBVGGxcK5AGsyC+pW5Q== +tar@6.1.11, tar@^6.0.2, tar@^6.1.0: + version "6.1.11" + resolved 
"https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" + integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== dependencies: - chownr "^1.1.3" + chownr "^2.0.0" fs-minipass "^2.0.0" minipass "^3.0.0" - minizlib "^2.1.0" + minizlib "^2.1.1" mkdirp "^1.0.3" yallist "^4.0.0" @@ -17304,18 +17608,6 @@ tar@^4.4.12: safe-buffer "^5.2.1" yallist "^3.1.1" -tar@^6.0.2, tar@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.0.tgz#d1724e9bcc04b977b18d5c573b333a2207229a83" - integrity sha512-DUCttfhsnLCjwoDoFcI+B2iJgYa93vBnDUATYEeRx6sntCTdN01VnqsIuTlALXla/LWooNg0yEGeB+Y8WdFxGA== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^3.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - temp-dir@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" @@ -17567,6 +17859,11 @@ toidentifier@1.0.0: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + tough-cookie@^2.3.3, tough-cookie@~2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" @@ -17591,10 +17888,10 @@ tr46@^2.0.2: dependencies: punycode "^2.1.1" -treeify@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/treeify/-/treeify-1.1.0.tgz#4e31c6a463accd0943879f30667c4fdaff411bb8" - integrity 
sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A== +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= treeverse@^1.0.4: version "1.0.4" @@ -17612,9 +17909,9 @@ trim-newlines@^3.0.0: integrity "sha1-Jgpdli2LdSQlsy86fbDcrNF2wUQ= sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==" trim-off-newlines@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.1.tgz#9f9ba9d9efa8764c387698bcbfeb2c848f11adb3" - integrity sha1-n5up2e+odkw4dpi8v+sshI8RrbM= + version "1.0.3" + resolved "https://registry.yarnpkg.com/trim-off-newlines/-/trim-off-newlines-1.0.3.tgz#8df24847fcb821b0ab27d58ab6efec9f2fe961a1" + integrity sha512-kh6Tu6GbeSNMGfrrZh6Bb/4ZEHV1QlB4xNDBeog8Y9/QwFlKTRyWvY3Fs9tRDAMZliVUwieMgEdIeL/FtqjkJg== tryer@^1.0.1: version "1.0.1" @@ -17709,6 +18006,11 @@ tslib@^2.0.3: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== +tslib@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" + integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== + tsutils@^3.17.1: version "3.17.1" resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.17.1.tgz#ed719917f11ca0dee586272b2ac49e015a2dd759" @@ -17782,6 +18084,11 @@ type-fest@^0.20.2: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== +type-fest@^0.21.3: + version "0.21.3" + resolved 
"https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + type-fest@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" @@ -18152,10 +18459,10 @@ validate-npm-package-name@^3.0.0: dependencies: builtins "^1.0.3" -validator@13.5.2: - version "13.5.2" - resolved "https://registry.yarnpkg.com/validator/-/validator-13.5.2.tgz#c97ae63ed4224999fb6f42c91eaca9567fe69a46" - integrity sha512-mD45p0rvHVBlY2Zuy3F3ESIe1h5X58GPfAtslBjY7EtTqGquZTj+VX/J4RnHWN8FKq0C9WRVt1oWAcytWRuYLQ== +validator@13.7.0: + version "13.7.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857" + integrity sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw== value-equal@^1.0.1: version "1.0.1" @@ -18290,6 +18597,11 @@ web-vitals@^1.0.1: resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-1.1.0.tgz#7f410d9a1f7a1cd5d952806b45776204b47dc274" integrity sha512-1cx54eRxY/+M0KNKdNpNnuXAXG+vJEvwScV4DiV9rOYDguHoeDIzm09ghBohOPtkqPO5OtPC14FWkNva3SDisg== +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + webidl-conversions@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" @@ -18443,6 +18755,14 @@ whatwg-mimetype@^2.3.0: resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== +whatwg-url@^5.0.0: + version "5.0.0" + 
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + whatwg-url@^8.0.0: version "8.1.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.1.0.tgz#c628acdcf45b82274ce7281ee31dd3c839791771" @@ -18506,13 +18826,6 @@ wide-align@^1.1.0: dependencies: string-width "^1.0.2 || 2" -widest-line@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.1.tgz#7438764730ec7ef4381ce4df82fb98a53142a3fc" - integrity sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA== - dependencies: - string-width "^2.1.1" - widest-line@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" @@ -18705,15 +19018,6 @@ wrap-ansi@^3.0.1: string-width "^2.1.1" strip-ansi "^4.0.0" -wrap-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-4.0.0.tgz#b3570d7c70156159a2d42be5cc942e957f7b1131" - integrity sha512-uMTsj9rDb0/7kk1PbcbCcwvHUxp60fGDB/NNXpVa0Q+ic/e7y5+BwTxKfQ33VYgDppSwi/FBzpetYzo8s6tfbg== - dependencies: - ansi-styles "^3.2.0" - string-width "^2.1.1" - strip-ansi "^4.0.0" - wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" @@ -18810,6 +19114,11 @@ ws@7.4.6, ws@^7.2.3: resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== +ws@8.5.0: + version "8.5.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.5.0.tgz#bfb4be96600757fe5382de12c670dab984a1ed4f" + integrity sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg== + 
ws@^6.2.1: version "6.2.1" resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" @@ -18934,10 +19243,10 @@ yargs@^16.1.0, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" -yarn@1.22.10: - version "1.22.10" - resolved "https://registry.yarnpkg.com/yarn/-/yarn-1.22.10.tgz#c99daa06257c80f8fa2c3f1490724e394c26b18c" - integrity sha512-IanQGI9RRPAN87VGTF7zs2uxkSyQSrSPsju0COgbsKQOOXr5LtcVPeyXWgwVa0ywG3d8dg6kSYKGBuYK021qeA== +yarn@1.22.17: + version "1.22.17" + resolved "https://registry.yarnpkg.com/yarn/-/yarn-1.22.17.tgz#bf910747d22497b573131f7341c0e1d15c74036c" + integrity sha512-H0p241BXaH0UN9IeH//RT82tl5PfNraVpSpEoW+ET7lmopNC61eZ+A+IDvU8FM6Go5vx162SncDL8J1ZjRBriQ== yauzl@^2.10.0: version "2.10.0" From 2e73ed0ecce2ec0ab7316dccad7428ea6c908761 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 17 Mar 2022 19:09:45 +0100 Subject: [PATCH 004/170] :recycle: Move jenkins files --- Jenkinsfile.audit => .jenkins/Jenkinsfile.audit | 0 Jenkinsfile.sdk => .jenkins/Jenkinsfile.test | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename Jenkinsfile.audit => .jenkins/Jenkinsfile.audit (100%) rename Jenkinsfile.sdk => .jenkins/Jenkinsfile.test (100%) diff --git a/Jenkinsfile.audit b/.jenkins/Jenkinsfile.audit similarity index 100% rename from Jenkinsfile.audit rename to .jenkins/Jenkinsfile.audit diff --git a/Jenkinsfile.sdk b/.jenkins/Jenkinsfile.test similarity index 100% rename from Jenkinsfile.sdk rename to .jenkins/Jenkinsfile.test From 25b9d79d57efcd8c336411d4c18147c8ac256926 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 17 Mar 2022 20:36:31 +0100 Subject: [PATCH 005/170] :recycle: Update nvmrc --- .nvmrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.nvmrc b/.nvmrc index 9293735bb17..d5d3b29a47a 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -12.22.6 +16.14.1 From 964c854a5c4f208564380a75bc9f8642d9f9b3ab Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 18 Mar 2022 09:30:17 +0100 Subject: [PATCH 006/170] 
:recycle: Fix fs.write to use string --- framework/src/application.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/src/application.ts b/framework/src/application.ts index c986376cf66..8e12e28269c 100644 --- a/framework/src/application.ts +++ b/framework/src/application.ts @@ -523,7 +523,7 @@ export class Application { throw new DuplicateAppInstanceError(this.config.label, pidPath); } } - await fs.writeFile(pidPath, process.pid); + await fs.writeFile(pidPath, process.pid.toString()); } private _clearControllerPidFile() { From 0dd77c14ab49c6a484b1041bd65c702bf51fb5d7 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 18 Mar 2022 09:33:08 +0100 Subject: [PATCH 007/170] :recycle: Fix types on test --- .../integration/controller/ipc/ipc_client.spec.ts | 12 ++++++------ framework/test/unit/application.spec.ts | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/framework/test/integration/controller/ipc/ipc_client.spec.ts b/framework/test/integration/controller/ipc/ipc_client.spec.ts index c5120126fc7..1aabd5946e4 100644 --- a/framework/test/integration/controller/ipc/ipc_client.spec.ts +++ b/framework/test/integration/controller/ipc/ipc_client.spec.ts @@ -103,7 +103,7 @@ describe('IPCClient', () => { it('should be able to subscribe and receive event', async () => { // Act & Assert await new Promise(resolve => { - client.subSocket.on('message', data => { + client.subSocket.on('message', (data: string) => { expect(data).toEqual('myData'); resolve(); }); @@ -119,14 +119,14 @@ describe('IPCClient', () => { server.pubSocket.send('myData'); await Promise.all([ new Promise(resolve => { - client.subSocket.on('message', data => { + client.subSocket.on('message', (data: string) => { expect(data).toEqual('myData'); resolve(); }); }), await new Promise(resolve => { - client2.subSocket.on('message', data => { + client2.subSocket.on('message', (data: string) => { expect(data).toEqual('myData'); resolve(); }); @@ -143,14 +143,14 @@ 
describe('IPCClient', () => { client.pubSocket.send('myData'); await Promise.all([ new Promise(resolve => { - client2.subSocket.on('message', data => { + client2.subSocket.on('message', (data: string) => { expect(data).toEqual('myData'); resolve(); }); }), await new Promise(resolve => { - client3.subSocket.on('message', data => { + client3.subSocket.on('message', (data: string) => { expect(data).toEqual('myData'); resolve(); }); @@ -162,7 +162,7 @@ describe('IPCClient', () => { // Act & Assert client.pubSocket.send('myData'); await new Promise(resolve => { - client.subSocket.on('message', data => { + client.subSocket.on('message', (data: string) => { expect(data).toEqual('myData'); resolve(); }); diff --git a/framework/test/unit/application.spec.ts b/framework/test/unit/application.spec.ts index bd7e54735b3..3aa4fa1a09a 100644 --- a/framework/test/unit/application.spec.ts +++ b/framework/test/unit/application.spec.ts @@ -666,7 +666,7 @@ describe('Application', () => { expect(fs.writeFile).toHaveBeenCalledWith( // eslint-disable-next-line @typescript-eslint/restrict-template-expressions `${dirs.pids}/controller.pid`, - expect.toBeNumber(), + expect.toBeString(), ); }); }); From 503e6e7674eaf4694fe9aafc3b6c7a9e74173544 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 18 Mar 2022 17:56:51 +0100 Subject: [PATCH 008/170] Change ws version and fix commander tests --- .../src/bootstrapping/commands/config/show.ts | 2 +- .../commands/transaction/create.ts | 8 ++++---- .../bootstrapping/commands/transaction/sign.ts | 8 ++++---- elements/lisk-api-client/package.json | 2 +- .../test/integration/ws_channel.spec.ts | 3 ++- elements/lisk-client/package.json | 2 +- framework/package.json | 2 +- yarn.lock | 18 +++++++++--------- 8 files changed, 23 insertions(+), 22 deletions(-) diff --git a/commander/src/bootstrapping/commands/config/show.ts b/commander/src/bootstrapping/commands/config/show.ts index 05586ef82d9..aa616d56545 100644 --- 
a/commander/src/bootstrapping/commands/config/show.ts +++ b/commander/src/bootstrapping/commands/config/show.ts @@ -33,7 +33,7 @@ export class ShowCommand extends Command { 'config:show --config ./custom-config.json --data-path ./data', ]; - static flag = { + static flags = { 'data-path': flagsWithParser.dataPath, config: flagsWithParser.config, pretty: flagsWithParser.pretty, diff --git a/commander/src/bootstrapping/commands/transaction/create.ts b/commander/src/bootstrapping/commands/transaction/create.ts index f22b6656f84..266f8b50219 100644 --- a/commander/src/bootstrapping/commands/transaction/create.ts +++ b/commander/src/bootstrapping/commands/transaction/create.ts @@ -313,16 +313,16 @@ export abstract class CreateCommand extends Command { } if (flags.json) { - this.printJSON(!!flags.pretty, { + this.printJSON(flags.pretty, { transaction: encodeTransaction(this._schema, transactionObject, this._client).toString( 'hex', ), }); - this.printJSON(!!flags.pretty, { + this.printJSON(flags.pretty, { transaction: transactionToJSON(this._schema, transactionObject, this._client), }); } else { - this.printJSON(!!flags.pretty, { + this.printJSON(flags.pretty, { transaction: encodeTransaction(this._schema, transactionObject, this._client).toString( 'hex', ), @@ -330,7 +330,7 @@ export abstract class CreateCommand extends Command { } } - printJSON(pretty: boolean, message?: Record): void { + printJSON(pretty?: boolean, message?: Record): void { if (pretty) { this.log(JSON.stringify(message, undefined, ' ')); } else { diff --git a/commander/src/bootstrapping/commands/transaction/sign.ts b/commander/src/bootstrapping/commands/transaction/sign.ts index 55e40c54dae..97b446bbe12 100644 --- a/commander/src/bootstrapping/commands/transaction/sign.ts +++ b/commander/src/bootstrapping/commands/transaction/sign.ts @@ -242,16 +242,16 @@ export abstract class SignCommand extends Command { } if (flags.json) { - this.printJSON(!!flags.pretty, { + this.printJSON(flags.pretty, { 
transaction: encodeTransaction(this._schema, signedTransaction, this._client).toString( 'hex', ), }); - this.printJSON(!!flags.pretty, { + this.printJSON(flags.pretty, { transaction: transactionToJSON(this._schema, signedTransaction, this._client), }); } else { - this.printJSON(!!flags.pretty, { + this.printJSON(flags.pretty, { transaction: encodeTransaction(this._schema, signedTransaction, this._client).toString( 'hex', ), @@ -259,7 +259,7 @@ export abstract class SignCommand extends Command { } } - printJSON(pretty: boolean, message?: Record): void { + printJSON(pretty?: boolean, message?: Record): void { if (pretty) { this.log(JSON.stringify(message, undefined, ' ')); } else { diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 726d100949b..4e30b073f72 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -42,7 +42,7 @@ "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", - "ws": "8.5.0" + "ws": "7.5.7" }, "devDependencies": { "@liskhq/lisk-chain": "^0.3.3", diff --git a/elements/lisk-api-client/test/integration/ws_channel.spec.ts b/elements/lisk-api-client/test/integration/ws_channel.spec.ts index 1e5c5957929..6b26fe94fe5 100644 --- a/elements/lisk-api-client/test/integration/ws_channel.spec.ts +++ b/elements/lisk-api-client/test/integration/ws_channel.spec.ts @@ -14,6 +14,7 @@ import { createServer, Server } from 'http'; import * as WebSocket from 'isomorphic-ws'; +import { Socket } from 'net'; import { WSChannel } from '../../src/ws_channel'; jest.unmock('isomorphic-ws'); @@ -53,7 +54,7 @@ describe('WSChannel', () => { // https://github.com/websockets/ws/issues/377#issuecomment-462152231 http.on('upgrade', (request, socket, head) => { setTimeout(() => { - server.handleUpgrade(request, socket, head, ws => { + server.handleUpgrade(request, socket as Socket, head, ws => { server.emit('connection', ws, request); }); }, 3000); diff --git 
a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 27ca9b5c9b8..b422f8389fc 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -89,6 +89,6 @@ "ts-node": "9.1.1", "tsconfig-paths": "3.9.0", "typescript": "4.2.3", - "ws": "7.4.6" + "ws": "7.5.7" } } diff --git a/framework/package.json b/framework/package.json index 34fdcc25069..4a3f0cbde25 100644 --- a/framework/package.json +++ b/framework/package.json @@ -61,7 +61,7 @@ "pm2-axon-rpc": "0.7.1", "ps-list": "7.0.0", "sodium-native": "3.2.0", - "ws": "8.5.0" + "ws": "7.5.7" }, "devDependencies": { "@liskhq/lisk-passphrase": "^3.1.0", diff --git a/yarn.lock b/yarn.lock index 12b18c1b666..9e8d71bead6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -19109,15 +19109,10 @@ ws@7.1.0: dependencies: async-limiter "^1.0.0" -ws@7.4.6, ws@^7.2.3: - version "7.4.6" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" - integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== - -ws@8.5.0: - version "8.5.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.5.0.tgz#bfb4be96600757fe5382de12c670dab984a1ed4f" - integrity sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg== +ws@7.5.7: + version "7.5.7" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.7.tgz#9e0ac77ee50af70d58326ecff7e85eb3fa375e67" + integrity sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A== ws@^6.2.1: version "6.2.1" @@ -19126,6 +19121,11 @@ ws@^6.2.1: dependencies: async-limiter "~1.0.0" +ws@^7.2.3: + version "7.4.6" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" + integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== + xml-name-validator@^3.0.0: version "3.0.0" resolved 
"https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" From 96d449f81fa8efc2a2b797fbadac5df9034cb48b Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 22 Mar 2022 15:17:19 +0100 Subject: [PATCH 009/170] Update elements/lisk-db/src/kv_store.ts Co-authored-by: !shan --- elements/lisk-db/src/kv_store.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elements/lisk-db/src/kv_store.ts b/elements/lisk-db/src/kv_store.ts index c5749ec44d4..b9e1d8aaadd 100644 --- a/elements/lisk-db/src/kv_store.ts +++ b/elements/lisk-db/src/kv_store.ts @@ -18,7 +18,7 @@ import levelup, { LevelUp } from 'levelup'; import { NotFoundError } from './errors'; // rocksdb removed the default export. However, @types/rocksdb still only exposes default. -// Therefore, temporally requiree with below syntax. +// Therefore, temporarily require with below syntax. // eslint-disable-next-line import/order import rocksDB = require('rocksdb'); From c3051d5ab0e684a35d740d498bce8a4417171126 Mon Sep 17 00:00:00 2001 From: Ishan Date: Wed, 23 Mar 2022 17:27:33 +0100 Subject: [PATCH 010/170] Bump version 5.2.2-alpha.0 --- commander/package.json | 26 ++++++------ .../templates/init/package-template.json | 24 +++++------ .../templates/init_plugin/package.json | 4 +- elements/lisk-api-client/package.json | 10 ++--- elements/lisk-bft/package.json | 12 +++--- elements/lisk-chain/package.json | 16 ++++---- elements/lisk-client/package.json | 18 ++++----- elements/lisk-codec/package.json | 6 +-- elements/lisk-cryptography/package.json | 2 +- elements/lisk-db/package.json | 2 +- elements/lisk-elements/package.json | 30 +++++++------- elements/lisk-genesis/package.json | 12 +++--- elements/lisk-p2p/package.json | 8 ++-- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +-- elements/lisk-transactions/package.json | 8 ++-- elements/lisk-tree/package.json | 6 +-- elements/lisk-utils/package.json | 2 
+- elements/lisk-validator/package.json | 4 +- .../package.json | 10 ++--- .../lisk-framework-faucet-plugin/package.json | 16 ++++---- .../lisk-framework-forger-plugin/package.json | 22 +++++----- .../package.json | 14 +++---- .../package.json | 14 +++---- .../package.json | 20 +++++----- framework/package.json | 30 +++++++------- protocol-specs/package.json | 8 ++-- sdk/package.json | 40 +++++++++---------- 28 files changed, 186 insertions(+), 186 deletions(-) diff --git a/commander/package.json b/commander/package.json index fa89b30ff5f..416cf1080fb 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.1.9", + "version": "5.1.10-alpha.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,17 +91,17 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-client": "^5.2.1", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-genesis": "^0.2.3", - "@liskhq/lisk-passphrase": "^3.1.0", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-client": "^5.2.2-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "@oclif/command": "1.8.16", "@oclif/config": "1.18.3", "@oclif/errors": "1.3.5", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.9.1", + 
"lisk-framework": "^0.9.2-alpha.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 527268d1827..fc9e4b0ffdb 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -98,24 +98,24 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.1.6", - "@liskhq/lisk-framework-faucet-plugin": "^0.1.6", - "@oclif/command": "1.8.0", - "@oclif/plugin-autocomplete": "0.3.0", - "@oclif/plugin-help": "3.2.2", - "fs-extra": "9.0.1", + "@liskhq/lisk-framework-dashboard-plugin": "^0.1.7-alpha.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.1.7-alpha.0", + "@oclif/command": "1.8.16", + "@oclif/plugin-autocomplete": "1.2.0", + "@oclif/plugin-help": "5.1.12", + "fs-extra": "9.1.0", "inquirer": "7.3.2", - "lisk-commander": "^5.1.9", - "lisk-sdk": "^5.2.1", + "lisk-commander": "^5.1.10-alpha.0", + "lisk-sdk": "^5.2.2-alpha.0", "tar": "6.0.2", "tslib": "1.13.0", "axios": "0.21.1" }, "devDependencies": { - "@oclif/dev-cli": "1.22.2", - "@oclif/config": "1.15.1", - "@types/fs-extra": "9.0.1", - "@types/node": "12.20.6", + "@oclif/dev-cli": "1.26.10", + "@oclif/config": "1.18.3", + "@types/fs-extra": "8.1.0", + "@types/node": "16.11.26", "@types/tar": "4.0.3", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 44b1a6c2428..8e81f89d479 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ 
b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -29,12 +29,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "lisk-sdk": "^5.2.1" + "lisk-sdk": "^5.2.2-alpha.0" }, "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 4e30b073f72..2a6f066a7e9 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.1.5", + "version": "5.1.6-alpha.0", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-transactions": "^5.2.1", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.3.3", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "16.11.26", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index 53143b5ac3c..d7e277d8012 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.3.3", + "version": "0.3.4-alpha.0", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": 
"Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "@types/node": "16.11.26", "debug": "4.3.4" }, diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 53d0839b2b2..6cf97d9a467 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.3.3", + "version": "0.3.4-alpha.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-tree": "^0.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-tree": "^0.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.0", + "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", "@types/debug": "4.1.7", "@types/faker": "4.1.10", "@types/jest": "26.0.21", diff --git a/elements/lisk-client/package.json 
b/elements/lisk-client/package.json index b422f8389fc..c3d78f1f2e5 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.2.1", + "version": "5.2.2-alpha.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -55,14 +55,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-passphrase": "^3.1.0", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-tree": "^0.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-tree": "^0.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index d5a378c43ea..a2a211e1af4 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.2.1", + "version": "0.2.2-alpha.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1" + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": 
"^0.6.2-alpha.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 9dcab3982a0..364ec331669 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "3.2.0", + "version": "3.2.1-alpha.0", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-db/package.json b/elements/lisk-db/package.json index 14ad3eef88d..9b7f7a80ed1 100644 --- a/elements/lisk-db/package.json +++ b/elements/lisk-db/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-db", - "version": "0.2.0", + "version": "0.2.1-alpha.0", "description": "A database access implementation for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index e6d0f876468..ce7695f96eb 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.2.1", + "version": "5.2.2-alpha.0", "description": "Elements for building blockchain applications in the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,20 +36,20 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-bft": "^0.3.3", - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-genesis": "^0.2.3", - "@liskhq/lisk-p2p": "^0.7.2", - "@liskhq/lisk-passphrase": "^3.1.0", - "@liskhq/lisk-transaction-pool": "^0.5.2", - "@liskhq/lisk-transactions": "^5.2.1", - 
"@liskhq/lisk-tree": "^0.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1" + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-bft": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-p2p": "^0.7.3-alpha.0", + "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", + "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-tree": "^0.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 80ce265086a..87bf37a98e7 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.2.3", + "version": "0.2.4-alpha.0", "description": "Library containing genesis block creation functions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "lodash.clonedeep": "4.5.0" }, "devDependencies": { diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 31d83e62885..42a6e309c48 100644 --- a/elements/lisk-p2p/package.json +++ 
b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.7.2", + "version": "0.7.3-alpha.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,9 +42,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "lodash.shuffle": "4.2.0", "semver": "7.3.5", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 67ec73c9f7f..14db756b7a1 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "3.1.0", + "version": "3.1.1-alpha.0", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index fc9a47e0ceb..1f93b1eb351 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.5.2", + "version": "0.5.3-alpha.0", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,8 +37,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-utils": "^0.2.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", "debug": "4.3.4" }, 
"devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 2167e1a33ae..7d955b17df7 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "5.2.1", + "version": "5.2.2-alpha.0", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,9 +36,9 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-validator": "^0.6.1" + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 39fa916d5ac..3c99c60f924 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.2.1", + "version": "0.2.2-alpha.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-utils": "^0.2.0" + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index aeb6e4d1fbe..793788be7ab 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": 
"0.2.0", + "version": "0.2.1-alpha.0", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 6298d64813b..571682eea2b 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.6.1", + "version": "0.6.2-alpha.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "ajv": "8.1.0", "ajv-formats": "2.0.2", "debug": "4.3.4", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index d58dffaf57b..1fcd283f4a5 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.1.6", + "version": "0.1.7-alpha.0", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-utils": "^0.2.0", + "@liskhq/lisk-client": "^5.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.9.1", + 
"lisk-framework": "^0.9.2-alpha.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 3e4d53095c2..458c6258814 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.1.6", + "version": "0.1.7-alpha.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-client": "^5.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-client": "^5.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "axios": "0.21.1", "express": "4.17.3", - "lisk-framework": "^0.9.1", + "lisk-framework": "^0.9.2-alpha.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 139157fb30a..41b37a238c6 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.2.6", + "version": "0.2.7-alpha.0", "description": "A plugin for lisk-framework that 
monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,13 +38,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "axios": "0.21.1", "cors": "2.8.5", "debug": "4.3.4", @@ -52,11 +52,11 @@ "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.9.1" + "lisk-framework": "^0.9.2-alpha.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-genesis": "^0.2.3", + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.0", "@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 96eb0a22fb8..54b5684adc3 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.2.6", + "version": "0.2.7-alpha.0", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,18 +37,18 @@ "prepublishOnly": "npm run lint && npm test && 
npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.1" + "lisk-framework": "^0.9.2-alpha.0" }, "devDependencies": { - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-transactions": "^5.2.1", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", "@types/cors": "2.8.6", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index aaa0b834fa6..452a730ff2a 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.2.6", + "version": "0.2.7-alpha.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,16 +37,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.1" + "lisk-framework": 
"^0.9.2-alpha.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 3393fb8c2a8..11b5f59be2e 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.2.6", + "version": "0.2.7-alpha.0", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,17 +38,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.3.3", - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-bft": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.9.1" + "lisk-framework": "^0.9.2-alpha.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework/package.json b/framework/package.json index 4a3f0cbde25..836a8300cfd 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.9.1", + "version": "0.9.2-alpha.0", "description": "Lisk blockchain 
application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,19 +40,19 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-bft": "^0.3.3", - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-genesis": "^0.2.3", - "@liskhq/lisk-p2p": "^0.7.2", - "@liskhq/lisk-transaction-pool": "^0.5.2", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-tree": "^0.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-bft": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-p2p": "^0.7.3-alpha.0", + "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-tree": "^0.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.5", @@ -64,7 +64,7 @@ "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.0", + "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index 79fc83a71df..220898d0fd5 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.2.1", - "@liskhq/lisk-cryptography": "3.2.0", - "@liskhq/lisk-passphrase": "3.1.0", - "@liskhq/lisk-validator": "0.6.1", + "@liskhq/lisk-codec": "0.2.2-alpha.0", + 
"@liskhq/lisk-cryptography": "3.2.1-alpha.0", + "@liskhq/lisk-passphrase": "3.1.1-alpha.0", + "@liskhq/lisk-validator": "0.6.2-alpha.0", "protobufjs": "6.9.0" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 8af89410639..10479629aec 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.2.1", + "version": "5.2.2-alpha.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,25 +29,25 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.5", - "@liskhq/lisk-bft": "^0.3.3", - "@liskhq/lisk-chain": "^0.3.3", - "@liskhq/lisk-codec": "^0.2.1", - "@liskhq/lisk-cryptography": "^3.2.0", - "@liskhq/lisk-db": "^0.2.0", - "@liskhq/lisk-framework-forger-plugin": "^0.2.6", - "@liskhq/lisk-framework-http-api-plugin": "^0.2.6", - "@liskhq/lisk-framework-monitor-plugin": "^0.2.6", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.6", - "@liskhq/lisk-genesis": "^0.2.3", - "@liskhq/lisk-p2p": "^0.7.2", - "@liskhq/lisk-passphrase": "^3.1.0", - "@liskhq/lisk-transaction-pool": "^0.5.2", - "@liskhq/lisk-transactions": "^5.2.1", - "@liskhq/lisk-tree": "^0.2.1", - "@liskhq/lisk-utils": "^0.2.0", - "@liskhq/lisk-validator": "^0.6.1", - "lisk-framework": "^0.9.1" + "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-bft": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-codec": "^0.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-db": "^0.2.1-alpha.0", + "@liskhq/lisk-framework-forger-plugin": "^0.2.7-alpha.0", + "@liskhq/lisk-framework-http-api-plugin": "^0.2.7-alpha.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.2.7-alpha.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.7-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-p2p": "^0.7.3-alpha.0", + "@liskhq/lisk-passphrase": 
"^3.1.1-alpha.0", + "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", + "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-tree": "^0.2.2-alpha.0", + "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "lisk-framework": "^0.9.2-alpha.0" }, "devDependencies": { "eslint": "7.22.0", From 75e36cb7e86bb9b3316d4d72ebb13775a13b372b Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Tue, 19 Apr 2022 18:02:26 +0200 Subject: [PATCH 011/170] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Use=20roundLength?= =?UTF-8?q?=20from=20the=20config=20in=20lisk-chain?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- elements/lisk-chain/src/chain.ts | 22 +++++++++---------- elements/lisk-chain/test/unit/chain.spec.ts | 7 ++---- elements/lisk-chain/test/unit/process.spec.ts | 5 +---- 3 files changed, 13 insertions(+), 21 deletions(-) diff --git a/elements/lisk-chain/src/chain.ts b/elements/lisk-chain/src/chain.ts index 52b5c8b3785..42a79981e3f 100644 --- a/elements/lisk-chain/src/chain.ts +++ b/elements/lisk-chain/src/chain.ts @@ -77,6 +77,7 @@ interface ChainConstructor { readonly rewardDistance: number; readonly rewardOffset: number; readonly minFeePerByte: number; + readonly roundLength: number; readonly baseFees: { readonly moduleID: number; readonly assetID: number; @@ -101,6 +102,7 @@ export class Chain { readonly rewardMilestones: ReadonlyArray; readonly networkIdentifier: Buffer; readonly minFeePerByte: number; + readonly roundLength: number; readonly baseFees: { readonly moduleID: number; readonly assetID: number; @@ -117,7 +119,6 @@ export class Chain { readonly [key: number]: Schema; }; private readonly _defaultAccount: Record; - private _numberOfValidators: number; public constructor({ db, @@ -134,10 +135,10 @@ export class Chain { rewardMilestones, minFeePerByte, baseFees, + roundLength, minBlockHeaderCache = DEFAULT_MIN_BLOCK_HEADER_CACHE, maxBlockHeaderCache = DEFAULT_MAX_BLOCK_HEADER_CACHE, }: 
ChainConstructor) { - this._numberOfValidators = -1; this.events = new EventEmitter(); const { default: defaultAccount, ...schema } = getAccountSchemaWithDefault(accountSchemas); @@ -186,6 +187,7 @@ export class Chain { networkIdentifier, minFeePerByte, baseFees, + roundLength, }; } @@ -197,8 +199,8 @@ export class Chain { return this._lastBlock; } - public get numberOfValidators(): number { - return this._numberOfValidators; + public get roundLength(): number { + return this.constants.roundLength; } public get accountSchema(): Schema { @@ -234,9 +236,6 @@ export class Chain { ); } - const validators = await this.getValidators(); - this._numberOfValidators = validators.length; - this._lastBlock = storageLastBlock; } @@ -258,7 +257,7 @@ export class Chain { const genesisInfo = await this._getGenesisInfo(); const fromHeight = Math.max( genesisInfo?.height ?? 0, - this._lastBlock.header.height - this.numberOfValidators * 3 - skipLastHeights, + this._lastBlock.header.height - this.constants.roundLength * 3 - skipLastHeights, ); const toHeight = Math.max(this._lastBlock.header.height - skipLastHeights, 1); const lastBlockHeaders = await this.dataAccess.getBlockHeadersByHeightBetween( @@ -299,7 +298,7 @@ export class Chain { } public isValidSeedReveal(blockHeader: BlockHeader, stateStore: StateStore): boolean { - return isValidSeedReveal(blockHeader, stateStore, this.numberOfValidators); + return isValidSeedReveal(blockHeader, stateStore, this.constants.roundLength); } public validateGenesisBlockHeader(block: GenesisBlock): void { @@ -327,7 +326,6 @@ export class Chain { initRounds: block.header.asset.initRounds, }), ); - this._numberOfValidators = block.header.asset.initDelegates.length; } public validateTransaction(transaction: Transaction): void { @@ -376,7 +374,7 @@ export class Chain { public async verifyBlockHeader(block: Block, stateStore: StateStore): Promise { verifyPreviousBlockId(block, this._lastBlock); validateBlockSlot(block, this._lastBlock, this.slots); - 
verifyReward(block.header, stateStore, this.numberOfValidators); + verifyReward(block.header, stateStore, this.constants.roundLength); await verifyBlockGenerator(block.header, this.slots, stateStore); } @@ -503,7 +501,7 @@ export class Chain { if (!genesisInfo) { throw new Error('genesis info not stored'); } - return this._numberOfValidators * genesisInfo.initRounds + genesisInfo.height; + return this.constants.roundLength * genesisInfo.initRounds + genesisInfo.height; } private async _getGenesisInfo(): Promise { diff --git a/elements/lisk-chain/test/unit/chain.spec.ts b/elements/lisk-chain/test/unit/chain.spec.ts index 474344e338d..d41d80a7335 100644 --- a/elements/lisk-chain/test/unit/chain.spec.ts +++ b/elements/lisk-chain/test/unit/chain.spec.ts @@ -53,6 +53,7 @@ describe('chain', () => { networkIdentifier: defaultNetworkIdentifier, minFeePerByte: 1000, baseFees: [], + roundLength: 103, }; const emptyEncodedDiff = codec.encode(stateDiffSchema, { created: [], @@ -147,13 +148,11 @@ describe('chain', () => { chainInstance['_lastBlock'] = createValidDefaultBlock({ header: { height: 1 }, }); - chainInstance['_numberOfValidators'] = 103; await chainInstance.newStateStore(); expect(chainInstance.dataAccess.getBlockHeadersByHeightBetween).toHaveBeenCalledWith(0, 1); }); it('should return with the chain state with lastBlock.height to lastBlock.height - 309', async () => { - chainInstance['_numberOfValidators'] = 103; await chainInstance.newStateStore(); expect(chainInstance.dataAccess.getBlockHeadersByHeightBetween).toHaveBeenCalledWith( chainInstance.lastBlock.header.height - 309, @@ -162,7 +161,6 @@ describe('chain', () => { }); it('should get the rewards of the last block', async () => { - chainInstance['_numberOfValidators'] = 103; const stateStore = await chainInstance.newStateStore(); expect(stateStore.chain.lastBlockReward.toString()).toEqual( @@ -171,7 +169,6 @@ describe('chain', () => { }); it('should return with the chain state with lastBlock.height to 
lastBlock.height - 310', async () => { - chainInstance['_numberOfValidators'] = 103; await chainInstance.newStateStore(1); expect(chainInstance.dataAccess.getBlockHeadersByHeightBetween).toHaveBeenCalledWith( chainInstance.lastBlock.header.height - 310, @@ -441,7 +438,7 @@ describe('chain', () => { height: 0, initRounds: 3, }); - chainInstance['_numberOfValidators'] = 103; + when(db.get) .calledWith(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`) .mockResolvedValue(validatorBuffer as never) diff --git a/elements/lisk-chain/test/unit/process.spec.ts b/elements/lisk-chain/test/unit/process.spec.ts index b8427e2f3f8..e701905ff0a 100644 --- a/elements/lisk-chain/test/unit/process.spec.ts +++ b/elements/lisk-chain/test/unit/process.spec.ts @@ -55,6 +55,7 @@ describe('chain/process block', () => { totalAmount: BigInt('10000000000000000'), blockTime: 10, minFeePerByte: 1000, + roundLength: 103, baseFees: [ { moduleID: 2, @@ -550,10 +551,6 @@ describe('chain/process block', () => { }, ]; - beforeEach(() => { - chainInstance['_numberOfValidators'] = 103; - }); - describe('Given delegate was only active in last three rounds', () => { it('should return false if current block seedReveal is not a preimage of previous block', () => { // Arrange From adfc258a2641f5e7e9b1ec418cf2ad403eb37e70 Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Wed, 20 Apr 2022 12:46:13 +0200 Subject: [PATCH 012/170] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Use=20roundLength?= =?UTF-8?q?=20from=20Chain?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- elements/lisk-bft/src/finality_manager.ts | 6 +++--- ...ft_finality_manager_protocol_specs.spec.ts | 2 +- ...t_fork_choice_rules_protocol_specs.spec.ts | 2 +- ...valid_block_headers_protocol_specs.spec.ts | 2 +- elements/lisk-bft/test/unit/bft.spec.ts | 2 +- .../test/unit/finality_manager.spec.ts | 4 ++-- framework/src/node/node.ts | 5 +++-- .../block_synchronization_mechanism.ts | 11 +++++----- 
.../fast_chain_switching_mechanism.ts | 10 +++++----- .../src/schema/application_config_schema.ts | 8 +++++++- framework/src/testing/block_processing_env.ts | 7 ++++--- framework/src/testing/fixtures/config.ts | 3 +-- framework/src/types.ts | 1 + .../block_process/protocol_violation.spec.ts | 6 +++--- .../node/synchronizer/temp_block.spec.ts | 4 ++-- .../__snapshots__/application.spec.ts.snap | 1 + .../unit/modules/dpos/dpos_module.spec.ts | 1 + .../unit/modules/keys/keys_module.spec.ts | 1 + .../modules/sequence/sequence_module.spec.ts | 1 + .../unit/modules/token/token_module.spec.ts | 1 + .../block_synchronization_mechanism.spec.ts | 20 +++++++++---------- .../fast_chain_switching_mechanism.spec.ts | 14 ++++++------- .../node/synchronizer/synchronizer.spec.ts | 1 + .../application_schema.spec.ts.snap | 7 +++++++ 24 files changed, 70 insertions(+), 50 deletions(-) diff --git a/elements/lisk-bft/src/finality_manager.ts b/elements/lisk-bft/src/finality_manager.ts index f7bb7b161fa..7aaec4d1c8a 100644 --- a/elements/lisk-bft/src/finality_manager.ts +++ b/elements/lisk-bft/src/finality_manager.ts @@ -147,15 +147,15 @@ export class FinalityManager extends EventEmitter { // Threshold to consider a block pre-committed (or finalized) this.preCommitThreshold = threshold; - if (this._chain.numberOfValidators <= 0) { + if (this._chain.roundLength <= 0) { throw new Error('Invalid number of validators for BFT property'); } // Limit for blocks to make perform verification or pre-vote/pre-commit (1 block less than 3 rounds) - this.processingThreshold = this._chain.numberOfValidators * BFT_ROUND_THRESHOLD - 1; + this.processingThreshold = this._chain.roundLength * BFT_ROUND_THRESHOLD - 1; // Maximum headers to store (5 rounds) - this.maxHeaders = this._chain.numberOfValidators * 5; + this.maxHeaders = this._chain.roundLength * 5; // Height up to which blocks are finalized this.finalizedHeight = finalizedHeight; diff --git 
a/elements/lisk-bft/test/protocol_specs/bft_finality_manager_protocol_specs.spec.ts b/elements/lisk-bft/test/protocol_specs/bft_finality_manager_protocol_specs.spec.ts index 27e24b6c6a9..db65cc4db1e 100644 --- a/elements/lisk-bft/test/protocol_specs/bft_finality_manager_protocol_specs.spec.ts +++ b/elements/lisk-bft/test/protocol_specs/bft_finality_manager_protocol_specs.spec.ts @@ -91,7 +91,7 @@ describe('FinalityManager', () => { dataAccess: { getConsensusState: jest.fn(), }, - numberOfValidators: scenario.config.activeDelegates, + roundLength: scenario.config.activeDelegates, } as unknown) as Chain; finalityManager = new FinalityManager({ diff --git a/elements/lisk-bft/test/protocol_specs/bft_fork_choice_rules_protocol_specs.spec.ts b/elements/lisk-bft/test/protocol_specs/bft_fork_choice_rules_protocol_specs.spec.ts index 96d1f402394..646a9b9a095 100644 --- a/elements/lisk-bft/test/protocol_specs/bft_fork_choice_rules_protocol_specs.spec.ts +++ b/elements/lisk-bft/test/protocol_specs/bft_fork_choice_rules_protocol_specs.spec.ts @@ -43,7 +43,7 @@ describe('bft', () => { dataAccess: { getConsensusState: jest.fn(), }, - numberOfValidators: 103, + roundLength: 103, } as unknown) as Chain; threshold = 68; diff --git a/elements/lisk-bft/test/protocol_specs/bft_invalid_block_headers_protocol_specs.spec.ts b/elements/lisk-bft/test/protocol_specs/bft_invalid_block_headers_protocol_specs.spec.ts index 5fe888e8acf..fa7ef2376bb 100644 --- a/elements/lisk-bft/test/protocol_specs/bft_invalid_block_headers_protocol_specs.spec.ts +++ b/elements/lisk-bft/test/protocol_specs/bft_invalid_block_headers_protocol_specs.spec.ts @@ -44,7 +44,7 @@ describe('FinalityManager', () => { dataAccess: { getConsensusState: jest.fn(), }, - numberOfValidators: 103, + roundLength: 103, } as unknown) as Chain; stateStore = (new StateStoreMock() as unknown) as StateStore; }); diff --git a/elements/lisk-bft/test/unit/bft.spec.ts b/elements/lisk-bft/test/unit/bft.spec.ts index c6b49d8e7df..c9ac5e9ad00 
100644 --- a/elements/lisk-bft/test/unit/bft.spec.ts +++ b/elements/lisk-bft/test/unit/bft.spec.ts @@ -71,7 +71,7 @@ describe('bft', () => { dataAccess: { getConsensusState: jest.fn(), }, - numberOfValidators: 103, + roundLength: 103, } as unknown) as Chain; threshold = 68; diff --git a/elements/lisk-bft/test/unit/finality_manager.spec.ts b/elements/lisk-bft/test/unit/finality_manager.spec.ts index b14086b8ef5..06d0c9b3847 100644 --- a/elements/lisk-bft/test/unit/finality_manager.spec.ts +++ b/elements/lisk-bft/test/unit/finality_manager.spec.ts @@ -68,7 +68,7 @@ describe('finality_manager', () => { dataAccess: { getConsensusState: jest.fn(), }, - numberOfValidators: 103, + roundLength: 103, } as unknown) as Chain; finalityManager = new FinalityManager({ @@ -89,7 +89,7 @@ describe('finality_manager', () => { }); it('should throw error if number of validator is not positive', () => { - (chainStub as any).numberOfValidators = -3; + (chainStub as any).roundLength = -3; expect( () => new FinalityManager({ diff --git a/framework/src/node/node.ts b/framework/src/node/node.ts index 546be1871e0..cffb05e1849 100644 --- a/framework/src/node/node.ts +++ b/framework/src/node/node.ts @@ -373,10 +373,10 @@ export class Node { const startTime = this._chain.slots.getSlotTime(slot); let nextForgingTime = startTime; - const slotInRound = slot % this._chain.numberOfValidators; + const slotInRound = slot % this._chain.roundLength; const blockTime = this._chain.slots.blockTime(); const forgersInfo = []; - for (let i = slotInRound; i < slotInRound + this._chain.numberOfValidators; i += 1) { + for (let i = slotInRound; i < slotInRound + this._chain.roundLength; i += 1) { const validator = validators[i % validators.length]; forgersInfo.push({ ...validator, @@ -561,6 +561,7 @@ export class Node { accountSchemas: this._registeredAccountSchemas, minFeePerByte: this._options.genesisConfig.minFeePerByte, baseFees: this._options.genesisConfig.baseFees, + roundLength: 
this._options.genesisConfig.roundLength, }); this._bft = new BFT({ diff --git a/framework/src/node/synchronizer/block_synchronization_mechanism.ts b/framework/src/node/synchronizer/block_synchronization_mechanism.ts index 23c4c4e20c8..fbd653f4cad 100644 --- a/framework/src/node/synchronizer/block_synchronization_mechanism.ts +++ b/framework/src/node/synchronizer/block_synchronization_mechanism.ts @@ -122,7 +122,7 @@ export class BlockSynchronizationMechanism extends BaseSynchronizer { ); const finalizedBlockSlot = this._chain.slots.getSlotNumber(finalizedBlock.timestamp); const currentBlockSlot = this._chain.slots.getSlotNumber(); - const threeRounds = this._chain.numberOfValidators * 3; + const threeRounds = this._chain.roundLength * 3; return currentBlockSlot - finalizedBlockSlot > threeRounds; } @@ -367,15 +367,16 @@ export class BlockSynchronizationMechanism extends BaseSynchronizer { * corresponding to the first block of descendent consecutive rounds (starting from the last one). */ private async _requestLastCommonBlock(peerId: string): Promise { + console.log('-------->', this._chain.roundLength) const blocksPerRequestLimit = 10; // Maximum number of block IDs to be included in a single request const requestLimit = 3; // Maximum number of requests to be made to the remote peer let numberOfRequests = 1; // Keeps track of the number of requests made to the remote peer let highestCommonBlock; // Holds the common block returned by the peer if found. 
let currentRound = Math.ceil( - this._chain.lastBlock.header.height / this._chain.numberOfValidators, + this._chain.lastBlock.header.height / this._chain.roundLength, ); // Holds the current round number - let currentHeight = currentRound * this._chain.numberOfValidators; + let currentHeight = currentRound * this._chain.roundLength; while ( !highestCommonBlock && @@ -384,7 +385,7 @@ export class BlockSynchronizationMechanism extends BaseSynchronizer { ) { const heightList = computeBlockHeightsList( this.bft.finalizedHeight, - this._chain.numberOfValidators, + this._chain.roundLength, blocksPerRequestLimit, currentRound, ); @@ -408,7 +409,7 @@ export class BlockSynchronizationMechanism extends BaseSynchronizer { highestCommonBlock = data; // If no common block, data is undefined. currentRound -= blocksPerRequestLimit; - currentHeight = currentRound * this._chain.numberOfValidators; + currentHeight = currentRound * this._chain.roundLength; } return highestCommonBlock; diff --git a/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts b/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts index 917658dc77a..89e1e016d51 100644 --- a/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts +++ b/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts @@ -92,7 +92,7 @@ export class FastChainSwitchingMechanism extends BaseSynchronizer { const { lastBlock } = this._chain; // 3. 
Step: Check whether B justifies fast chain switching mechanism - const twoRounds = this._chain.numberOfValidators * 2; + const twoRounds = this._chain.roundLength * 2; if (Math.abs(receivedBlock.header.height - lastBlock.header.height) > twoRounds) { return false; } @@ -160,12 +160,12 @@ export class FastChainSwitchingMechanism extends BaseSynchronizer { if ( this._chain.lastBlock.header.height - highestCommonBlock.height > - this._chain.numberOfValidators * 2 || - receivedBlock.header.height - highestCommonBlock.height > this._chain.numberOfValidators * 2 + this._chain.roundLength * 2 || + receivedBlock.header.height - highestCommonBlock.height > this._chain.roundLength * 2 ) { throw new AbortError( `Height difference between both chains is higher than ${ - this._chain.numberOfValidators * 2 + this._chain.roundLength * 2 }`, ); } @@ -316,7 +316,7 @@ export class FastChainSwitchingMechanism extends BaseSynchronizer { private _computeLastTwoRoundsHeights(): number[] { return new Array( - Math.min(this._chain.numberOfValidators * 2, this._chain.lastBlock.header.height), + Math.min(this._chain.roundLength * 2, this._chain.lastBlock.header.height), ) .fill(0) .map((_, index) => this._chain.lastBlock.header.height - index); diff --git a/framework/src/schema/application_config_schema.ts b/framework/src/schema/application_config_schema.ts index bb2d9c6bc04..2074c14d386 100644 --- a/framework/src/schema/application_config_schema.ts +++ b/framework/src/schema/application_config_schema.ts @@ -61,7 +61,7 @@ export const applicationConfigSchema = { genesisConfig: { $id: '#/config/genesisConfig', type: 'object', - required: ['blockTime', 'communityIdentifier', 'maxPayloadLength', 'rewards'], + required: ['blockTime', 'communityIdentifier', 'maxPayloadLength', 'rewards', 'roundLength'], properties: { blockTime: { type: 'number', @@ -83,6 +83,11 @@ export const applicationConfigSchema = { minimum: 0, description: 'Minimum fee per bytes required for a transaction to be valid', }, 
+ roundLength: { + type: 'number', + minimum: 1, + description: 'Number of slots in a round', + }, baseFees: { type: 'array', description: 'Base fee for a transaction to be valid', @@ -383,6 +388,7 @@ export const applicationConfigSchema = { bftThreshold: 68, minFeePerByte: 1000, baseFees: [], + roundLength: 103, rewards: { milestones: [ '500000000', // Initial Reward diff --git a/framework/src/testing/block_processing_env.ts b/framework/src/testing/block_processing_env.ts index 703728ff45b..dfb454a9e1b 100644 --- a/framework/src/testing/block_processing_env.ts +++ b/framework/src/testing/block_processing_env.ts @@ -118,6 +118,7 @@ const getProcessor = ( minFeePerByte: appConfig.genesisConfig.minFeePerByte, baseFees: appConfig.genesisConfig.baseFees, accountSchemas: getAccountSchemaFromModules(modules), + roundLength: appConfig.genesisConfig.roundLength, }); const bftModule = new BFT({ @@ -156,10 +157,10 @@ const getMaxHeightPreviouslyForged = async ( passphrase: string, ): Promise => { const NUM_OF_ROUNDS = 3; - const NUM_OF_DELEGATES = - defaultConfig.genesisConfig.activeDelegates + defaultConfig.genesisConfig.standbyDelegates; + const ROUND_LENGTH = + defaultConfig.genesisConfig.roundLength; const toHeight = previousBlock.height; - const fromHeight = Math.max(0, toHeight - NUM_OF_DELEGATES * NUM_OF_ROUNDS); + const fromHeight = Math.max(0, toHeight - ROUND_LENGTH * NUM_OF_ROUNDS); const { publicKey } = getPrivateAndPublicKeyFromPassphrase(passphrase); const lastBlockHeaders = await processor['_chain'].dataAccess.getBlockHeadersByHeightBetween( fromHeight, diff --git a/framework/src/testing/fixtures/config.ts b/framework/src/testing/fixtures/config.ts index 33e73aa6499..ea24b7d5422 100644 --- a/framework/src/testing/fixtures/config.ts +++ b/framework/src/testing/fixtures/config.ts @@ -56,8 +56,7 @@ export const defaultConfig = { distance: 3000000, // Distance between each milestone }, minRemainingBalance: '5000000', - activeDelegates: 101, - standbyDelegates: 2, 
+ roundLength: 103, delegateListRoundOffset: 2, }, forging: { diff --git a/framework/src/types.ts b/framework/src/types.ts index 21e14582df4..af89faaeb7f 100644 --- a/framework/src/types.ts +++ b/framework/src/types.ts @@ -135,6 +135,7 @@ export interface GenesisConfig { distance: number; }; minFeePerByte: number; + roundLength: number; baseFees: { moduleID: number; assetID: number; diff --git a/framework/test/integration/node/processor/block_process/protocol_violation.spec.ts b/framework/test/integration/node/processor/block_process/protocol_violation.spec.ts index bc91e40aba9..3100181407b 100644 --- a/framework/test/integration/node/processor/block_process/protocol_violation.spec.ts +++ b/framework/test/integration/node/processor/block_process/protocol_violation.spec.ts @@ -125,7 +125,7 @@ describe('given a block with protocol violation', () => { it('should accept a block if reward is full and forger did not forget last 2 rounds', async () => { // Arrange await processEnv.processUntilHeight(1); - const targetHeight = chain.numberOfValidators * 2 + chain.lastBlock.header.height; + const targetHeight = chain.roundLength * 2 + chain.lastBlock.header.height; const targetGenerator = chain.lastBlock.header.generatorPublicKey; const target = getAddressFromPublicKey(targetGenerator); // Forge 2 rounds of block without generator of the last block @@ -149,7 +149,7 @@ describe('given a block with protocol violation', () => { describe('when BFT protocol is violated', () => { it('should reject a block if reward is not quarter', async () => { await processEnv.processUntilHeight(1); - const targetHeight = chain.numberOfValidators * 2 + chain.lastBlock.header.height; + const targetHeight = chain.roundLength * 2 + chain.lastBlock.header.height; const targetGenerator = chain.lastBlock.header.generatorPublicKey; const target = getAddressFromPublicKey(targetGenerator); // Forge 2 rounds of block without generator of the last block @@ -169,7 +169,7 @@ describe('given a block with 
protocol violation', () => { it('should accept a block if reward is quarter', async () => { await processEnv.processUntilHeight(1); - const targetHeight = chain.numberOfValidators * 2 + chain.lastBlock.header.height; + const targetHeight = chain.roundLength * 2 + chain.lastBlock.header.height; const targetGenerator = chain.lastBlock.header.generatorPublicKey; const target = getAddressFromPublicKey(targetGenerator); // Forge 2 rounds of block without generator of the last block diff --git a/framework/test/integration/node/synchronizer/temp_block.spec.ts b/framework/test/integration/node/synchronizer/temp_block.spec.ts index 21b723be7f2..24f0f13fd7a 100644 --- a/framework/test/integration/node/synchronizer/temp_block.spec.ts +++ b/framework/test/integration/node/synchronizer/temp_block.spec.ts @@ -51,7 +51,7 @@ describe('Temp block', () => { describe('given a blockchain with more than 3 rounds', () => { describe('when deleting 100 blocks and saving to the temp blocks chain', () => { it('should successfully store to temp block and restore from temp block', async () => { - const targetHeight = processEnv.getLastBlock().header.height + chain.numberOfValidators * 3; + const targetHeight = processEnv.getLastBlock().header.height + chain.roundLength * 3; while (chain.lastBlock.header.height < targetHeight) { const genesisAccount = await chain.dataAccess.getAccountByAddress( genesis.address, @@ -83,7 +83,7 @@ describe('Temp block', () => { }); it('should successfully store to temp block and build new chain on top', async () => { - const targetHeight = chain.lastBlock.header.height + chain.numberOfValidators * 3; + const targetHeight = chain.lastBlock.header.height + chain.roundLength * 3; while (chain.lastBlock.header.height < targetHeight) { const genesisAccount = await chain.dataAccess.getAccountByAddress( genesis.address, diff --git a/framework/test/unit/__snapshots__/application.spec.ts.snap b/framework/test/unit/__snapshots__/application.spec.ts.snap index 
7caf309c392..2235c193001 100644 --- a/framework/test/unit/__snapshots__/application.spec.ts.snap +++ b/framework/test/unit/__snapshots__/application.spec.ts.snap @@ -2199,6 +2199,7 @@ Object { ], "offset": 2160, }, + "roundLength": 103, "standbyDelegates": 2, }, "label": "devnet", diff --git a/framework/test/unit/modules/dpos/dpos_module.spec.ts b/framework/test/unit/modules/dpos/dpos_module.spec.ts index 8955240e119..07063d0fc25 100644 --- a/framework/test/unit/modules/dpos/dpos_module.spec.ts +++ b/framework/test/unit/modules/dpos/dpos_module.spec.ts @@ -55,6 +55,7 @@ describe('DPoSModule', () => { communityIdentifier: 'lisk', maxPayloadLength: 15360, minFeePerByte: 1, + roundLength: 103, rewards: { distance: 1, milestones: ['milestone'], diff --git a/framework/test/unit/modules/keys/keys_module.spec.ts b/framework/test/unit/modules/keys/keys_module.spec.ts index 66ce8b9ce1e..1f824b8792d 100644 --- a/framework/test/unit/modules/keys/keys_module.spec.ts +++ b/framework/test/unit/modules/keys/keys_module.spec.ts @@ -62,6 +62,7 @@ describe('keys module', () => { communityIdentifier: 'lisk', maxPayloadLength: 15360, minFeePerByte: 1, + roundLength: 103, rewards: { distance: 1, milestones: ['milestone'], diff --git a/framework/test/unit/modules/sequence/sequence_module.spec.ts b/framework/test/unit/modules/sequence/sequence_module.spec.ts index 4ed80f4b41d..562128520c7 100644 --- a/framework/test/unit/modules/sequence/sequence_module.spec.ts +++ b/framework/test/unit/modules/sequence/sequence_module.spec.ts @@ -61,6 +61,7 @@ describe('sequence module', () => { communityIdentifier: 'lisk', maxPayloadLength: 15360, minFeePerByte: 1, + roundLength: 103, rewards: { distance: 1, milestones: ['milestone'], diff --git a/framework/test/unit/modules/token/token_module.spec.ts b/framework/test/unit/modules/token/token_module.spec.ts index 9ce31af8f3a..0c107a5af46 100644 --- a/framework/test/unit/modules/token/token_module.spec.ts +++ 
b/framework/test/unit/modules/token/token_module.spec.ts @@ -45,6 +45,7 @@ describe('token module', () => { communityIdentifier: 'lisk', maxPayloadLength: 15360, minFeePerByte: 1, + roundLength: 103, rewards: { distance: 1, milestones: ['milestone'], diff --git a/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts b/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts index 9f344a59727..c45df3f3c3b 100644 --- a/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts +++ b/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts @@ -101,8 +101,8 @@ describe('block_synchronization_mechanism', () => { blockTime: constants.blockTime, minFeePerByte: constants.minFeePerByte, baseFees: constants.baseFees, + roundLength: constants.roundLength, }); - chainModule['_numberOfValidators'] = 103; dataAccessMock = { getConsensusState: jest.fn(), @@ -219,14 +219,13 @@ describe('block_synchronization_mechanism', () => { .mockReturnValue(peersList.connectedPeers as never); await chainModule.init(genesisBlock); - chainModule['_numberOfValidators'] = 103; // Used in getHighestCommonBlock network action payload const blockHeightsList = computeBlockHeightsList( bftModule.finalizedHeight, - chainModule.numberOfValidators, + chainModule.roundLength, 10, - Math.ceil(chainModule.lastBlock.header.height / chainModule.numberOfValidators), + Math.ceil(chainModule.lastBlock.header.height / chainModule.roundLength), ); blockList = [finalizedBlock as any]; @@ -616,9 +615,9 @@ describe('block_synchronization_mechanism', () => { // Used in getHighestCommonBlock network action payload const blockHeightsList = computeBlockHeightsList( bftModule.finalizedHeight, - chainModule.numberOfValidators, + chainModule.roundLength, 10, - Math.ceil(lastBlock.header.height / 
chainModule.numberOfValidators), + Math.ceil(lastBlock.header.height / chainModule.roundLength), ); const receivedBlock = createValidDefaultBlock({ @@ -685,7 +684,6 @@ describe('block_synchronization_mechanism', () => { .mockResolvedValue([lastBlock] as never); await chainModule.init(genesisBlock); - chainModule['_numberOfValidators'] = 103; try { await blockSynchronizationMechanism.run(receivedBlock); @@ -711,9 +709,9 @@ describe('block_synchronization_mechanism', () => { // Used in getHighestCommonBlock network action payload const blockHeightsList = computeBlockHeightsList( bftModule.finalizedHeight, - chainModule.numberOfValidators, + chainModule.roundLength, 10, - Math.ceil(chainModule.lastBlock.header.height / chainModule.numberOfValidators), + Math.ceil(chainModule.lastBlock.header.height / chainModule.roundLength), ); blockList = [finalizedBlock]; @@ -1172,9 +1170,9 @@ describe('block_synchronization_mechanism', () => { it('should return height list for given round', () => { const heightList = computeBlockHeightsList( bftModule.finalizedHeight, - chainModule.numberOfValidators, + chainModule.roundLength, 10, - Math.ceil(chainModule.lastBlock.header.height / chainModule.numberOfValidators), + Math.ceil(chainModule.lastBlock.header.height / chainModule.roundLength), ); expect(heightList).not.toBeEmpty(); }); diff --git a/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts b/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts index 327624c10ec..ac768c03e17 100644 --- a/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts +++ b/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts @@ -87,8 +87,8 @@ describe('fast_chain_switching_mechanism', () => { blockTime: constants.blockTime, minFeePerByte: constants.minFeePerByte, baseFees: 
constants.baseFees, + roundLength: constants.roundLength, }); - chainModule['_numberOfValidators'] = 103; dataAccessMock = { getConsensusState: jest.fn(), @@ -452,7 +452,7 @@ describe('fast_chain_switching_mechanism', () => { // the difference in height between the common block and the received block is > delegatesPerRound*2 const receivedBlock = createValidDefaultBlock({ header: { - height: highestCommonBlock.height + chainModule.numberOfValidators * 2 + 1, + height: highestCommonBlock.height + chainModule.roundLength * 2 + 1, }, }); await fastChainSwitchingMechanism.run(receivedBlock, aPeerId); @@ -461,7 +461,7 @@ describe('fast_chain_switching_mechanism', () => { checkIfAbortIsCalled( new Errors.AbortError( `Height difference between both chains is higher than ${ - chainModule.numberOfValidators * 2 + chainModule.roundLength * 2 }`, ), ); @@ -480,7 +480,7 @@ describe('fast_chain_switching_mechanism', () => { // Difference in height between the common block and the last block is > delegatesPerRound*2 lastBlock = createValidDefaultBlock({ header: { - height: highestCommonBlock.height + chainModule.numberOfValidators * 2 + 1, + height: highestCommonBlock.height + chainModule.roundLength * 2 + 1, }, }); when(chainModule.dataAccess.getBlockHeaderByHeight) @@ -513,7 +513,7 @@ describe('fast_chain_switching_mechanism', () => { .mockResolvedValue([lastBlock] as never); const heightList = new Array( - Math.min(chainModule.numberOfValidators * 2, chainModule.lastBlock.header.height), + Math.min(chainModule.roundLength * 2, chainModule.lastBlock.header.height), ) .fill(0) .map((_, index) => chainModule.lastBlock.header.height - index); @@ -543,7 +543,7 @@ describe('fast_chain_switching_mechanism', () => { // Act const receivedBlock = createValidDefaultBlock({ header: { - height: highestCommonBlock.height + chainModule.numberOfValidators * 2 + 1, + height: highestCommonBlock.height + chainModule.roundLength * 2 + 1, }, }); await 
fastChainSwitchingMechanism.run(receivedBlock, aPeerId); @@ -552,7 +552,7 @@ describe('fast_chain_switching_mechanism', () => { checkIfAbortIsCalled( new Errors.AbortError( `Height difference between both chains is higher than ${ - chainModule.numberOfValidators * 2 + chainModule.roundLength * 2 }`, ), ); diff --git a/framework/test/unit/node/synchronizer/synchronizer.spec.ts b/framework/test/unit/node/synchronizer/synchronizer.spec.ts index 5f63e4d1922..a0c90834dd9 100644 --- a/framework/test/unit/node/synchronizer/synchronizer.spec.ts +++ b/framework/test/unit/node/synchronizer/synchronizer.spec.ts @@ -87,6 +87,7 @@ describe('Synchronizer', () => { blockTime: constants.blockTime, minFeePerByte: constants.minFeePerByte, baseFees: constants.baseFees, + roundLength: constants.roundLength, }); dataAccessMock = { diff --git a/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap b/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap index 1741f8a2dac..abe0fe65e2e 100644 --- a/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap +++ b/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap @@ -29,6 +29,7 @@ Object { ], "offset": 2160, }, + "roundLength": 103, }, "label": "beta-sdk-app", "logger": Object { @@ -206,12 +207,18 @@ Object { ], "type": "object", }, + "roundLength": Object { + "description": "Number of slots in a round", + "minimum": 1, + "type": "number", + }, }, "required": Array [ "blockTime", "communityIdentifier", "maxPayloadLength", "rewards", + "roundLength", ], "type": "object", }, From f8103a79e0e0c802cd23e4f725e1afca3df6df98 Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Wed, 20 Apr 2022 16:08:52 +0200 Subject: [PATCH 013/170] =?UTF-8?q?=F0=9F=94=A5=20Remove=20console=20state?= =?UTF-8?q?ment?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/node/synchronizer/block_synchronization_mechanism.ts | 1 - 1 file changed, 1 
deletion(-) diff --git a/framework/src/node/synchronizer/block_synchronization_mechanism.ts b/framework/src/node/synchronizer/block_synchronization_mechanism.ts index fbd653f4cad..26e83a89c48 100644 --- a/framework/src/node/synchronizer/block_synchronization_mechanism.ts +++ b/framework/src/node/synchronizer/block_synchronization_mechanism.ts @@ -367,7 +367,6 @@ export class BlockSynchronizationMechanism extends BaseSynchronizer { * corresponding to the first block of descendent consecutive rounds (starting from the last one). */ private async _requestLastCommonBlock(peerId: string): Promise { - console.log('-------->', this._chain.roundLength) const blocksPerRequestLimit = 10; // Maximum number of block IDs to be included in a single request const requestLimit = 3; // Maximum number of requests to be made to the remote peer From 85cf6eca8cae9fa5a415e3100c7ad2028947d97d Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Wed, 20 Apr 2022 16:19:32 +0200 Subject: [PATCH 014/170] =?UTF-8?q?=F0=9F=92=85=20Fix=20format?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../node/synchronizer/block_synchronization_mechanism.ts | 4 +--- .../node/synchronizer/fast_chain_switching_mechanism.ts | 8 ++------ framework/src/testing/block_processing_env.ts | 3 +-- .../fast_chain_switching_mechanism.spec.ts | 8 ++------ 4 files changed, 6 insertions(+), 17 deletions(-) diff --git a/framework/src/node/synchronizer/block_synchronization_mechanism.ts b/framework/src/node/synchronizer/block_synchronization_mechanism.ts index 26e83a89c48..14c381964bc 100644 --- a/framework/src/node/synchronizer/block_synchronization_mechanism.ts +++ b/framework/src/node/synchronizer/block_synchronization_mechanism.ts @@ -372,9 +372,7 @@ export class BlockSynchronizationMechanism extends BaseSynchronizer { let numberOfRequests = 1; // Keeps track of the number of requests made to the remote peer let highestCommonBlock; // Holds the common block returned by the 
peer if found. - let currentRound = Math.ceil( - this._chain.lastBlock.header.height / this._chain.roundLength, - ); // Holds the current round number + let currentRound = Math.ceil(this._chain.lastBlock.header.height / this._chain.roundLength); // Holds the current round number let currentHeight = currentRound * this._chain.roundLength; while ( diff --git a/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts b/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts index 89e1e016d51..2ae75508858 100644 --- a/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts +++ b/framework/src/node/synchronizer/fast_chain_switching_mechanism.ts @@ -164,9 +164,7 @@ export class FastChainSwitchingMechanism extends BaseSynchronizer { receivedBlock.header.height - highestCommonBlock.height > this._chain.roundLength * 2 ) { throw new AbortError( - `Height difference between both chains is higher than ${ - this._chain.roundLength * 2 - }`, + `Height difference between both chains is higher than ${this._chain.roundLength * 2}`, ); } @@ -315,9 +313,7 @@ export class FastChainSwitchingMechanism extends BaseSynchronizer { } private _computeLastTwoRoundsHeights(): number[] { - return new Array( - Math.min(this._chain.roundLength * 2, this._chain.lastBlock.header.height), - ) + return new Array(Math.min(this._chain.roundLength * 2, this._chain.lastBlock.header.height)) .fill(0) .map((_, index) => this._chain.lastBlock.header.height - index); } diff --git a/framework/src/testing/block_processing_env.ts b/framework/src/testing/block_processing_env.ts index dfb454a9e1b..b8d829b5120 100644 --- a/framework/src/testing/block_processing_env.ts +++ b/framework/src/testing/block_processing_env.ts @@ -157,8 +157,7 @@ const getMaxHeightPreviouslyForged = async ( passphrase: string, ): Promise => { const NUM_OF_ROUNDS = 3; - const ROUND_LENGTH = - defaultConfig.genesisConfig.roundLength; + const ROUND_LENGTH = defaultConfig.genesisConfig.roundLength; const toHeight 
= previousBlock.height; const fromHeight = Math.max(0, toHeight - ROUND_LENGTH * NUM_OF_ROUNDS); const { publicKey } = getPrivateAndPublicKeyFromPassphrase(passphrase); diff --git a/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts b/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts index ac768c03e17..371bfd95636 100644 --- a/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts +++ b/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts @@ -460,9 +460,7 @@ describe('fast_chain_switching_mechanism', () => { // Assert checkIfAbortIsCalled( new Errors.AbortError( - `Height difference between both chains is higher than ${ - chainModule.roundLength * 2 - }`, + `Height difference between both chains is higher than ${chainModule.roundLength * 2}`, ), ); expect(fastChainSwitchingMechanism['_queryBlocks']).toHaveBeenCalledWith( @@ -551,9 +549,7 @@ describe('fast_chain_switching_mechanism', () => { // Assert checkIfAbortIsCalled( new Errors.AbortError( - `Height difference between both chains is higher than ${ - chainModule.roundLength * 2 - }`, + `Height difference between both chains is higher than ${chainModule.roundLength * 2}`, ), ); expect(fastChainSwitchingMechanism['_queryBlocks']).toHaveBeenCalledWith( From fd4402875b0f2e063ece50134a4bf353e9e8c8f2 Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Thu, 21 Apr 2022 18:24:08 +0200 Subject: [PATCH 015/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20version?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- commander/package.json | 12 +++++------ .../templates/init/package-template.json | 8 ++++---- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 4 ++-- elements/lisk-bft/package.json | 4 ++-- 
elements/lisk-chain/package.json | 2 +- elements/lisk-client/package.json | 4 ++-- elements/lisk-elements/package.json | 10 +++++----- elements/lisk-genesis/package.json | 4 ++-- .../package.json | 6 +++--- .../lisk-framework-faucet-plugin/package.json | 8 ++++---- .../lisk-framework-forger-plugin/package.json | 10 +++++----- .../package.json | 6 +++--- .../package.json | 6 +++--- .../package.json | 8 ++++---- framework/package.json | 10 +++++----- sdk/package.json | 20 +++++++++---------- 17 files changed, 62 insertions(+), 62 deletions(-) diff --git a/commander/package.json b/commander/package.json index 416cf1080fb..aac27902907 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.1.10-alpha.0", + "version": "5.1.10-alpha.1", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,13 +91,13 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.0", - "@liskhq/lisk-chain": "^0.3.4-alpha.0", - "@liskhq/lisk-client": "^5.2.2-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", + "@liskhq/lisk-client": "^5.2.2-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.1", "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", "@liskhq/lisk-transactions": "^5.2.2-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.9.2-alpha.0", + "lisk-framework": "^0.9.2-alpha.1", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json 
b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index fc9e4b0ffdb..47695b3baed 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -98,15 +98,15 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.1.7-alpha.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.1.7-alpha.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.1.7-alpha.1", + "@liskhq/lisk-framework-faucet-plugin": "^0.1.7-alpha.1", "@oclif/command": "1.8.16", "@oclif/plugin-autocomplete": "1.2.0", "@oclif/plugin-help": "5.1.12", "fs-extra": "9.1.0", "inquirer": "7.3.2", - "lisk-commander": "^5.1.10-alpha.0", - "lisk-sdk": "^5.2.2-alpha.0", + "lisk-commander": "^5.1.10-alpha.1", + "lisk-sdk": "^5.2.2-alpha.1", "tar": "6.0.2", "tslib": "1.13.0", "axios": "0.21.1" diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 8e81f89d479..8563522cb12 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -29,7 +29,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "lisk-sdk": "^5.2.2-alpha.0" + "lisk-sdk": "^5.2.2-alpha.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 2a6f066a7e9..43005f0869f 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.1.6-alpha.0", + "version": "5.1.6-alpha.1", "description": "An API client for the Lisk network", 
"author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -45,7 +45,7 @@ "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "16.11.26", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index d7e277d8012..dc908de4f9f 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.3.4-alpha.0", + "version": "0.3.4-alpha.1", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 6cf97d9a467..f3ab8281156 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.3.4-alpha.0", + "version": "0.3.4-alpha.1", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index c3d78f1f2e5..c23173308cf 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.2.2-alpha.0", + "version": "5.2.2-alpha.1", "description": "A default set of Elements for use by clients of the Lisk network", "author": 
"Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -55,7 +55,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index ce7695f96eb..860fe586f75 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.2.2-alpha.0", + "version": "5.2.2-alpha.1", "description": "Elements for building blockchain applications in the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,13 +36,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.0", - "@liskhq/lisk-bft": "^0.3.4-alpha.0", - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", + "@liskhq/lisk-bft": "^0.3.4-alpha.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.1", "@liskhq/lisk-p2p": "^0.7.3-alpha.0", "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 87bf37a98e7..83c9995078e 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.2.4-alpha.0", + "version": "0.2.4-alpha.1", "description": "Library containing genesis block creation functions 
according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 1fcd283f4a5..d16e414b7c5 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.1.7-alpha.0", + "version": "0.1.7-alpha.1", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.2.2-alpha.0", + "@liskhq/lisk-client": "^5.2.2-alpha.1", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.9.2-alpha.0", + "lisk-framework": "^0.9.2-alpha.1", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 458c6258814..9e77af5c25c 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.1.7-alpha.0", + "version": "0.1.7-alpha.1", "description": 
"A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.0", - "@liskhq/lisk-client": "^5.2.2-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", + "@liskhq/lisk-client": "^5.2.2-alpha.1", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-transactions": "^5.2.2-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", "@liskhq/lisk-validator": "^0.6.2-alpha.0", "axios": "0.21.1", "express": "4.17.3", - "lisk-framework": "^0.9.2-alpha.0", + "lisk-framework": "^0.9.2-alpha.1", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 41b37a238c6..bb34435442f 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.2.7-alpha.0", + "version": "0.2.7-alpha.1", "description": "A plugin for lisk-framework that monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,7 +38,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-db": "^0.2.1-alpha.0", @@ -52,11 +52,11 @@ "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.9.2-alpha.0" + "lisk-framework": "^0.9.2-alpha.1" }, "devDependencies": { - 
"@liskhq/lisk-api-client": "^5.1.6-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", + "@liskhq/lisk-genesis": "^0.2.4-alpha.1", "@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 54b5684adc3..48f988af986 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.2.7-alpha.0", + "version": "0.2.7-alpha.1", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,14 +37,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-utils": "^0.2.1-alpha.0", "@liskhq/lisk-validator": "^0.6.2-alpha.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.2-alpha.0" + "lisk-framework": "^0.9.2-alpha.1" }, "devDependencies": { "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 452a730ff2a..4cf0b3d889c 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.2.7-alpha.0", + "version": "0.2.7-alpha.1", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", 
"license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", @@ -46,7 +46,7 @@ "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.2-alpha.0" + "lisk-framework": "^0.9.2-alpha.1" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 11b5f59be2e..771a0f547eb 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.2.7-alpha.0", + "version": "0.2.7-alpha.1", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,8 +38,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.3.4-alpha.0", - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-bft": "^0.3.4-alpha.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-db": "^0.2.1-alpha.0", @@ -48,7 +48,7 @@ "@liskhq/lisk-validator": "^0.6.2-alpha.0", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.9.2-alpha.0" + "lisk-framework": "^0.9.2-alpha.1" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework/package.json b/framework/package.json 
index 836a8300cfd..d88f785dcd1 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.9.2-alpha.0", + "version": "0.9.2-alpha.1", "description": "Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,13 +40,13 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.0", - "@liskhq/lisk-bft": "^0.3.4-alpha.0", - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", + "@liskhq/lisk-bft": "^0.3.4-alpha.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-genesis": "^0.2.4-alpha.1", "@liskhq/lisk-p2p": "^0.7.3-alpha.0", "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", "@liskhq/lisk-transactions": "^5.2.2-alpha.0", diff --git a/sdk/package.json b/sdk/package.json index 10479629aec..ca68eb67446 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.2.2-alpha.0", + "version": "5.2.2-alpha.1", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,17 +29,17 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.0", - "@liskhq/lisk-bft": "^0.3.4-alpha.0", - "@liskhq/lisk-chain": "^0.3.4-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6-alpha.1", + "@liskhq/lisk-bft": "^0.3.4-alpha.1", + "@liskhq/lisk-chain": "^0.3.4-alpha.1", "@liskhq/lisk-codec": "^0.2.2-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-framework-forger-plugin": "^0.2.7-alpha.0", - "@liskhq/lisk-framework-http-api-plugin": "^0.2.7-alpha.0", - 
"@liskhq/lisk-framework-monitor-plugin": "^0.2.7-alpha.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.7-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.0", + "@liskhq/lisk-framework-forger-plugin": "^0.2.7-alpha.1", + "@liskhq/lisk-framework-http-api-plugin": "^0.2.7-alpha.1", + "@liskhq/lisk-framework-monitor-plugin": "^0.2.7-alpha.1", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.7-alpha.1", + "@liskhq/lisk-genesis": "^0.2.4-alpha.1", "@liskhq/lisk-p2p": "^0.7.3-alpha.0", "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", @@ -47,7 +47,7 @@ "@liskhq/lisk-tree": "^0.2.2-alpha.0", "@liskhq/lisk-utils": "^0.2.1-alpha.0", "@liskhq/lisk-validator": "^0.6.2-alpha.0", - "lisk-framework": "^0.9.2-alpha.0" + "lisk-framework": "^0.9.2-alpha.1" }, "devDependencies": { "eslint": "7.22.0", From 3ad5ca7634a19fb3e0c6149d777dfd8f31ee6167 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 27 Apr 2022 15:26:16 +0200 Subject: [PATCH 016/170] :books: Update package description and readme version --- elements/lisk-elements/package.json | 2 +- framework/package.json | 2 +- sdk/README.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 860fe586f75..ff175584140 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,7 +1,7 @@ { "name": "lisk-elements", "version": "5.2.2-alpha.1", - "description": "Elements for building blockchain applications in the Lisk network", + "description": "Libraries to support building blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", "keywords": [ diff --git a/framework/package.json b/framework/package.json index d88f785dcd1..896da7f7a15 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,7 +1,7 @@ { "name": "lisk-framework", "version": 
"0.9.2-alpha.1", - "description": "Lisk blockchain application platform", + "description": "Framework to build blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", "keywords": [ diff --git a/sdk/README.md b/sdk/README.md index dbf0336a2ae..f8def6f7174 100644 --- a/sdk/README.md +++ b/sdk/README.md @@ -40,7 +40,7 @@ Before running Lisk SDK, the following dependencies need to be installed in orde | Dependencies | Version | | ------------ | ------- | -| NodeJS | 12+ | +| NodeJS | 16+ | ### Installation From 6db616a419a7f855d5d1ff513c84ca890655b86e Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 27 Apr 2022 15:37:53 +0200 Subject: [PATCH 017/170] :arrow_up: Bump dependencies --- yarn.lock | 68 ++++++++++++++++++++----------------------------------- 1 file changed, 24 insertions(+), 44 deletions(-) diff --git a/yarn.lock b/yarn.lock index 9e8d71bead6..bfff9708d96 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4726,9 +4726,9 @@ ansi-regex@^2.0.0: integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + integrity "sha1-Ej1keekq1FrYl9QFTjx8p9tJROE= sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==" ansi-regex@^4.0.0, ansi-regex@^4.1.0: version "4.1.0" @@ -5020,9 +5020,9 @@ async@^2.6.2: lodash "^4.17.14" async@^3.1.0, async@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/async/-/async-3.2.0.tgz#b3a2685c5ebb641d3de02d161002c60fc9f85720" - integrity sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw== + version "3.2.3" + resolved 
"https://registry.yarnpkg.com/async/-/async-3.2.3.tgz#ac53dafd3f4720ee9e8a160628f18ea91df196c9" + integrity sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g== asynckit@^0.4.0: version "0.4.0" @@ -5798,7 +5798,7 @@ cacache@^12.0.2: infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" - mkdirp "^0.5.1" + mkdirp "^0.5.6" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" @@ -6768,7 +6768,7 @@ copy-concurrently@^1.0.0: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" - mkdirp "^0.5.1" + mkdirp "^0.5.6" rimraf "^2.5.4" run-queue "^1.0.0" @@ -6784,7 +6784,7 @@ copyfiles@2.2.0: dependencies: glob "^7.0.5" minimatch "^3.0.3" - mkdirp "^0.5.1" + mkdirp "^0.5.6" noms "0.0.0" through2 "^2.0.1" yargs "^13.2.4" @@ -11382,7 +11382,7 @@ jest-util@^25.1.0: "@jest/types" "^25.1.0" chalk "^3.0.0" is-ci "^2.0.0" - mkdirp "^0.5.1" + mkdirp "^0.5.6" jest-util@^26.1.0, jest-util@^26.6.2: version "26.6.2" @@ -12672,20 +12672,10 @@ minimist-options@4.1.0: is-plain-obj "^1.1.0" kind-of "^6.0.3" -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0= - -minimist@^1.1.0, minimist@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" - integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ= - -minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== +minimist@0.0.8, minimist@1.2.6, minimist@^1.1.0, minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: + version "1.2.6" + resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== minipass-collect@^1.0.2: version "1.0.2" @@ -12852,20 +12842,10 @@ module-deps@^6.0.0: through2 "^2.0.0" xtend "^4.0.0" -moment@^2.10.6: - version "2.24.0" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b" - integrity sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg== - -moment@^2.19.3: - version "2.27.0" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.27.0.tgz#8bff4e3e26a236220dfe3e36de756b6ebaa0105d" - integrity sha512-al0MUK7cpIcglMv3YF13qSgdAIqxHTO7brRtaz3DlSULbqfazqkc5kEjNrLDOM7fsjshoFIihnU8snrP7zUvhQ== - -moment@^2.27.0: - version "2.29.1" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" - integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== +moment@^2.10.6, moment@^2.19.3, moment@^2.27.0: + version "2.29.3" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" + integrity "sha1-7dR0EcMiQTmZ96WUDVJt4YPAMfM= sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==" move-concurrently@^1.0.1: version "1.0.1" @@ -12875,7 +12855,7 @@ move-concurrently@^1.0.1: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" - mkdirp "^0.5.1" + mkdirp "^0.5.6" rimraf "^2.5.4" run-queue "^1.0.3" @@ -12933,7 +12913,7 @@ mv@~2: resolved "https://registry.yarnpkg.com/mv/-/mv-2.1.1.tgz#ae6ce0d6f6d5e0a4f7d893798d03c1ea9559b6a2" integrity sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI= dependencies: - mkdirp "~0.5.1" + mkdirp "~0.5.6" ncp "~2.0.0" rimraf "~2.4.0" @@ -13059,7 +13039,7 @@ node-gyp@^5.0.2: env-paths "^2.2.0" glob "^7.1.4" graceful-fs 
"^4.2.2" - mkdirp "^0.5.1" + mkdirp "^0.5.6" nopt "^4.0.1" npmlog "^4.1.2" request "^2.88.0" @@ -17523,7 +17503,7 @@ svgo@^1.0.0, svgo@^1.2.2: css-tree "1.0.0-alpha.37" csso "^4.0.2" js-yaml "^3.13.1" - mkdirp "~0.5.1" + mkdirp "~0.5.6" object.values "^1.1.0" sax "~1.2.4" stable "^0.1.8" @@ -17568,7 +17548,7 @@ tar-fs@^2.0.0: integrity sha512-vaY0obB6Om/fso8a8vakQBzwholQ7v5+uy+tF3Ozvxv1KNezmVQAiWtcNmMHFSFPqL3dJA8ha6gdtFbfX9mcxA== dependencies: chownr "^1.1.1" - mkdirp "^0.5.1" + mkdirp "^0.5.6" pump "^3.0.0" tar-stream "^2.0.0" @@ -18619,7 +18599,7 @@ webpack-dev-middleware@^3.7.2: dependencies: memory-fs "^0.4.1" mime "^2.4.4" - mkdirp "^0.5.1" + mkdirp "^0.5.6" range-parser "^1.2.1" webpack-log "^2.0.0" From df21bd992595c9b1eed528557774309731de663b Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 27 Apr 2022 16:56:29 +0200 Subject: [PATCH 018/170] :recycle: Update yarn.lock --- yarn.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/yarn.lock b/yarn.lock index bfff9708d96..f297348a9ef 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5798,7 +5798,7 @@ cacache@^12.0.2: infer-owner "^1.0.3" lru-cache "^5.1.1" mississippi "^3.0.0" - mkdirp "^0.5.6" + mkdirp "^0.5.1" move-concurrently "^1.0.1" promise-inflight "^1.0.1" rimraf "^2.6.3" @@ -6768,7 +6768,7 @@ copy-concurrently@^1.0.0: aproba "^1.1.1" fs-write-stream-atomic "^1.0.8" iferr "^0.1.5" - mkdirp "^0.5.6" + mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.0" @@ -6784,7 +6784,7 @@ copyfiles@2.2.0: dependencies: glob "^7.0.5" minimatch "^3.0.3" - mkdirp "^0.5.6" + mkdirp "^0.5.1" noms "0.0.0" through2 "^2.0.1" yargs "^13.2.4" @@ -11382,7 +11382,7 @@ jest-util@^25.1.0: "@jest/types" "^25.1.0" chalk "^3.0.0" is-ci "^2.0.0" - mkdirp "^0.5.6" + mkdirp "^0.5.1" jest-util@^26.1.0, jest-util@^26.6.2: version "26.6.2" @@ -12672,7 +12672,7 @@ minimist-options@4.1.0: is-plain-obj "^1.1.0" kind-of "^6.0.3" -minimist@0.0.8, minimist@1.2.6, minimist@^1.1.0, minimist@^1.1.1, 
minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: +minimist@^1.1.0, minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.6" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== @@ -12792,25 +12792,25 @@ mkdirp@1.x, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@^0.5.4, mkdirp@^0.5.5: +mkdirp@^0.5.0, mkdirp@^0.5.3, mkdirp@^0.5.4, mkdirp@^0.5.5: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" +mkdirp@^0.5.1, mkdirp@~0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + mkdirp@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.3.tgz#4cf2e30ad45959dddea53ad97d518b6c8205e1ea" integrity sha512-6uCP4Qc0sWsgMLy1EOqqS/3rjDHOEnsStVr/4vtAIK2Y5i2kA7lFFejYrpIyiN9w0pYf4ckeCYT9f1r1P9KX5g== -mkdirp@~0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM= - dependencies: - minimist "0.0.8" - mock-stdin@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/mock-stdin/-/mock-stdin-0.3.1.tgz#c657d9642d90786435c64ca5e99bbd4d09bd7dd3" @@ -12855,7 +12855,7 @@ 
move-concurrently@^1.0.1: aproba "^1.1.1" copy-concurrently "^1.0.0" fs-write-stream-atomic "^1.0.8" - mkdirp "^0.5.6" + mkdirp "^0.5.1" rimraf "^2.5.4" run-queue "^1.0.3" @@ -12913,7 +12913,7 @@ mv@~2: resolved "https://registry.yarnpkg.com/mv/-/mv-2.1.1.tgz#ae6ce0d6f6d5e0a4f7d893798d03c1ea9559b6a2" integrity sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI= dependencies: - mkdirp "~0.5.6" + mkdirp "~0.5.1" ncp "~2.0.0" rimraf "~2.4.0" @@ -13039,7 +13039,7 @@ node-gyp@^5.0.2: env-paths "^2.2.0" glob "^7.1.4" graceful-fs "^4.2.2" - mkdirp "^0.5.6" + mkdirp "^0.5.1" nopt "^4.0.1" npmlog "^4.1.2" request "^2.88.0" @@ -17503,7 +17503,7 @@ svgo@^1.0.0, svgo@^1.2.2: css-tree "1.0.0-alpha.37" csso "^4.0.2" js-yaml "^3.13.1" - mkdirp "~0.5.6" + mkdirp "~0.5.1" object.values "^1.1.0" sax "~1.2.4" stable "^0.1.8" @@ -17548,7 +17548,7 @@ tar-fs@^2.0.0: integrity sha512-vaY0obB6Om/fso8a8vakQBzwholQ7v5+uy+tF3Ozvxv1KNezmVQAiWtcNmMHFSFPqL3dJA8ha6gdtFbfX9mcxA== dependencies: chownr "^1.1.1" - mkdirp "^0.5.6" + mkdirp "^0.5.1" pump "^3.0.0" tar-stream "^2.0.0" @@ -18599,7 +18599,7 @@ webpack-dev-middleware@^3.7.2: dependencies: memory-fs "^0.4.1" mime "^2.4.4" - mkdirp "^0.5.6" + mkdirp "^0.5.1" range-parser "^1.2.1" webpack-log "^2.0.0" From a56ab694f032f156a8614fe7a0e3391f6ca840df Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Thu, 28 Apr 2022 15:03:06 +0200 Subject: [PATCH 019/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20version=205?= =?UTF-8?q?.2.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- commander/package.json | 26 ++++++------ .../templates/init/package-template.json | 8 ++-- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 10 ++--- elements/lisk-bft/package.json | 12 +++--- elements/lisk-chain/package.json | 16 ++++---- elements/lisk-client/package.json | 18 ++++----- elements/lisk-codec/package.json | 6 +-- elements/lisk-cryptography/package.json | 2 +- elements/lisk-db/package.json | 2 +- 
elements/lisk-elements/package.json | 30 +++++++------- elements/lisk-genesis/package.json | 12 +++--- elements/lisk-p2p/package.json | 8 ++-- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +-- elements/lisk-transactions/package.json | 8 ++-- elements/lisk-tree/package.json | 6 +-- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../package.json | 10 ++--- .../lisk-framework-faucet-plugin/package.json | 16 ++++---- .../lisk-framework-forger-plugin/package.json | 22 +++++----- .../package.json | 14 +++---- .../package.json | 14 +++---- .../package.json | 20 +++++----- framework/package.json | 30 +++++++------- sdk/package.json | 40 +++++++++---------- yarn.lock | 39 ++++++++++++++++++ 28 files changed, 212 insertions(+), 173 deletions(-) diff --git a/commander/package.json b/commander/package.json index aac27902907..61eb2fecd6c 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.1.10-alpha.1", + "version": "5.1.10", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,17 +91,17 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-client": "^5.2.2-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.1", - "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-client": "^5.2.2", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-genesis": "^0.2.4", + 
"@liskhq/lisk-passphrase": "^3.1.1", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "@oclif/command": "1.8.16", "@oclif/config": "1.18.3", "@oclif/errors": "1.3.5", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.9.2-alpha.1", + "lisk-framework": "^0.9.2", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 47695b3baed..6f35596bd8d 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -98,15 +98,15 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.1.7-alpha.1", - "@liskhq/lisk-framework-faucet-plugin": "^0.1.7-alpha.1", + "@liskhq/lisk-framework-dashboard-plugin": "^0.1.7", + "@liskhq/lisk-framework-faucet-plugin": "^0.1.7", "@oclif/command": "1.8.16", "@oclif/plugin-autocomplete": "1.2.0", "@oclif/plugin-help": "5.1.12", "fs-extra": "9.1.0", "inquirer": "7.3.2", - "lisk-commander": "^5.1.10-alpha.1", - "lisk-sdk": "^5.2.2-alpha.1", + "lisk-commander": "^5.1.10", + "lisk-sdk": "^5.2.2", "tar": "6.0.2", "tslib": "1.13.0", "axios": "0.21.1" diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 8563522cb12..8edca7f9f97 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -29,7 +29,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, 
"dependencies": { - "lisk-sdk": "^5.2.2-alpha.1" + "lisk-sdk": "^5.2.2" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 43005f0869f..a1b1616343a 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.1.6-alpha.1", + "version": "5.1.6", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-transactions": "^5.2.2", "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.1", + "@liskhq/lisk-chain": "^0.3.4", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "16.11.26", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index dc908de4f9f..22fbd27ea70 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.3.4-alpha.1", + "version": "0.3.4", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": 
"^0.6.2-alpha.0", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "@types/node": "16.11.26", "debug": "4.3.4" }, diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index f3ab8281156..6e12e6b313f 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.3.4-alpha.1", + "version": "0.3.4", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-tree": "^0.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-tree": "^0.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", + "@liskhq/lisk-passphrase": "^3.1.1", "@types/debug": "4.1.7", "@types/faker": "4.1.10", "@types/jest": "26.0.21", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index c23173308cf..3483ef88168 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.2.2-alpha.1", + "version": "5.2.2", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": 
"Apache-2.0", @@ -55,14 +55,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-tree": "^0.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-passphrase": "^3.1.1", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-tree": "^0.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index a2a211e1af4..75786cbc55e 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.2.2-alpha.0", + "version": "0.2.2", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0" + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 364ec331669..96e1c1a6fd1 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "3.2.1-alpha.0", + "version": "3.2.1", 
"description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-db/package.json b/elements/lisk-db/package.json index 9b7f7a80ed1..3a0db898ccd 100644 --- a/elements/lisk-db/package.json +++ b/elements/lisk-db/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-db", - "version": "0.2.1-alpha.0", + "version": "0.2.1", "description": "A database access implementation for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index ff175584140..456a0da71a8 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.2.2-alpha.1", + "version": "5.2.2", "description": "Libraries to support building blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,20 +36,20 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-bft": "^0.3.4-alpha.1", - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.1", - "@liskhq/lisk-p2p": "^0.7.3-alpha.0", - "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", - "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-tree": "^0.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0" + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-bft": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": 
"^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-genesis": "^0.2.4", + "@liskhq/lisk-p2p": "^0.7.3", + "@liskhq/lisk-passphrase": "^3.1.1", + "@liskhq/lisk-transaction-pool": "^0.5.3", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-tree": "^0.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 83c9995078e..6ce6e5be5b3 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.2.4-alpha.1", + "version": "0.2.4", "description": "Library containing genesis block creation functions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "lodash.clonedeep": "4.5.0" }, "devDependencies": { diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 42a6e309c48..75d39342a21 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.7.3-alpha.0", + "version": "0.7.3", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,9 +42,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - 
"@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-validator": "^0.6.2", "lodash.shuffle": "4.2.0", "semver": "7.3.5", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 14db756b7a1..37ce36609c5 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "3.1.1-alpha.0", + "version": "3.1.1", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 1f93b1eb351..c73bfbef64f 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.5.3-alpha.0", + "version": "0.5.3", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,8 +37,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-utils": "^0.2.1", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 7d955b17df7..736611d38ba 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "5.2.2-alpha.0", + "version": "5.2.2", "description": "Utility 
functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,9 +36,9 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0" + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-validator": "^0.6.2" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 3c99c60f924..53ab8c4c6bb 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.2.2-alpha.0", + "version": "0.2.2", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0" + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-utils": "^0.2.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index 793788be7ab..a1069f2915f 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.2.1-alpha.0", + "version": "0.2.1", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 571682eea2b..99bbb0467b8 100644 --- 
a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.6.2-alpha.0", + "version": "0.6.2", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1", "ajv": "8.1.0", "ajv-formats": "2.0.2", "debug": "4.3.4", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index d16e414b7c5..f4ca5ce4397 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.1.7-alpha.1", + "version": "0.1.7", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.2.2-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", + "@liskhq/lisk-client": "^5.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-utils": "^0.2.1", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.9.2-alpha.1", + "lisk-framework": "^0.9.2", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 9e77af5c25c..46e0b8c7daf 100644 --- 
a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.1.7-alpha.1", + "version": "0.1.7", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-client": "^5.2.2-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-client": "^5.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "axios": "0.21.1", "express": "4.17.3", - "lisk-framework": "^0.9.2-alpha.1", + "lisk-framework": "^0.9.2", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index bb34435442f..473139f9793 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.2.7-alpha.1", + "version": "0.2.7", "description": "A plugin for lisk-framework that monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,13 +38,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - 
"@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "axios": "0.21.1", "cors": "2.8.5", "debug": "4.3.4", @@ -52,11 +52,11 @@ "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.9.2-alpha.1" + "lisk-framework": "^0.9.2" }, "devDependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-genesis": "^0.2.4-alpha.1", + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-genesis": "^0.2.4", "@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 48f988af986..dde20b32358 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.2.7-alpha.1", + "version": "0.2.7", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,18 +37,18 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-utils": "^0.2.1", + 
"@liskhq/lisk-validator": "^0.6.2", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.2-alpha.1" + "lisk-framework": "^0.9.2" }, "devDependencies": { - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-transactions": "^5.2.2", "@types/cors": "2.8.6", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 4cf0b3d889c..5b8575bf37c 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.2.7-alpha.1", + "version": "0.2.7", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,16 +37,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.2-alpha.1" + "lisk-framework": "^0.9.2" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 771a0f547eb..f401dcba9c6 
100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.2.7-alpha.1", + "version": "0.2.7", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,17 +38,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.3.4-alpha.1", - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-bft": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.9.2-alpha.1" + "lisk-framework": "^0.9.2" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework/package.json b/framework/package.json index 896da7f7a15..df200a9f236 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.9.2-alpha.1", + "version": "0.9.2", "description": "Framework to build blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,19 +40,19 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - 
"@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-bft": "^0.3.4-alpha.1", - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": "^0.2.1-alpha.0", - "@liskhq/lisk-genesis": "^0.2.4-alpha.1", - "@liskhq/lisk-p2p": "^0.7.3-alpha.0", - "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-tree": "^0.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-bft": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-genesis": "^0.2.4", + "@liskhq/lisk-p2p": "^0.7.3", + "@liskhq/lisk-transaction-pool": "^0.5.3", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-tree": "^0.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.5", @@ -64,7 +64,7 @@ "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", + "@liskhq/lisk-passphrase": "^3.1.1", "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/sdk/package.json b/sdk/package.json index ca68eb67446..5e4aa532cc7 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.2.2-alpha.1", + "version": "5.2.2", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,25 +29,25 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6-alpha.1", - "@liskhq/lisk-bft": "^0.3.4-alpha.1", - "@liskhq/lisk-chain": "^0.3.4-alpha.1", - "@liskhq/lisk-codec": "^0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1-alpha.0", - "@liskhq/lisk-db": 
"^0.2.1-alpha.0", - "@liskhq/lisk-framework-forger-plugin": "^0.2.7-alpha.1", - "@liskhq/lisk-framework-http-api-plugin": "^0.2.7-alpha.1", - "@liskhq/lisk-framework-monitor-plugin": "^0.2.7-alpha.1", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.7-alpha.1", - "@liskhq/lisk-genesis": "^0.2.4-alpha.1", - "@liskhq/lisk-p2p": "^0.7.3-alpha.0", - "@liskhq/lisk-passphrase": "^3.1.1-alpha.0", - "@liskhq/lisk-transaction-pool": "^0.5.3-alpha.0", - "@liskhq/lisk-transactions": "^5.2.2-alpha.0", - "@liskhq/lisk-tree": "^0.2.2-alpha.0", - "@liskhq/lisk-utils": "^0.2.1-alpha.0", - "@liskhq/lisk-validator": "^0.6.2-alpha.0", - "lisk-framework": "^0.9.2-alpha.1" + "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-bft": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-codec": "^0.2.2", + "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-framework-forger-plugin": "^0.2.7", + "@liskhq/lisk-framework-http-api-plugin": "^0.2.7", + "@liskhq/lisk-framework-monitor-plugin": "^0.2.7", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.7", + "@liskhq/lisk-genesis": "^0.2.4", + "@liskhq/lisk-p2p": "^0.7.3", + "@liskhq/lisk-passphrase": "^3.1.1", + "@liskhq/lisk-transaction-pool": "^0.5.3", + "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-tree": "^0.2.2", + "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-validator": "^0.6.2", + "lisk-framework": "^0.9.2" }, "devDependencies": { "eslint": "7.22.0", diff --git a/yarn.lock b/yarn.lock index f297348a9ef..4a2de23e446 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2515,6 +2515,45 @@ dependencies: "@types/node" "11.11.2" +"@liskhq/lisk-codec@0.2.2-alpha.0": + version "0.2.2-alpha.0" + resolved "https://npm.lisk.com/@liskhq%2flisk-codec/-/lisk-codec-0.2.2-alpha.0.tgz#6e414b8540f348f12a43edf972e50c19b312c400" + integrity sha512-5b/BPEhrXXgbNb7JrXdiDlmV55kt4dtArUtuzYo789MWPGTKTdsBRsF5Mja/66Zpxoeu9N8lREOevwwbNWdu9Q== + dependencies: + "@liskhq/lisk-utils" "^0.2.1-alpha.0" + 
"@liskhq/lisk-validator" "^0.6.2-alpha.0" + +"@liskhq/lisk-cryptography@3.2.1-alpha.0": + version "3.2.1-alpha.0" + resolved "https://npm.lisk.com/@liskhq%2flisk-cryptography/-/lisk-cryptography-3.2.1-alpha.0.tgz#645a8f901912d973847c4f1205d849f771a638ae" + integrity sha512-IoX3/k6sz//eT6eEC47fZ0TVFsoJoSwNMfAgM94XAlrCIWUOrM2s9L2rGY+iNrqvLzz5y0BCwULQcRDPqhNBGA== + dependencies: + buffer-reverse "1.0.1" + ed2curve "0.3.0" + tweetnacl "1.0.3" + varuint-bitcoin "1.1.2" + optionalDependencies: + sodium-native "3.2.1" + +"@liskhq/lisk-passphrase@3.1.1-alpha.0": + version "3.1.1-alpha.0" + resolved "https://npm.lisk.com/@liskhq%2flisk-passphrase/-/lisk-passphrase-3.1.1-alpha.0.tgz#7ca4a28b2ccfc912c223b2ddad4afd87e0669b0d" + integrity sha512-KpyViYG/Gp0IQ2eknF0xnQ2Pgt15b3+CrMsiwN+Jzy+rsQPhoFC0yDZXT/1SXH/mQnf2PANwOgsTLz63pPbMNw== + dependencies: + bip39 "3.0.4" + +"@liskhq/lisk-validator@0.6.2-alpha.0": + version "0.6.2-alpha.0" + resolved "https://npm.lisk.com/@liskhq%2flisk-validator/-/lisk-validator-0.6.2-alpha.0.tgz#57c8e9f1275f58e7d3348a728b230d1dfadf6684" + integrity sha512-7JWqbQcxgd64bSKT6sP9qEnbYiHGgU5noE8rQ1NCDzMOf5pIfVQVsrkC947sD3Eib+3psFLMtajQ79zRiqJHmQ== + dependencies: + "@liskhq/lisk-cryptography" "^3.2.1-alpha.0" + ajv "8.1.0" + ajv-formats "2.0.2" + debug "4.3.4" + semver "7.3.5" + validator "13.7.0" + "@nodelib/fs.scandir@2.1.3": version "2.1.3" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz#3a582bdb53804c6ba6d146579c46e52130cf4a3b" From bde8ddc1a0822fa9f40b078c1d3639f2531f72e4 Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Fri, 29 Apr 2022 10:00:18 +0200 Subject: [PATCH 020/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20version=205?= =?UTF-8?q?.2.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- protocol-specs/package.json | 8 ++++---- yarn.lock | 39 ------------------------------------- 2 files changed, 4 insertions(+), 43 deletions(-) diff --git 
a/protocol-specs/package.json b/protocol-specs/package.json index 220898d0fd5..f650e47ca95 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.2.2-alpha.0", - "@liskhq/lisk-cryptography": "3.2.1-alpha.0", - "@liskhq/lisk-passphrase": "3.1.1-alpha.0", - "@liskhq/lisk-validator": "0.6.2-alpha.0", + "@liskhq/lisk-codec": "0.2.2", + "@liskhq/lisk-cryptography": "3.2.1", + "@liskhq/lisk-passphrase": "3.1.1", + "@liskhq/lisk-validator": "0.6.2", "protobufjs": "6.9.0" }, "devDependencies": { diff --git a/yarn.lock b/yarn.lock index 4a2de23e446..f297348a9ef 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2515,45 +2515,6 @@ dependencies: "@types/node" "11.11.2" -"@liskhq/lisk-codec@0.2.2-alpha.0": - version "0.2.2-alpha.0" - resolved "https://npm.lisk.com/@liskhq%2flisk-codec/-/lisk-codec-0.2.2-alpha.0.tgz#6e414b8540f348f12a43edf972e50c19b312c400" - integrity sha512-5b/BPEhrXXgbNb7JrXdiDlmV55kt4dtArUtuzYo789MWPGTKTdsBRsF5Mja/66Zpxoeu9N8lREOevwwbNWdu9Q== - dependencies: - "@liskhq/lisk-utils" "^0.2.1-alpha.0" - "@liskhq/lisk-validator" "^0.6.2-alpha.0" - -"@liskhq/lisk-cryptography@3.2.1-alpha.0": - version "3.2.1-alpha.0" - resolved "https://npm.lisk.com/@liskhq%2flisk-cryptography/-/lisk-cryptography-3.2.1-alpha.0.tgz#645a8f901912d973847c4f1205d849f771a638ae" - integrity sha512-IoX3/k6sz//eT6eEC47fZ0TVFsoJoSwNMfAgM94XAlrCIWUOrM2s9L2rGY+iNrqvLzz5y0BCwULQcRDPqhNBGA== - dependencies: - buffer-reverse "1.0.1" - ed2curve "0.3.0" - tweetnacl "1.0.3" - varuint-bitcoin "1.1.2" - optionalDependencies: - sodium-native "3.2.1" - -"@liskhq/lisk-passphrase@3.1.1-alpha.0": - version "3.1.1-alpha.0" - resolved "https://npm.lisk.com/@liskhq%2flisk-passphrase/-/lisk-passphrase-3.1.1-alpha.0.tgz#7ca4a28b2ccfc912c223b2ddad4afd87e0669b0d" - integrity sha512-KpyViYG/Gp0IQ2eknF0xnQ2Pgt15b3+CrMsiwN+Jzy+rsQPhoFC0yDZXT/1SXH/mQnf2PANwOgsTLz63pPbMNw== - dependencies: - bip39 "3.0.4" - 
-"@liskhq/lisk-validator@0.6.2-alpha.0": - version "0.6.2-alpha.0" - resolved "https://npm.lisk.com/@liskhq%2flisk-validator/-/lisk-validator-0.6.2-alpha.0.tgz#57c8e9f1275f58e7d3348a728b230d1dfadf6684" - integrity sha512-7JWqbQcxgd64bSKT6sP9qEnbYiHGgU5noE8rQ1NCDzMOf5pIfVQVsrkC947sD3Eib+3psFLMtajQ79zRiqJHmQ== - dependencies: - "@liskhq/lisk-cryptography" "^3.2.1-alpha.0" - ajv "8.1.0" - ajv-formats "2.0.2" - debug "4.3.4" - semver "7.3.5" - validator "13.7.0" - "@nodelib/fs.scandir@2.1.3": version "2.1.3" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz#3a582bdb53804c6ba6d146579c46e52130cf4a3b" From d3ad04f820694d866d77d7c5f7e53fa9c0cd47d3 Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Thu, 6 Oct 2022 18:51:43 +0200 Subject: [PATCH 021/170] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Update=20expirePee?= =?UTF-8?q?rFromBucket=20to=20delete=20old=20peer=20when=20called?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- elements/lisk-p2p/src/utils/network.ts | 14 ++--- .../lisk-p2p/test/unit/utils/network.spec.ts | 51 +++++++++++++++++-- 2 files changed, 54 insertions(+), 11 deletions(-) diff --git a/elements/lisk-p2p/src/utils/network.ts b/elements/lisk-p2p/src/utils/network.ts index bbd7644d270..888e4fce88f 100644 --- a/elements/lisk-p2p/src/utils/network.ts +++ b/elements/lisk-p2p/src/utils/network.ts @@ -142,14 +142,16 @@ export const expirePeerFromBucket = ( thresholdTime: number, ): P2PEnhancedPeerInfo | undefined => { for (const [peerId, peer] of bucket) { - const timeDifference = Math.round( - Math.abs((peer.dateAdded as Date).getTime() - new Date().getTime()), - ); + if (peer.dateAdded) { + const timeDifference = Math.round( + Math.abs(new Date().getTime()- (peer.dateAdded as Date).getTime()), + ); - if (timeDifference >= thresholdTime) { - bucket.delete(peerId); + if (timeDifference >= thresholdTime) { + bucket.delete(peerId); - return peer; + return peer; + } } } diff --git 
a/elements/lisk-p2p/test/unit/utils/network.spec.ts b/elements/lisk-p2p/test/unit/utils/network.spec.ts index a7da1147d66..5b9602d6d97 100644 --- a/elements/lisk-p2p/test/unit/utils/network.spec.ts +++ b/elements/lisk-p2p/test/unit/utils/network.spec.ts @@ -22,8 +22,11 @@ import { NETWORK, getBucketId, PEER_TYPE, + expirePeerFromBucket, } from '../../../src/utils'; -import { DEFAULT_RANDOM_SECRET } from '../../../src/constants'; +import { DEFAULT_EVICTION_THRESHOLD_TIME, DEFAULT_RANDOM_SECRET } from '../../../src/constants'; +import { initPeerInfoList } from '../../utils/peers'; +import { P2PEnhancedPeerInfo } from '../../../src/types'; describe('utils/network', () => { const MAX_GROUP_NUM = 255; @@ -140,11 +143,49 @@ describe('utils/network', () => { }); describe('#expirePeerFromBucket', () => { - describe('when bucket contains old peers', () => { - it.todo('should return the evicted peer info'); + let peerBucket: Map; + const peers = initPeerInfoList(); + + beforeEach(() => { + peerBucket = new Map(); + + for (const p of peers) { + peerBucket.set(p?.peerId, { + ...p, + dateAdded: new Date(), + }); + } + }); + + it('should return the evicted peer info when bucket contains old peers', () => { + const peer1 = peerBucket.get(peers[0].peerId) as P2PEnhancedPeerInfo; + const timeNow = new Date(); + const oneDayOldTime = new Date(timeNow.getTime() - (DEFAULT_EVICTION_THRESHOLD_TIME + 1000)); + const oldPeer = { + ...peer1, + dateAdded: oneDayOldTime, + }; + peerBucket.set(peer1?.peerId, oldPeer); + expect(expirePeerFromBucket(peerBucket, DEFAULT_EVICTION_THRESHOLD_TIME)).toEqual(oldPeer); + }); + + it('should return undefined when bucket does not contains old peers', () => { + for (const p of peers) { + peerBucket.set(p?.peerId, { + ...p, + dateAdded: new Date(), + }); + } + expect(expirePeerFromBucket(peerBucket, DEFAULT_EVICTION_THRESHOLD_TIME)).toBeUndefined(); }); - describe('when bucket does not contains old peers', () => { - it.todo('should return undefined'); + + 
it('should return undefined when peers don\'t have dateAdded field', () => { + const peerBucketWithoutDateAdded = new Map(); + const peers = initPeerInfoList(); + for (const p of peers) { + peerBucketWithoutDateAdded.set(p?.peerId, p); + } + expect(expirePeerFromBucket(peerBucketWithoutDateAdded, DEFAULT_EVICTION_THRESHOLD_TIME)).toBeUndefined(); }); }); From 247555e08ba4a367926d32b078d26eeeab7b832f Mon Sep 17 00:00:00 2001 From: Ishan Tiwari Date: Mon, 10 Oct 2022 11:32:52 +0200 Subject: [PATCH 022/170] =?UTF-8?q?=F0=9F=92=85=20Fix=20formatting=20and?= =?UTF-8?q?=20remove=20typecasting?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- elements/lisk-p2p/src/utils/network.ts | 2 +- elements/lisk-p2p/test/unit/utils/network.spec.ts | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/elements/lisk-p2p/src/utils/network.ts b/elements/lisk-p2p/src/utils/network.ts index 888e4fce88f..a5d02f2de97 100644 --- a/elements/lisk-p2p/src/utils/network.ts +++ b/elements/lisk-p2p/src/utils/network.ts @@ -144,7 +144,7 @@ export const expirePeerFromBucket = ( for (const [peerId, peer] of bucket) { if (peer.dateAdded) { const timeDifference = Math.round( - Math.abs(new Date().getTime()- (peer.dateAdded as Date).getTime()), + Math.abs(new Date().getTime() - peer.dateAdded.getTime()), ); if (timeDifference >= thresholdTime) { diff --git a/elements/lisk-p2p/test/unit/utils/network.spec.ts b/elements/lisk-p2p/test/unit/utils/network.spec.ts index 5b9602d6d97..ec484ffb598 100644 --- a/elements/lisk-p2p/test/unit/utils/network.spec.ts +++ b/elements/lisk-p2p/test/unit/utils/network.spec.ts @@ -179,13 +179,15 @@ describe('utils/network', () => { expect(expirePeerFromBucket(peerBucket, DEFAULT_EVICTION_THRESHOLD_TIME)).toBeUndefined(); }); - it('should return undefined when peers don\'t have dateAdded field', () => { + it("should return undefined when peers don't have dateAdded field", () => { const peerBucketWithoutDateAdded 
= new Map(); const peers = initPeerInfoList(); for (const p of peers) { peerBucketWithoutDateAdded.set(p?.peerId, p); } - expect(expirePeerFromBucket(peerBucketWithoutDateAdded, DEFAULT_EVICTION_THRESHOLD_TIME)).toBeUndefined(); + expect( + expirePeerFromBucket(peerBucketWithoutDateAdded, DEFAULT_EVICTION_THRESHOLD_TIME), + ).toBeUndefined(); }); }); From ad2d2fa8788890bdcd6590b04cb9a4a7d470875b Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 7 Feb 2023 15:47:38 +0100 Subject: [PATCH 023/170] :arrow_up: Update dependency versions --- commander/jest.config.js | 3 + commander/package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 2 +- .../lisk-framework-forger-plugin/package.json | 4 +- .../package.json | 2 +- .../package.json | 2 +- package.json | 2 +- yarn.lock | 3154 ++++++++++------- 8 files changed, 1924 insertions(+), 1251 deletions(-) diff --git a/commander/jest.config.js b/commander/jest.config.js index d27856e86f1..6dd91839853 100644 --- a/commander/jest.config.js +++ b/commander/jest.config.js @@ -9,6 +9,9 @@ module.exports = { transform: { '^.+\\.(ts|tsx)$': 'ts-jest', }, + moduleNameMapper: { + "^axios$": "axios/dist/axios.js" + }, verbose: false, collectCoverage: false, coverageReporters: ['json'], diff --git a/commander/package.json b/commander/package.json index 61eb2fecd6c..eec489c31ef 100644 --- a/commander/package.json +++ b/commander/package.json @@ -106,9 +106,9 @@ "@oclif/config": "1.18.3", "@oclif/errors": "1.3.5", "@oclif/parser": "3.8.7", - "@oclif/plugin-autocomplete": "1.2.0", + "@oclif/plugin-autocomplete": "1.4.4", "@oclif/plugin-help": "5.1.12", - "axios": "0.21.1", + "axios": "1.3.2", "bip39": "3.0.4", "chalk": "4.1.0", "cli-table3": "0.6.0", @@ -122,7 +122,7 @@ "tar": "6.1.11", "ts-morph": "9.1.0", "tslib": "1.14.1", - "yeoman-environment": "3.3.0", + "yeoman-environment": "3.13.0", "yeoman-generator": "5.2.0" }, "devDependencies": { diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json 
b/framework-plugins/lisk-framework-faucet-plugin/package.json index 46e0b8c7daf..c904bac7fa7 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -47,7 +47,7 @@ "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", - "axios": "0.21.1", + "axios": "1.3.2", "express": "4.17.3", "lisk-framework": "^0.9.2", "react": "^17.0.1", diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 473139f9793..0f58c96fbee 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -45,7 +45,7 @@ "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", - "axios": "0.21.1", + "axios": "1.3.2", "cors": "2.8.5", "debug": "4.3.4", "express": "4.17.3", @@ -67,7 +67,7 @@ "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", - "axios": "0.21.1", + "axios": "1.3.2", "eslint": "7.22.0", "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index dde20b32358..6296f16a5f2 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -58,7 +58,7 @@ "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", - "axios": "0.26.1", + "axios": "1.3.2", "eslint": "7.22.0", "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 
f401dcba9c6..d3465ea7445 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -60,7 +60,7 @@ "@types/node": "16.11.26", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", - "axios": "0.26.1", + "axios": "1.3.2", "eslint": "7.22.0", "eslint-config-lisk-base": "2.0.1", "eslint-plugin-import": "2.22.1", diff --git a/package.json b/package.json index 06c5bbc888e..64ab8835298 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ "istanbul-lib-coverage": "3.0.0", "istanbul-lib-report": "3.0.0", "istanbul-reports": "3.0.0", - "lerna": "4.0.0", + "lerna": "6.4.1", "lint-staged": "10.5.4", "prettier": "2.2.1", "typescript": "4.2.3", diff --git a/yarn.lock b/yarn.lock index f297348a9ef..8df229cde39 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1504,6 +1504,11 @@ resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.2.tgz#30aa825f11d438671d585bd44e7fd564535fc210" integrity "sha1-MKqCXxHUOGcdWFvUTn/VZFNfwhA= sha512-82cpyJyKRoQoRi+14ibCeGPu0CwypgtBAdBhq1WfvagpCZNKqwXbKwXllYSMG91DhmG4jt9gN8eP6lGOtozuaw==" +"@gar/promisify@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" + integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw== + "@hapi/address@2.x.x": version "2.1.4" resolved "https://registry.yarnpkg.com/@hapi/address/-/address-2.1.4.tgz#5d67ed43f3fd41a69d4b9ff7b56e7c0d1d0a81e5" @@ -1548,7 +1553,12 @@ dependencies: "@hapi/hoek" "^9.0.0" -"@isaacs/string-locale-compare@^1.0.1": +"@hutson/parse-repository-url@^3.0.0": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@hutson/parse-repository-url/-/parse-repository-url-3.0.2.tgz#98c23c950a3d9b6c8f0daed06da6c3af06981340" + integrity 
sha512-H9XAx3hc0BQHY6l+IFSWHDySypcXsvsuLhgYLUGywmJ5pswRVQJUHpOsobnLYp2ZUaUlKiKDrgWWhosOwAEM8Q== + +"@isaacs/string-locale-compare@^1.1.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz#291c227e93fd407a96ecd59879a35809120e432b" integrity "sha1-KRwifpP9QHqW7NWYeaNYCRIOQys= sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==" @@ -1837,676 +1847,690 @@ "@types/yargs" "^15.0.0" chalk "^4.0.0" -"@lerna/add@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/add/-/add-4.0.0.tgz#c36f57d132502a57b9e7058d1548b7a565ef183f" - integrity sha512-cpmAH1iS3k8JBxNvnMqrGTTjbY/ZAiKa1ChJzFevMYY3eeqbvhsBKnBcxjRXtdrJ6bd3dCQM+ZtK+0i682Fhng== - dependencies: - "@lerna/bootstrap" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/filter-options" "4.0.0" - "@lerna/npm-conf" "4.0.0" - "@lerna/validation-error" "4.0.0" +"@lerna/add@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/add/-/add-6.4.1.tgz#fa20fe9ff875dc5758141262c8cde0d9a6481ec4" + integrity sha512-YSRnMcsdYnQtQQK0NSyrS9YGXvB3jzvx183o+JTH892MKzSlBqwpBHekCknSibyxga1HeZ0SNKQXgsHAwWkrRw== + dependencies: + "@lerna/bootstrap" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/npm-conf" "6.4.1" + "@lerna/validation-error" "6.4.1" dedent "^0.7.0" - npm-package-arg "^8.1.0" + npm-package-arg "8.1.1" p-map "^4.0.0" - pacote "^11.2.6" + pacote "^13.6.1" semver "^7.3.4" -"@lerna/bootstrap@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/bootstrap/-/bootstrap-4.0.0.tgz#5f5c5e2c6cfc8fcec50cb2fbe569a8c607101891" - integrity sha512-RkS7UbeM2vu+kJnHzxNRCLvoOP9yGNgkzRdy4UV2hNalD7EP41bLvRVOwRYQ7fhc2QcbhnKNdOBihYRL0LcKtw== - dependencies: - "@lerna/command" "4.0.0" - "@lerna/filter-options" "4.0.0" - "@lerna/has-npm-version" "4.0.0" - "@lerna/npm-install" "4.0.0" - "@lerna/package-graph" "4.0.0" - "@lerna/pulse-till-done" "4.0.0" - 
"@lerna/rimraf-dir" "4.0.0" - "@lerna/run-lifecycle" "4.0.0" - "@lerna/run-topologically" "4.0.0" - "@lerna/symlink-binary" "4.0.0" - "@lerna/symlink-dependencies" "4.0.0" - "@lerna/validation-error" "4.0.0" +"@lerna/bootstrap@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/bootstrap/-/bootstrap-6.4.1.tgz#a76ff22c3160d134fb60bcfddb3f8b0759b4f1ff" + integrity sha512-64cm0mnxzxhUUjH3T19ZSjPdn28vczRhhTXhNAvOhhU0sQgHrroam1xQC1395qbkV3iosSertlu8e7xbXW033w== + dependencies: + "@lerna/command" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/has-npm-version" "6.4.1" + "@lerna/npm-install" "6.4.1" + "@lerna/package-graph" "6.4.1" + "@lerna/pulse-till-done" "6.4.1" + "@lerna/rimraf-dir" "6.4.1" + "@lerna/run-lifecycle" "6.4.1" + "@lerna/run-topologically" "6.4.1" + "@lerna/symlink-binary" "6.4.1" + "@lerna/symlink-dependencies" "6.4.1" + "@lerna/validation-error" "6.4.1" + "@npmcli/arborist" "5.3.0" dedent "^0.7.0" get-port "^5.1.1" multimatch "^5.0.0" - npm-package-arg "^8.1.0" - npmlog "^4.1.2" + npm-package-arg "8.1.1" + npmlog "^6.0.2" p-map "^4.0.0" p-map-series "^2.1.0" p-waterfall "^2.1.1" - read-package-tree "^5.3.1" semver "^7.3.4" -"@lerna/changed@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/changed/-/changed-4.0.0.tgz#b9fc76cea39b9292a6cd263f03eb57af85c9270b" - integrity sha512-cD+KuPRp6qiPOD+BO6S6SN5cARspIaWSOqGBpGnYzLb4uWT8Vk4JzKyYtc8ym1DIwyoFXHosXt8+GDAgR8QrgQ== +"@lerna/changed@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/changed/-/changed-6.4.1.tgz#4da6d08df7c53bc90c0c0d9d04839f91dd6d70a9" + integrity sha512-Z/z0sTm3l/iZW0eTSsnQpcY5d6eOpNO0g4wMOK+hIboWG0QOTc8b28XCnfCUO+33UisKl8PffultgoaHMKkGgw== dependencies: - "@lerna/collect-updates" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/listable" "4.0.0" - "@lerna/output" "4.0.0" + "@lerna/collect-updates" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/listable" "6.4.1" + "@lerna/output" "6.4.1" -"@lerna/check-working-tree@4.0.0": 
- version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/check-working-tree/-/check-working-tree-4.0.0.tgz#257e36a602c00142e76082a19358e3e1ae8dbd58" - integrity sha512-/++bxM43jYJCshBiKP5cRlCTwSJdRSxVmcDAXM+1oUewlZJVSVlnks5eO0uLxokVFvLhHlC5kHMc7gbVFPHv6Q== +"@lerna/check-working-tree@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/check-working-tree/-/check-working-tree-6.4.1.tgz#c0dcb5c474faf214865058e2fedda44962367a4e" + integrity sha512-EnlkA1wxaRLqhJdn9HX7h+JYxqiTK9aWEFOPqAE8lqjxHn3RpM9qBp1bAdL7CeUk3kN1lvxKwDEm0mfcIyMbPA== dependencies: - "@lerna/collect-uncommitted" "4.0.0" - "@lerna/describe-ref" "4.0.0" - "@lerna/validation-error" "4.0.0" + "@lerna/collect-uncommitted" "6.4.1" + "@lerna/describe-ref" "6.4.1" + "@lerna/validation-error" "6.4.1" -"@lerna/child-process@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/child-process/-/child-process-4.0.0.tgz#341b96a57dffbd9705646d316e231df6fa4df6e1" - integrity sha512-XtCnmCT9eyVsUUHx6y/CTBYdV9g2Cr/VxyseTWBgfIur92/YKClfEtJTbOh94jRT62hlKLqSvux/UhxXVh613Q== +"@lerna/child-process@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/child-process/-/child-process-6.4.1.tgz#d697fb769f4c5b57c59f87471eb9b3d65be904a3" + integrity sha512-dvEKK0yKmxOv8pccf3I5D/k+OGiLxQp5KYjsrDtkes2pjpCFfQAMbmpol/Tqx6w/2o2rSaRrLsnX8TENo66FsA== dependencies: chalk "^4.1.0" execa "^5.0.0" strong-log-transformer "^2.1.0" -"@lerna/clean@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/clean/-/clean-4.0.0.tgz#8f778b6f2617aa2a936a6b5e085ae62498e57dc5" - integrity sha512-uugG2iN9k45ITx2jtd8nEOoAtca8hNlDCUM0N3lFgU/b1mEQYAPRkqr1qs4FLRl/Y50ZJ41wUz1eazS+d/0osA== - dependencies: - "@lerna/command" "4.0.0" - "@lerna/filter-options" "4.0.0" - "@lerna/prompt" "4.0.0" - "@lerna/pulse-till-done" "4.0.0" - "@lerna/rimraf-dir" "4.0.0" +"@lerna/clean@6.4.1": + version "6.4.1" + resolved 
"https://registry.yarnpkg.com/@lerna/clean/-/clean-6.4.1.tgz#e9ee365ee6879ee998b78b3269fad02b5f385771" + integrity sha512-FuVyW3mpos5ESCWSkQ1/ViXyEtsZ9k45U66cdM/HnteHQk/XskSQw0sz9R+whrZRUDu6YgYLSoj1j0YAHVK/3A== + dependencies: + "@lerna/command" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/prompt" "6.4.1" + "@lerna/pulse-till-done" "6.4.1" + "@lerna/rimraf-dir" "6.4.1" p-map "^4.0.0" p-map-series "^2.1.0" p-waterfall "^2.1.1" -"@lerna/cli@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/cli/-/cli-4.0.0.tgz#8eabd334558836c1664df23f19acb95e98b5bbf3" - integrity sha512-Neaw3GzFrwZiRZv2g7g6NwFjs3er1vhraIniEs0jjVLPMNC4eata0na3GfE5yibkM/9d3gZdmihhZdZ3EBdvYA== +"@lerna/cli@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/cli/-/cli-6.4.1.tgz#2b2d093baace40e822caee8c90f698e98a437a2f" + integrity sha512-2pNa48i2wzFEd9LMPKWI3lkW/3widDqiB7oZUM1Xvm4eAOuDWc9I3RWmAUIVlPQNf3n4McxJCvsZZ9BpQN50Fg== dependencies: - "@lerna/global-options" "4.0.0" + "@lerna/global-options" "6.4.1" dedent "^0.7.0" - npmlog "^4.1.2" + npmlog "^6.0.2" yargs "^16.2.0" -"@lerna/collect-uncommitted@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/collect-uncommitted/-/collect-uncommitted-4.0.0.tgz#855cd64612969371cfc2453b90593053ff1ba779" - integrity sha512-ufSTfHZzbx69YNj7KXQ3o66V4RC76ffOjwLX0q/ab//61bObJ41n03SiQEhSlmpP+gmFbTJ3/7pTe04AHX9m/g== +"@lerna/collect-uncommitted@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/collect-uncommitted/-/collect-uncommitted-6.4.1.tgz#ae62bcaa5ecaa5b7fbc41eb9ae90b6711be156ec" + integrity sha512-5IVQGhlLrt7Ujc5ooYA1Xlicdba/wMcDSnbQwr8ufeqnzV2z4729pLCVk55gmi6ZienH/YeBPHxhB5u34ofE0Q== dependencies: - "@lerna/child-process" "4.0.0" + "@lerna/child-process" "6.4.1" chalk "^4.1.0" - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/collect-updates@4.0.0": - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/@lerna/collect-updates/-/collect-updates-4.0.0.tgz#8e208b1bafd98a372ff1177f7a5e288f6bea8041" - integrity sha512-bnNGpaj4zuxsEkyaCZLka9s7nMs58uZoxrRIPJ+nrmrZYp1V5rrd+7/NYTuunOhY2ug1sTBvTAxj3NZQ+JKnOw== +"@lerna/collect-updates@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/collect-updates/-/collect-updates-6.4.1.tgz#4f7cf1c411f3253d0104e7b64cb0aa315a5dfc81" + integrity sha512-pzw2/FC+nIqYkknUHK9SMmvP3MsLEjxI597p3WV86cEDN3eb1dyGIGuHiKShtjvT08SKSwpTX+3bCYvLVxtC5Q== dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/describe-ref" "4.0.0" + "@lerna/child-process" "6.4.1" + "@lerna/describe-ref" "6.4.1" minimatch "^3.0.4" - npmlog "^4.1.2" + npmlog "^6.0.2" slash "^3.0.0" -"@lerna/command@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/command/-/command-4.0.0.tgz#991c7971df8f5bf6ae6e42c808869a55361c1b98" - integrity sha512-LM9g3rt5FsPNFqIHUeRwWXLNHJ5NKzOwmVKZ8anSp4e1SPrv2HNc1V02/9QyDDZK/w+5POXH5lxZUI1CHaOK/A== - dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/package-graph" "4.0.0" - "@lerna/project" "4.0.0" - "@lerna/validation-error" "4.0.0" - "@lerna/write-log-file" "4.0.0" +"@lerna/command@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/command/-/command-6.4.1.tgz#96c4f5d88792c6c638738c66fcc3a7ad0d2487e2" + integrity sha512-3Lifj8UTNYbRad8JMP7IFEEdlIyclWyyvq/zvNnTS9kCOEymfmsB3lGXr07/AFoi6qDrvN64j7YSbPZ6C6qonw== + dependencies: + "@lerna/child-process" "6.4.1" + "@lerna/package-graph" "6.4.1" + "@lerna/project" "6.4.1" + "@lerna/validation-error" "6.4.1" + "@lerna/write-log-file" "6.4.1" clone-deep "^4.0.1" dedent "^0.7.0" execa "^5.0.0" is-ci "^2.0.0" - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/conventional-commits@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/conventional-commits/-/conventional-commits-4.0.0.tgz#660fb2c7b718cb942ead70110df61f18c6f99750" - integrity 
sha512-CSUQRjJHFrH8eBn7+wegZLV3OrNc0Y1FehYfYGhjLE2SIfpCL4bmfu/ViYuHh9YjwHaA+4SX6d3hR+xkeseKmw== +"@lerna/conventional-commits@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/conventional-commits/-/conventional-commits-6.4.1.tgz#b8d44a8a71865b4d37b900137acef623f3a0a11b" + integrity sha512-NIvCOjStjQy5O8VojB7/fVReNNDEJOmzRG2sTpgZ/vNS4AzojBQZ/tobzhm7rVkZZ43R9srZeuhfH9WgFsVUSA== dependencies: - "@lerna/validation-error" "4.0.0" + "@lerna/validation-error" "6.4.1" conventional-changelog-angular "^5.0.12" - conventional-changelog-core "^4.2.2" + conventional-changelog-core "^4.2.4" conventional-recommended-bump "^6.1.0" fs-extra "^9.1.0" get-stream "^6.0.0" - lodash.template "^4.5.0" - npm-package-arg "^8.1.0" - npmlog "^4.1.2" + npm-package-arg "8.1.1" + npmlog "^6.0.2" pify "^5.0.0" semver "^7.3.4" -"@lerna/create-symlink@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/create-symlink/-/create-symlink-4.0.0.tgz#8c5317ce5ae89f67825443bd7651bf4121786228" - integrity sha512-I0phtKJJdafUiDwm7BBlEUOtogmu8+taxq6PtIrxZbllV9hWg59qkpuIsiFp+no7nfRVuaasNYHwNUhDAVQBig== +"@lerna/create-symlink@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/create-symlink/-/create-symlink-6.4.1.tgz#0efec22d78dd814a70d8345ced52c39beb05874b" + integrity sha512-rNivHFYV1GAULxnaTqeGb2AdEN2OZzAiZcx5CFgj45DWXQEGwPEfpFmCSJdXhFZbyd3K0uiDlAXjAmV56ov3FQ== dependencies: - cmd-shim "^4.1.0" + cmd-shim "^5.0.0" fs-extra "^9.1.0" - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/create@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/create/-/create-4.0.0.tgz#b6947e9b5dfb6530321952998948c3e63d64d730" - integrity sha512-mVOB1niKByEUfxlbKTM1UNECWAjwUdiioIbRQZEeEabtjCL69r9rscIsjlGyhGWCfsdAG5wfq4t47nlDXdLLag== +"@lerna/create@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/create/-/create-6.4.1.tgz#3fc8556adadff1265432a6cee69ee14465798e71" + integrity 
sha512-qfQS8PjeGDDlxEvKsI/tYixIFzV2938qLvJohEKWFn64uvdLnXCamQ0wvRJST8p1ZpHWX4AXrB+xEJM3EFABrA== dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/npm-conf" "4.0.0" - "@lerna/validation-error" "4.0.0" + "@lerna/child-process" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/npm-conf" "6.4.1" + "@lerna/validation-error" "6.4.1" dedent "^0.7.0" fs-extra "^9.1.0" - globby "^11.0.2" - init-package-json "^2.0.2" - npm-package-arg "^8.1.0" + init-package-json "^3.0.2" + npm-package-arg "8.1.1" p-reduce "^2.1.0" - pacote "^11.2.6" + pacote "^13.6.1" pify "^5.0.0" semver "^7.3.4" slash "^3.0.0" validate-npm-package-license "^3.0.4" - validate-npm-package-name "^3.0.0" - whatwg-url "^8.4.0" + validate-npm-package-name "^4.0.0" yargs-parser "20.2.4" -"@lerna/describe-ref@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/describe-ref/-/describe-ref-4.0.0.tgz#53c53b4ea65fdceffa072a62bfebe6772c45d9ec" - integrity sha512-eTU5+xC4C5Gcgz+Ey4Qiw9nV2B4JJbMulsYJMW8QjGcGh8zudib7Sduj6urgZXUYNyhYpRs+teci9M2J8u+UvQ== +"@lerna/describe-ref@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/describe-ref/-/describe-ref-6.4.1.tgz#c0a0beca5dfeada3a39b030f69c8c98f5623bb13" + integrity sha512-MXGXU8r27wl355kb1lQtAiu6gkxJ5tAisVJvFxFM1M+X8Sq56icNoaROqYrvW6y97A9+3S8Q48pD3SzkFv31Xw== dependencies: - "@lerna/child-process" "4.0.0" - npmlog "^4.1.2" + "@lerna/child-process" "6.4.1" + npmlog "^6.0.2" -"@lerna/diff@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/diff/-/diff-4.0.0.tgz#6d3071817aaa4205a07bf77cfc6e932796d48b92" - integrity sha512-jYPKprQVg41+MUMxx6cwtqsNm0Yxx9GDEwdiPLwcUTFx+/qKCEwifKNJ1oGIPBxyEHX2PFCOjkK39lHoj2qiag== +"@lerna/diff@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/diff/-/diff-6.4.1.tgz#ca9e62a451ce199faaa7ef5990ded3fad947e2f9" + integrity sha512-TnzJsRPN2fOjUrmo5Boi43fJmRtBJDsVgwZM51VnLoKcDtO1kcScXJ16Od2Xx5bXbp5dES5vGDLL/USVVWfeAg== dependencies: - 
"@lerna/child-process" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/validation-error" "4.0.0" - npmlog "^4.1.2" + "@lerna/child-process" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/validation-error" "6.4.1" + npmlog "^6.0.2" -"@lerna/exec@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/exec/-/exec-4.0.0.tgz#eb6cb95cb92d42590e9e2d628fcaf4719d4a8be6" - integrity sha512-VGXtL/b/JfY84NB98VWZpIExfhLOzy0ozm/0XaS4a2SmkAJc5CeUfrhvHxxkxiTBLkU+iVQUyYEoAT0ulQ8PCw== - dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/filter-options" "4.0.0" - "@lerna/profiler" "4.0.0" - "@lerna/run-topologically" "4.0.0" - "@lerna/validation-error" "4.0.0" +"@lerna/exec@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/exec/-/exec-6.4.1.tgz#493ce805b6959e8299ec58fab8d31fd01ed209ba" + integrity sha512-KAWfuZpoyd3FMejHUORd0GORMr45/d9OGAwHitfQPVs4brsxgQFjbbBEEGIdwsg08XhkDb4nl6IYVASVTq9+gA== + dependencies: + "@lerna/child-process" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/profiler" "6.4.1" + "@lerna/run-topologically" "6.4.1" + "@lerna/validation-error" "6.4.1" p-map "^4.0.0" -"@lerna/filter-options@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/filter-options/-/filter-options-4.0.0.tgz#ac94cc515d7fa3b47e2f7d74deddeabb1de5e9e6" - integrity sha512-vV2ANOeZhOqM0rzXnYcFFCJ/kBWy/3OA58irXih9AMTAlQLymWAK0akWybl++sUJ4HB9Hx12TOqaXbYS2NM5uw== +"@lerna/filter-options@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/filter-options/-/filter-options-6.4.1.tgz#571d37436878fab8b2ac84ca1c3863acd3515cfb" + integrity sha512-efJh3lP2T+9oyNIP2QNd9EErf0Sm3l3Tz8CILMsNJpjSU6kO43TYWQ+L/ezu2zM99KVYz8GROLqDcHRwdr8qUA== dependencies: - "@lerna/collect-updates" "4.0.0" - "@lerna/filter-packages" "4.0.0" + "@lerna/collect-updates" "6.4.1" + "@lerna/filter-packages" "6.4.1" dedent "^0.7.0" - npmlog "^4.1.2" + npmlog "^6.0.2" 
-"@lerna/filter-packages@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/filter-packages/-/filter-packages-4.0.0.tgz#b1f70d70e1de9cdd36a4e50caa0ac501f8d012f2" - integrity sha512-+4AJIkK7iIiOaqCiVTYJxh/I9qikk4XjNQLhE3kixaqgMuHl1NQ99qXRR0OZqAWB9mh8Z1HA9bM5K1HZLBTOqA== +"@lerna/filter-packages@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/filter-packages/-/filter-packages-6.4.1.tgz#e138b182816a049c81de094069cad12aaa41a236" + integrity sha512-LCMGDGy4b+Mrb6xkcVzp4novbf5MoZEE6ZQF1gqG0wBWqJzNcKeFiOmf352rcDnfjPGZP6ct5+xXWosX/q6qwg== dependencies: - "@lerna/validation-error" "4.0.0" + "@lerna/validation-error" "6.4.1" multimatch "^5.0.0" - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/get-npm-exec-opts@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/get-npm-exec-opts/-/get-npm-exec-opts-4.0.0.tgz#dc955be94a4ae75c374ef9bce91320887d34608f" - integrity sha512-yvmkerU31CTWS2c7DvmAWmZVeclPBqI7gPVr5VATUKNWJ/zmVcU4PqbYoLu92I9Qc4gY1TuUplMNdNuZTSL7IQ== +"@lerna/get-npm-exec-opts@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/get-npm-exec-opts/-/get-npm-exec-opts-6.4.1.tgz#42681f6db4238277889b3423f87308eda5dc01ec" + integrity sha512-IvN/jyoklrWcjssOf121tZhOc16MaFPOu5ii8a+Oy0jfTriIGv929Ya8MWodj75qec9s+JHoShB8yEcMqZce4g== dependencies: - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/get-packed@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/get-packed/-/get-packed-4.0.0.tgz#0989d61624ac1f97e393bdad2137c49cd7a37823" - integrity sha512-rfWONRsEIGyPJTxFzC8ECb3ZbsDXJbfqWYyeeQQDrJRPnEJErlltRLPLgC2QWbxFgFPsoDLeQmFHJnf0iDfd8w== +"@lerna/get-packed@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/get-packed/-/get-packed-6.4.1.tgz#b3b8b907002d50bf8792dd97e2729249c0b0e0cd" + integrity sha512-uaDtYwK1OEUVIXn84m45uPlXShtiUcw6V9TgB3rvHa3rrRVbR7D4r+JXcwVxLGrAS7LwxVbYWEEO/Z/bX7J/Lg== dependencies: fs-extra "^9.1.0" - ssri "^8.0.1" + ssri "^9.0.1" tar 
"^6.1.0" -"@lerna/github-client@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/github-client/-/github-client-4.0.0.tgz#2ced67721363ef70f8e12ffafce4410918f4a8a4" - integrity sha512-2jhsldZtTKXYUBnOm23Lb0Fx8G4qfSXF9y7UpyUgWUj+YZYd+cFxSuorwQIgk5P4XXrtVhsUesIsli+BYSThiw== +"@lerna/github-client@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/github-client/-/github-client-6.4.1.tgz#25d19b440395a6039b9162ee58dadb9dce990ff0" + integrity sha512-ridDMuzmjMNlcDmrGrV9mxqwUKzt9iYqCPwVYJlRYrnE3jxyg+RdooquqskVFj11djcY6xCV2Q2V1lUYwF+PmA== dependencies: - "@lerna/child-process" "4.0.0" + "@lerna/child-process" "6.4.1" "@octokit/plugin-enterprise-rest" "^6.0.1" - "@octokit/rest" "^18.1.0" - git-url-parse "^11.4.4" - npmlog "^4.1.2" + "@octokit/rest" "^19.0.3" + git-url-parse "^13.1.0" + npmlog "^6.0.2" -"@lerna/gitlab-client@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/gitlab-client/-/gitlab-client-4.0.0.tgz#00dad73379c7b38951d4b4ded043504c14e2b67d" - integrity sha512-OMUpGSkeDWFf7BxGHlkbb35T7YHqVFCwBPSIR6wRsszY8PAzCYahtH3IaJzEJyUg6vmZsNl0FSr3pdA2skhxqA== +"@lerna/gitlab-client@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/gitlab-client/-/gitlab-client-6.4.1.tgz#a01d962dc52a55b8272ea52bc54d72c5fd9db6f9" + integrity sha512-AdLG4d+jbUvv0jQyygQUTNaTCNSMDxioJso6aAjQ/vkwyy3fBJ6FYzX74J4adSfOxC2MQZITFyuG+c9ggp7pyQ== dependencies: node-fetch "^2.6.1" - npmlog "^4.1.2" - whatwg-url "^8.4.0" + npmlog "^6.0.2" -"@lerna/global-options@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/global-options/-/global-options-4.0.0.tgz#c7d8b0de6a01d8a845e2621ea89e7f60f18c6a5f" - integrity sha512-TRMR8afAHxuYBHK7F++Ogop2a82xQjoGna1dvPOY6ltj/pEx59pdgcJfYcynYqMkFIk8bhLJJN9/ndIfX29FTQ== +"@lerna/global-options@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/global-options/-/global-options-6.4.1.tgz#7df76b1d38500606a8dc3ce0804bab6894c4f4a3" + integrity 
sha512-UTXkt+bleBB8xPzxBPjaCN/v63yQdfssVjhgdbkQ//4kayaRA65LyEtJTi9rUrsLlIy9/rbeb+SAZUHg129fJg== -"@lerna/has-npm-version@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/has-npm-version/-/has-npm-version-4.0.0.tgz#d3fc3292c545eb28bd493b36e6237cf0279f631c" - integrity sha512-LQ3U6XFH8ZmLCsvsgq1zNDqka0Xzjq5ibVN+igAI5ccRWNaUsE/OcmsyMr50xAtNQMYMzmpw5GVLAivT2/YzCg== +"@lerna/has-npm-version@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/has-npm-version/-/has-npm-version-6.4.1.tgz#04eba7df687e665294834253b659430efc1e01bb" + integrity sha512-vW191w5iCkwNWWWcy4542ZOpjKYjcP/pU3o3+w6NM1J3yBjWZcNa8lfzQQgde2QkGyNi+i70o6wIca1o0sdKwg== dependencies: - "@lerna/child-process" "4.0.0" + "@lerna/child-process" "6.4.1" semver "^7.3.4" -"@lerna/import@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/import/-/import-4.0.0.tgz#bde656c4a451fa87ae41733ff8a8da60547c5465" - integrity sha512-FaIhd+4aiBousKNqC7TX1Uhe97eNKf5/SC7c5WZANVWtC7aBWdmswwDt3usrzCNpj6/Wwr9EtEbYROzxKH8ffg== - dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/prompt" "4.0.0" - "@lerna/pulse-till-done" "4.0.0" - "@lerna/validation-error" "4.0.0" +"@lerna/import@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/import/-/import-6.4.1.tgz#b5696fed68a32d32398d66f95192267f1da5110e" + integrity sha512-oDg8g1PNrCM1JESLsG3rQBtPC+/K9e4ohs0xDKt5E6p4l7dc0Ib4oo0oCCT/hGzZUlNwHxrc2q9JMRzSAn6P/Q== + dependencies: + "@lerna/child-process" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/prompt" "6.4.1" + "@lerna/pulse-till-done" "6.4.1" + "@lerna/validation-error" "6.4.1" dedent "^0.7.0" fs-extra "^9.1.0" p-map-series "^2.1.0" -"@lerna/info@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/info/-/info-4.0.0.tgz#b9fb0e479d60efe1623603958a831a88b1d7f1fc" - integrity sha512-8Uboa12kaCSZEn4XRfPz5KU9XXoexSPS4oeYGj76s2UQb1O1GdnEyfjyNWoUl1KlJ2i/8nxUskpXIftoFYH0/Q== +"@lerna/info@6.4.1": + 
version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/info/-/info-6.4.1.tgz#30354fcb82c99b1f0ed753f957fbaca5b250c3fa" + integrity sha512-Ks4R7IndIr4vQXz+702gumPVhH6JVkshje0WKA3+ew2qzYZf68lU1sBe1OZsQJU3eeY2c60ax+bItSa7aaIHGw== dependencies: - "@lerna/command" "4.0.0" - "@lerna/output" "4.0.0" + "@lerna/command" "6.4.1" + "@lerna/output" "6.4.1" envinfo "^7.7.4" -"@lerna/init@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/init/-/init-4.0.0.tgz#dadff67e6dfb981e8ccbe0e6a310e837962f6c7a" - integrity sha512-wY6kygop0BCXupzWj5eLvTUqdR7vIAm0OgyV9WHpMYQGfs1V22jhztt8mtjCloD/O0nEe4tJhdG62XU5aYmPNQ== +"@lerna/init@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/init/-/init-6.4.1.tgz#ea4905ca976189db4b0bf04d78919060146bf684" + integrity sha512-CXd/s/xgj0ZTAoOVyolOTLW2BG7uQOhWW4P/ktlwwJr9s3c4H/z+Gj36UXw3q5X1xdR29NZt7Vc6fvROBZMjUQ== dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/command" "4.0.0" + "@lerna/child-process" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/project" "6.4.1" fs-extra "^9.1.0" p-map "^4.0.0" write-json-file "^4.3.0" -"@lerna/link@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/link/-/link-4.0.0.tgz#c3a38aabd44279d714e90f2451e31b63f0fb65ba" - integrity sha512-KlvPi7XTAcVOByfaLlOeYOfkkDcd+bejpHMCd1KcArcFTwijOwXOVi24DYomIeHvy6HsX/IUquJ4PPUJIeB4+w== +"@lerna/link@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/link/-/link-6.4.1.tgz#f31ed1f6aea1581e358a9ff545be78b61e923175" + integrity sha512-O8Rt7MAZT/WT2AwrB/+HY76ktnXA9cDFO9rhyKWZGTHdplbzuJgfsGzu8Xv0Ind+w+a8xLfqtWGPlwiETnDyrw== dependencies: - "@lerna/command" "4.0.0" - "@lerna/package-graph" "4.0.0" - "@lerna/symlink-dependencies" "4.0.0" + "@lerna/command" "6.4.1" + "@lerna/package-graph" "6.4.1" + "@lerna/symlink-dependencies" "6.4.1" + "@lerna/validation-error" "6.4.1" p-map "^4.0.0" slash "^3.0.0" -"@lerna/list@4.0.0": - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/@lerna/list/-/list-4.0.0.tgz#24b4e6995bd73f81c556793fe502b847efd9d1d7" - integrity sha512-L2B5m3P+U4Bif5PultR4TI+KtW+SArwq1i75QZ78mRYxPc0U/piau1DbLOmwrdqr99wzM49t0Dlvl6twd7GHFg== +"@lerna/list@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/list/-/list-6.4.1.tgz#12ad83902e148d1e5ba007149b72b14636f9f1ba" + integrity sha512-7a6AKgXgC4X7nK6twVPNrKCiDhrCiAhL/FE4u9HYhHqw9yFwyq8Qe/r1RVOkAOASNZzZ8GuBvob042bpunupCw== dependencies: - "@lerna/command" "4.0.0" - "@lerna/filter-options" "4.0.0" - "@lerna/listable" "4.0.0" - "@lerna/output" "4.0.0" + "@lerna/command" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/listable" "6.4.1" + "@lerna/output" "6.4.1" -"@lerna/listable@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/listable/-/listable-4.0.0.tgz#d00d6cb4809b403f2b0374fc521a78e318b01214" - integrity sha512-/rPOSDKsOHs5/PBLINZOkRIX1joOXUXEtyUs5DHLM8q6/RP668x/1lFhw6Dx7/U+L0+tbkpGtZ1Yt0LewCLgeQ== +"@lerna/listable@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/listable/-/listable-6.4.1.tgz#6f5c83865391c6beeb41802951c674e2de119bde" + integrity sha512-L8ANeidM10aoF8aL3L/771Bb9r/TRkbEPzAiC8Iy2IBTYftS87E3rT/4k5KBEGYzMieSKJaskSFBV0OQGYV1Cw== dependencies: - "@lerna/query-graph" "4.0.0" + "@lerna/query-graph" "6.4.1" chalk "^4.1.0" - columnify "^1.5.4" + columnify "^1.6.0" -"@lerna/log-packed@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/log-packed/-/log-packed-4.0.0.tgz#95168fe2e26ac6a71e42f4be857519b77e57a09f" - integrity sha512-+dpCiWbdzgMAtpajLToy9PO713IHoE6GV/aizXycAyA07QlqnkpaBNZ8DW84gHdM1j79TWockGJo9PybVhrrZQ== +"@lerna/log-packed@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/log-packed/-/log-packed-6.4.1.tgz#43eae50d5c0cd906b1977a58b62b35541cf89ec1" + integrity sha512-Pwv7LnIgWqZH4vkM1rWTVF+pmWJu7d0ZhVwyhCaBJUsYbo+SyB2ZETGygo3Z/A+vZ/S7ImhEEKfIxU9bg5lScQ== dependencies: byte-size "^7.0.0" - columnify "^1.5.4" 
+ columnify "^1.6.0" has-unicode "^2.0.1" - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/npm-conf@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/npm-conf/-/npm-conf-4.0.0.tgz#b259fd1e1cee2bf5402b236e770140ff9ade7fd2" - integrity sha512-uS7H02yQNq3oejgjxAxqq/jhwGEE0W0ntr8vM3EfpCW1F/wZruwQw+7bleJQ9vUBjmdXST//tk8mXzr5+JXCfw== +"@lerna/npm-conf@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/npm-conf/-/npm-conf-6.4.1.tgz#64dba237ff41472a24f96192669c1bc0dce15edb" + integrity sha512-Q+83uySGXYk3n1pYhvxtzyGwBGijYgYecgpiwRG1YNyaeGy+Mkrj19cyTWubT+rU/kM5c6If28+y9kdudvc7zQ== dependencies: config-chain "^1.1.12" pify "^5.0.0" -"@lerna/npm-dist-tag@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/npm-dist-tag/-/npm-dist-tag-4.0.0.tgz#d1e99b4eccd3414142f0548ad331bf2d53f3257a" - integrity sha512-F20sg28FMYTgXqEQihgoqSfwmq+Id3zT23CnOwD+XQMPSy9IzyLf1fFVH319vXIw6NF6Pgs4JZN2Qty6/CQXGw== +"@lerna/npm-dist-tag@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/npm-dist-tag/-/npm-dist-tag-6.4.1.tgz#f14e7176f7e323284e8aa8636b44818a61738fd1" + integrity sha512-If1Hn4q9fn0JWuBm455iIZDWE6Fsn4Nv8Tpqb+dYf0CtoT5Hn+iT64xSiU5XJw9Vc23IR7dIujkEXm2MVbnvZw== dependencies: - "@lerna/otplease" "4.0.0" - npm-package-arg "^8.1.0" - npm-registry-fetch "^9.0.0" - npmlog "^4.1.2" + "@lerna/otplease" "6.4.1" + npm-package-arg "8.1.1" + npm-registry-fetch "^13.3.0" + npmlog "^6.0.2" -"@lerna/npm-install@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/npm-install/-/npm-install-4.0.0.tgz#31180be3ab3b7d1818a1a0c206aec156b7094c78" - integrity sha512-aKNxq2j3bCH3eXl3Fmu4D54s/YLL9WSwV8W7X2O25r98wzrO38AUN6AB9EtmAx+LV/SP15et7Yueg9vSaanRWg== +"@lerna/npm-install@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/npm-install/-/npm-install-6.4.1.tgz#99f5748cb43de9786ea2b538c94a7183d38fc476" + integrity 
sha512-7gI1txMA9qTaT3iiuk/8/vL78wIhtbbOLhMf8m5yQ2G+3t47RUA8MNgUMsq4Zszw9C83drayqesyTf0u8BzVRg== dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/get-npm-exec-opts" "4.0.0" + "@lerna/child-process" "6.4.1" + "@lerna/get-npm-exec-opts" "6.4.1" fs-extra "^9.1.0" - npm-package-arg "^8.1.0" - npmlog "^4.1.2" + npm-package-arg "8.1.1" + npmlog "^6.0.2" signal-exit "^3.0.3" write-pkg "^4.0.0" -"@lerna/npm-publish@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/npm-publish/-/npm-publish-4.0.0.tgz#84eb62e876fe949ae1fd62c60804423dbc2c4472" - integrity sha512-vQb7yAPRo5G5r77DRjHITc9piR9gvEKWrmfCH7wkfBnGWEqu7n8/4bFQ7lhnkujvc8RXOsYpvbMQkNfkYibD/w== +"@lerna/npm-publish@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/npm-publish/-/npm-publish-6.4.1.tgz#baf07b108ae8b32932612db63206bcd5b5ee0e88" + integrity sha512-lbNEg+pThPAD8lIgNArm63agtIuCBCF3umxvgTQeLzyqUX6EtGaKJFyz/6c2ANcAuf8UfU7WQxFFbOiolibXTQ== dependencies: - "@lerna/otplease" "4.0.0" - "@lerna/run-lifecycle" "4.0.0" + "@lerna/otplease" "6.4.1" + "@lerna/run-lifecycle" "6.4.1" fs-extra "^9.1.0" - libnpmpublish "^4.0.0" - npm-package-arg "^8.1.0" - npmlog "^4.1.2" + libnpmpublish "^6.0.4" + npm-package-arg "8.1.1" + npmlog "^6.0.2" pify "^5.0.0" - read-package-json "^3.0.0" + read-package-json "^5.0.1" -"@lerna/npm-run-script@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/npm-run-script/-/npm-run-script-4.0.0.tgz#dfebf4f4601442e7c0b5214f9fb0d96c9350743b" - integrity sha512-Jmyh9/IwXJjOXqKfIgtxi0bxi1pUeKe5bD3S81tkcy+kyng/GNj9WSqD5ZggoNP2NP//s4CLDAtUYLdP7CU9rA== +"@lerna/npm-run-script@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/npm-run-script/-/npm-run-script-6.4.1.tgz#86db4f15d359b8a371db666aa51c9b2b87b602f3" + integrity sha512-HyvwuyhrGqDa1UbI+pPbI6v+wT6I34R0PW3WCADn6l59+AyqLOCUQQr+dMW7jdYNwjO6c/Ttbvj4W58EWsaGtQ== dependencies: - "@lerna/child-process" "4.0.0" - "@lerna/get-npm-exec-opts" "4.0.0" - npmlog 
"^4.1.2" + "@lerna/child-process" "6.4.1" + "@lerna/get-npm-exec-opts" "6.4.1" + npmlog "^6.0.2" -"@lerna/otplease@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/otplease/-/otplease-4.0.0.tgz#84972eb43448f8a1077435ba1c5e59233b725850" - integrity sha512-Sgzbqdk1GH4psNiT6hk+BhjOfIr/5KhGBk86CEfHNJTk9BK4aZYyJD4lpDbDdMjIV4g03G7pYoqHzH765T4fxw== +"@lerna/otplease@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/otplease/-/otplease-6.4.1.tgz#9573e053c43e7139442da96fe655aa02749cb8a3" + integrity sha512-ePUciFfFdythHNMp8FP5K15R/CoGzSLVniJdD50qm76c4ATXZHnGCW2PGwoeAZCy4QTzhlhdBq78uN0wAs75GA== dependencies: - "@lerna/prompt" "4.0.0" + "@lerna/prompt" "6.4.1" -"@lerna/output@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/output/-/output-4.0.0.tgz#b1d72215c0e35483e4f3e9994debc82c621851f2" - integrity sha512-Un1sHtO1AD7buDQrpnaYTi2EG6sLF+KOPEAMxeUYG5qG3khTs2Zgzq5WE3dt2N/bKh7naESt20JjIW6tBELP0w== +"@lerna/output@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/output/-/output-6.4.1.tgz#327baf768b8fb63db9d52f68288d387379f814f7" + integrity sha512-A1yRLF0bO+lhbIkrryRd6hGSD0wnyS1rTPOWJhScO/Zyv8vIPWhd2fZCLR1gI2d/Kt05qmK3T/zETTwloK7Fww== dependencies: - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/pack-directory@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/pack-directory/-/pack-directory-4.0.0.tgz#8b617db95d20792f043aaaa13a9ccc0e04cb4c74" - integrity sha512-NJrmZNmBHS+5aM+T8N6FVbaKFScVqKlQFJNY2k7nsJ/uklNKsLLl6VhTQBPwMTbf6Tf7l6bcKzpy7aePuq9UiQ== - dependencies: - "@lerna/get-packed" "4.0.0" - "@lerna/package" "4.0.0" - "@lerna/run-lifecycle" "4.0.0" - npm-packlist "^2.1.4" - npmlog "^4.1.2" +"@lerna/pack-directory@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/pack-directory/-/pack-directory-6.4.1.tgz#e78aae4e7944057d8fc6cb4dd8ae50be7a95c2fd" + integrity 
sha512-kBtDL9bPP72/Nl7Gqa2CA3Odb8CYY1EF2jt801f+B37TqRLf57UXQom7yF3PbWPCPmhoU+8Fc4RMpUwSbFC46Q== + dependencies: + "@lerna/get-packed" "6.4.1" + "@lerna/package" "6.4.1" + "@lerna/run-lifecycle" "6.4.1" + "@lerna/temp-write" "6.4.1" + npm-packlist "^5.1.1" + npmlog "^6.0.2" tar "^6.1.0" - temp-write "^4.0.0" -"@lerna/package-graph@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/package-graph/-/package-graph-4.0.0.tgz#16a00253a8ac810f72041481cb46bcee8d8123dd" - integrity sha512-QED2ZCTkfXMKFoTGoccwUzjHtZMSf3UKX14A4/kYyBms9xfFsesCZ6SLI5YeySEgcul8iuIWfQFZqRw+Qrjraw== +"@lerna/package-graph@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/package-graph/-/package-graph-6.4.1.tgz#7a18024d531f0bd88609944e572b4861f0f8868f" + integrity sha512-fQvc59stRYOqxT3Mn7g/yI9/Kw5XetJoKcW5l8XeqKqcTNDURqKnN0qaNBY6lTTLOe4cR7gfXF2l1u3HOz0qEg== dependencies: - "@lerna/prerelease-id-from-version" "4.0.0" - "@lerna/validation-error" "4.0.0" - npm-package-arg "^8.1.0" - npmlog "^4.1.2" + "@lerna/prerelease-id-from-version" "6.4.1" + "@lerna/validation-error" "6.4.1" + npm-package-arg "8.1.1" + npmlog "^6.0.2" semver "^7.3.4" -"@lerna/package@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/package/-/package-4.0.0.tgz#1b4c259c4bcff45c876ee1d591a043aacbc0d6b7" - integrity sha512-l0M/izok6FlyyitxiQKr+gZLVFnvxRQdNhzmQ6nRnN9dvBJWn+IxxpM+cLqGACatTnyo9LDzNTOj2Db3+s0s8Q== +"@lerna/package@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/package/-/package-6.4.1.tgz#ebbd4c5f58f4b6cf77019271a686be9585272a3b" + integrity sha512-TrOah58RnwS9R8d3+WgFFTu5lqgZs7M+e1dvcRga7oSJeKscqpEK57G0xspvF3ycjfXQwRMmEtwPmpkeEVLMzA== dependencies: load-json-file "^6.2.0" - npm-package-arg "^8.1.0" + npm-package-arg "8.1.1" write-pkg "^4.0.0" -"@lerna/prerelease-id-from-version@4.0.0": - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/@lerna/prerelease-id-from-version/-/prerelease-id-from-version-4.0.0.tgz#c7e0676fcee1950d85630e108eddecdd5b48c916" - integrity sha512-GQqguzETdsYRxOSmdFZ6zDBXDErIETWOqomLERRY54f4p+tk4aJjoVdd9xKwehC9TBfIFvlRbL1V9uQGHh1opg== +"@lerna/prerelease-id-from-version@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/prerelease-id-from-version/-/prerelease-id-from-version-6.4.1.tgz#65eb1835cdfd112783eea6b596812c64f535386b" + integrity sha512-uGicdMFrmfHXeC0FTosnUKRgUjrBJdZwrmw7ZWMb5DAJGOuTzrvJIcz5f0/eL3XqypC/7g+9DoTgKjX3hlxPZA== dependencies: semver "^7.3.4" -"@lerna/profiler@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/profiler/-/profiler-4.0.0.tgz#8a53ab874522eae15d178402bff90a14071908e9" - integrity sha512-/BaEbqnVh1LgW/+qz8wCuI+obzi5/vRE8nlhjPzdEzdmWmZXuCKyWSEzAyHOJWw1ntwMiww5dZHhFQABuoFz9Q== +"@lerna/profiler@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/profiler/-/profiler-6.4.1.tgz#0d5e017e1389e35960d671f43db7eb16337fda1b" + integrity sha512-dq2uQxcu0aq6eSoN+JwnvHoAnjtZAVngMvywz5bTAfzz/sSvIad1v8RCpJUMBQHxaPtbfiNvOIQgDZOmCBIM4g== dependencies: fs-extra "^9.1.0" - npmlog "^4.1.2" + npmlog "^6.0.2" upath "^2.0.1" -"@lerna/project@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/project/-/project-4.0.0.tgz#ff84893935833533a74deff30c0e64ddb7f0ba6b" - integrity sha512-o0MlVbDkD5qRPkFKlBZsXZjoNTWPyuL58564nSfZJ6JYNmgAptnWPB2dQlAc7HWRZkmnC2fCkEdoU+jioPavbg== +"@lerna/project@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/project/-/project-6.4.1.tgz#0519323aa8bde5b73fc0bf1c428385a556a445f0" + integrity sha512-BPFYr4A0mNZ2jZymlcwwh7PfIC+I6r52xgGtJ4KIrIOB6mVKo9u30dgYJbUQxmSuMRTOnX7PJZttQQzSda4gEg== dependencies: - "@lerna/package" "4.0.0" - "@lerna/validation-error" "4.0.0" + "@lerna/package" "6.4.1" + "@lerna/validation-error" "6.4.1" cosmiconfig "^7.0.0" dedent "^0.7.0" dot-prop "^6.0.1" glob-parent "^5.1.1" globby 
"^11.0.2" + js-yaml "^4.1.0" load-json-file "^6.2.0" - npmlog "^4.1.2" + npmlog "^6.0.2" p-map "^4.0.0" resolve-from "^5.0.0" write-json-file "^4.3.0" -"@lerna/prompt@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/prompt/-/prompt-4.0.0.tgz#5ec69a803f3f0db0ad9f221dad64664d3daca41b" - integrity sha512-4Ig46oCH1TH5M7YyTt53fT6TuaKMgqUUaqdgxvp6HP6jtdak6+amcsqB8YGz2eQnw/sdxunx84DfI9XpoLj4bQ== +"@lerna/prompt@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/prompt/-/prompt-6.4.1.tgz#5ede06b4c8e17ec3045180b10ec5bd313cbc8585" + integrity sha512-vMxCIgF9Vpe80PnargBGAdS/Ib58iYEcfkcXwo7mYBCxEVcaUJFKZ72FEW8rw+H5LkxBlzrBJyfKRoOe0ks9gQ== dependencies: - inquirer "^7.3.3" - npmlog "^4.1.2" + inquirer "^8.2.4" + npmlog "^6.0.2" -"@lerna/publish@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/publish/-/publish-4.0.0.tgz#f67011305adeba120066a3b6d984a5bb5fceef65" - integrity sha512-K8jpqjHrChH22qtkytA5GRKIVFEtqBF6JWj1I8dWZtHs4Jywn8yB1jQ3BAMLhqmDJjWJtRck0KXhQQKzDK2UPg== - dependencies: - "@lerna/check-working-tree" "4.0.0" - "@lerna/child-process" "4.0.0" - "@lerna/collect-updates" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/describe-ref" "4.0.0" - "@lerna/log-packed" "4.0.0" - "@lerna/npm-conf" "4.0.0" - "@lerna/npm-dist-tag" "4.0.0" - "@lerna/npm-publish" "4.0.0" - "@lerna/otplease" "4.0.0" - "@lerna/output" "4.0.0" - "@lerna/pack-directory" "4.0.0" - "@lerna/prerelease-id-from-version" "4.0.0" - "@lerna/prompt" "4.0.0" - "@lerna/pulse-till-done" "4.0.0" - "@lerna/run-lifecycle" "4.0.0" - "@lerna/run-topologically" "4.0.0" - "@lerna/validation-error" "4.0.0" - "@lerna/version" "4.0.0" +"@lerna/publish@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/publish/-/publish-6.4.1.tgz#e1bdfa67297ca4a3054863e7acfc8482bf613c35" + integrity sha512-/D/AECpw2VNMa1Nh4g29ddYKRIqygEV1ftV8PYXVlHpqWN7VaKrcbRU6pn0ldgpFlMyPtESfv1zS32F5CQ944w== + dependencies: + "@lerna/check-working-tree" "6.4.1" + 
"@lerna/child-process" "6.4.1" + "@lerna/collect-updates" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/describe-ref" "6.4.1" + "@lerna/log-packed" "6.4.1" + "@lerna/npm-conf" "6.4.1" + "@lerna/npm-dist-tag" "6.4.1" + "@lerna/npm-publish" "6.4.1" + "@lerna/otplease" "6.4.1" + "@lerna/output" "6.4.1" + "@lerna/pack-directory" "6.4.1" + "@lerna/prerelease-id-from-version" "6.4.1" + "@lerna/prompt" "6.4.1" + "@lerna/pulse-till-done" "6.4.1" + "@lerna/run-lifecycle" "6.4.1" + "@lerna/run-topologically" "6.4.1" + "@lerna/validation-error" "6.4.1" + "@lerna/version" "6.4.1" fs-extra "^9.1.0" - libnpmaccess "^4.0.1" - npm-package-arg "^8.1.0" - npm-registry-fetch "^9.0.0" - npmlog "^4.1.2" + libnpmaccess "^6.0.3" + npm-package-arg "8.1.1" + npm-registry-fetch "^13.3.0" + npmlog "^6.0.2" p-map "^4.0.0" p-pipe "^3.1.0" - pacote "^11.2.6" + pacote "^13.6.1" semver "^7.3.4" -"@lerna/pulse-till-done@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/pulse-till-done/-/pulse-till-done-4.0.0.tgz#04bace7d483a8205c187b806bcd8be23d7bb80a3" - integrity sha512-Frb4F7QGckaybRhbF7aosLsJ5e9WuH7h0KUkjlzSByVycxY91UZgaEIVjS2oN9wQLrheLMHl6SiFY0/Pvo0Cxg== +"@lerna/pulse-till-done@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/pulse-till-done/-/pulse-till-done-6.4.1.tgz#85c38a43939bf5e21b61091d0bcf73a1109a59db" + integrity sha512-efAkOC1UuiyqYBfrmhDBL6ufYtnpSqAG+lT4d/yk3CzJEJKkoCwh2Hb692kqHHQ5F74Uusc8tcRB7GBcfNZRWA== dependencies: - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/query-graph@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/query-graph/-/query-graph-4.0.0.tgz#09dd1c819ac5ee3f38db23931143701f8a6eef63" - integrity sha512-YlP6yI3tM4WbBmL9GCmNDoeQyzcyg1e4W96y/PKMZa5GbyUvkS2+Jc2kwPD+5KcXou3wQZxSPzR3Te5OenaDdg== +"@lerna/query-graph@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/query-graph/-/query-graph-6.4.1.tgz#3c224a49ff392d08ce8aeeaa1af4458f522a2b78" + integrity 
sha512-gBGZLgu2x6L4d4ZYDn4+d5rxT9RNBC+biOxi0QrbaIq83I+JpHVmFSmExXK3rcTritrQ3JT9NCqb+Yu9tL9adQ== dependencies: - "@lerna/package-graph" "4.0.0" + "@lerna/package-graph" "6.4.1" -"@lerna/resolve-symlink@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/resolve-symlink/-/resolve-symlink-4.0.0.tgz#6d006628a210c9b821964657a9e20a8c9a115e14" - integrity sha512-RtX8VEUzqT+uLSCohx8zgmjc6zjyRlh6i/helxtZTMmc4+6O4FS9q5LJas2uGO2wKvBlhcD6siibGt7dIC3xZA== +"@lerna/resolve-symlink@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/resolve-symlink/-/resolve-symlink-6.4.1.tgz#ab42dcbd03bc4028ec77ee481c5db8884ebaf40a" + integrity sha512-gnqltcwhWVLUxCuwXWe/ch9WWTxXRI7F0ZvCtIgdfOpbosm3f1g27VO1LjXeJN2i6ks03qqMowqy4xB4uMR9IA== dependencies: fs-extra "^9.1.0" - npmlog "^4.1.2" - read-cmd-shim "^2.0.0" + npmlog "^6.0.2" + read-cmd-shim "^3.0.0" -"@lerna/rimraf-dir@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/rimraf-dir/-/rimraf-dir-4.0.0.tgz#2edf3b62d4eb0ef4e44e430f5844667d551ec25a" - integrity sha512-QNH9ABWk9mcMJh2/muD9iYWBk1oQd40y6oH+f3wwmVGKYU5YJD//+zMiBI13jxZRtwBx0vmBZzkBkK1dR11cBg== +"@lerna/rimraf-dir@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/rimraf-dir/-/rimraf-dir-6.4.1.tgz#116e379f653135b3ae955dcba703bdf212cab51a" + integrity sha512-5sDOmZmVj0iXIiEgdhCm0Prjg5q2SQQKtMd7ImimPtWKkV0IyJWxrepJFbeQoFj5xBQF7QB5jlVNEfQfKhD6pQ== dependencies: - "@lerna/child-process" "4.0.0" - npmlog "^4.1.2" + "@lerna/child-process" "6.4.1" + npmlog "^6.0.2" path-exists "^4.0.0" rimraf "^3.0.2" -"@lerna/run-lifecycle@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/run-lifecycle/-/run-lifecycle-4.0.0.tgz#e648a46f9210a9bcd7c391df6844498cb5079334" - integrity sha512-IwxxsajjCQQEJAeAaxF8QdEixfI7eLKNm4GHhXHrgBu185JcwScFZrj9Bs+PFKxwb+gNLR4iI5rpUdY8Y0UdGQ== +"@lerna/run-lifecycle@6.4.1": + version "6.4.1" + resolved 
"https://registry.yarnpkg.com/@lerna/run-lifecycle/-/run-lifecycle-6.4.1.tgz#1eac136afae97e197bdb564e67fb385f4d346685" + integrity sha512-42VopI8NC8uVCZ3YPwbTycGVBSgukJltW5Saein0m7TIqFjwSfrcP0n7QJOr+WAu9uQkk+2kBstF5WmvKiqgEA== dependencies: - "@lerna/npm-conf" "4.0.0" - npm-lifecycle "^3.1.5" - npmlog "^4.1.2" + "@lerna/npm-conf" "6.4.1" + "@npmcli/run-script" "^4.1.7" + npmlog "^6.0.2" + p-queue "^6.6.2" -"@lerna/run-topologically@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/run-topologically/-/run-topologically-4.0.0.tgz#af846eeee1a09b0c2be0d1bfb5ef0f7b04bb1827" - integrity sha512-EVZw9hGwo+5yp+VL94+NXRYisqgAlj0jWKWtAIynDCpghRxCE5GMO3xrQLmQgqkpUl9ZxQFpICgYv5DW4DksQA== +"@lerna/run-topologically@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/run-topologically/-/run-topologically-6.4.1.tgz#640b07d83f1d1e6d3bc36f81a74957839bb1672f" + integrity sha512-gXlnAsYrjs6KIUGDnHM8M8nt30Amxq3r0lSCNAt+vEu2sMMEOh9lffGGaJobJZ4bdwoXnKay3uER/TU8E9owMw== dependencies: - "@lerna/query-graph" "4.0.0" + "@lerna/query-graph" "6.4.1" p-queue "^6.6.2" -"@lerna/run@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/run/-/run-4.0.0.tgz#4bc7fda055a729487897c23579694f6183c91262" - integrity sha512-9giulCOzlMPzcZS/6Eov6pxE9gNTyaXk0Man+iCIdGJNMrCnW7Dme0Z229WWP/UoxDKg71F2tMsVVGDiRd8fFQ== - dependencies: - "@lerna/command" "4.0.0" - "@lerna/filter-options" "4.0.0" - "@lerna/npm-run-script" "4.0.0" - "@lerna/output" "4.0.0" - "@lerna/profiler" "4.0.0" - "@lerna/run-topologically" "4.0.0" - "@lerna/timer" "4.0.0" - "@lerna/validation-error" "4.0.0" +"@lerna/run@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/run/-/run-6.4.1.tgz#985279f071ff23ae15f92837f85f979a1352fc01" + integrity sha512-HRw7kS6KNqTxqntFiFXPEeBEct08NjnL6xKbbOV6pXXf+lXUQbJlF8S7t6UYqeWgTZ4iU9caIxtZIY+EpW93mQ== + dependencies: + "@lerna/command" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/npm-run-script" "6.4.1" + 
"@lerna/output" "6.4.1" + "@lerna/profiler" "6.4.1" + "@lerna/run-topologically" "6.4.1" + "@lerna/timer" "6.4.1" + "@lerna/validation-error" "6.4.1" + fs-extra "^9.1.0" + nx ">=15.4.2 < 16" p-map "^4.0.0" -"@lerna/symlink-binary@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/symlink-binary/-/symlink-binary-4.0.0.tgz#21009f62d53a425f136cb4c1a32c6b2a0cc02d47" - integrity sha512-zualodWC4q1QQc1pkz969hcFeWXOsVYZC5AWVtAPTDfLl+TwM7eG/O6oP+Rr3fFowspxo6b1TQ6sYfDV6HXNWA== +"@lerna/symlink-binary@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/symlink-binary/-/symlink-binary-6.4.1.tgz#d8e1b653a7ae9fe38834851c66c92278e3bb25ae" + integrity sha512-poZX90VmXRjL/JTvxaUQPeMDxFUIQvhBkHnH+dwW0RjsHB/2Tu4QUAsE0OlFnlWQGsAtXF4FTtW8Xs57E/19Kw== dependencies: - "@lerna/create-symlink" "4.0.0" - "@lerna/package" "4.0.0" + "@lerna/create-symlink" "6.4.1" + "@lerna/package" "6.4.1" fs-extra "^9.1.0" p-map "^4.0.0" -"@lerna/symlink-dependencies@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/symlink-dependencies/-/symlink-dependencies-4.0.0.tgz#8910eca084ae062642d0490d8972cf2d98e9ebbd" - integrity sha512-BABo0MjeUHNAe2FNGty1eantWp8u83BHSeIMPDxNq0MuW2K3CiQRaeWT3EGPAzXpGt0+hVzBrA6+OT0GPn7Yuw== +"@lerna/symlink-dependencies@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/symlink-dependencies/-/symlink-dependencies-6.4.1.tgz#988203cc260406b64d61294367821a0f26419ee6" + integrity sha512-43W2uLlpn3TTYuHVeO/2A6uiTZg6TOk/OSKi21ujD7IfVIYcRYCwCV+8LPP12R3rzyab0JWkWnhp80Z8A2Uykw== dependencies: - "@lerna/create-symlink" "4.0.0" - "@lerna/resolve-symlink" "4.0.0" - "@lerna/symlink-binary" "4.0.0" + "@lerna/create-symlink" "6.4.1" + "@lerna/resolve-symlink" "6.4.1" + "@lerna/symlink-binary" "6.4.1" fs-extra "^9.1.0" p-map "^4.0.0" p-map-series "^2.1.0" -"@lerna/timer@4.0.0": - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/@lerna/timer/-/timer-4.0.0.tgz#a52e51bfcd39bfd768988049ace7b15c1fd7a6da" - integrity sha512-WFsnlaE7SdOvjuyd05oKt8Leg3ENHICnvX3uYKKdByA+S3g+TCz38JsNs7OUZVt+ba63nC2nbXDlUnuT2Xbsfg== +"@lerna/temp-write@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/temp-write/-/temp-write-6.4.1.tgz#1c46d05b633597c77b0c5f5ab46c1315195f7786" + integrity sha512-7uiGFVoTyos5xXbVQg4bG18qVEn9dFmboXCcHbMj5mc/+/QmU9QeNz/Cq36O5TY6gBbLnyj3lfL5PhzERWKMFg== + dependencies: + graceful-fs "^4.1.15" + is-stream "^2.0.0" + make-dir "^3.0.0" + temp-dir "^1.0.0" + uuid "^8.3.2" -"@lerna/validation-error@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/validation-error/-/validation-error-4.0.0.tgz#af9d62fe8304eaa2eb9a6ba1394f9aa807026d35" - integrity sha512-1rBOM5/koiVWlRi3V6dB863E1YzJS8v41UtsHgMr6gB2ncJ2LsQtMKlJpi3voqcgh41H8UsPXR58RrrpPpufyw== +"@lerna/timer@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/timer/-/timer-6.4.1.tgz#47fe50b56bd2fc32396a2559f7bb65de8200f07d" + integrity sha512-ogmjFTWwRvevZr76a2sAbhmu3Ut2x73nDIn0bcwZwZ3Qc3pHD8eITdjs/wIKkHse3J7l3TO5BFJPnrvDS7HLnw== + +"@lerna/validation-error@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/validation-error/-/validation-error-6.4.1.tgz#2cab92c2be395158c3d65fa57ddb73892617d7e8" + integrity sha512-fxfJvl3VgFd7eBfVMRX6Yal9omDLs2mcGKkNYeCEyt4Uwlz1B5tPAXyk/sNMfkKV2Aat/mlK5tnY13vUrMKkyA== dependencies: - npmlog "^4.1.2" + npmlog "^6.0.2" -"@lerna/version@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/version/-/version-4.0.0.tgz#532659ec6154d8a8789c5ab53878663e244e3228" - integrity sha512-otUgiqs5W9zGWJZSCCMRV/2Zm2A9q9JwSDS7s/tlKq4mWCYriWo7+wsHEA/nPTMDyYyBO5oyZDj+3X50KDUzeA== - dependencies: - "@lerna/check-working-tree" "4.0.0" - "@lerna/child-process" "4.0.0" - "@lerna/collect-updates" "4.0.0" - "@lerna/command" "4.0.0" - "@lerna/conventional-commits" "4.0.0" - "@lerna/github-client" 
"4.0.0" - "@lerna/gitlab-client" "4.0.0" - "@lerna/output" "4.0.0" - "@lerna/prerelease-id-from-version" "4.0.0" - "@lerna/prompt" "4.0.0" - "@lerna/run-lifecycle" "4.0.0" - "@lerna/run-topologically" "4.0.0" - "@lerna/validation-error" "4.0.0" +"@lerna/version@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/version/-/version-6.4.1.tgz#01011364df04240ce92dffed1d2fa76bb9f959ff" + integrity sha512-1/krPq0PtEqDXtaaZsVuKev9pXJCkNC1vOo2qCcn6PBkODw/QTAvGcUi0I+BM2c//pdxge9/gfmbDo1lC8RtAQ== + dependencies: + "@lerna/check-working-tree" "6.4.1" + "@lerna/child-process" "6.4.1" + "@lerna/collect-updates" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/conventional-commits" "6.4.1" + "@lerna/github-client" "6.4.1" + "@lerna/gitlab-client" "6.4.1" + "@lerna/output" "6.4.1" + "@lerna/prerelease-id-from-version" "6.4.1" + "@lerna/prompt" "6.4.1" + "@lerna/run-lifecycle" "6.4.1" + "@lerna/run-topologically" "6.4.1" + "@lerna/temp-write" "6.4.1" + "@lerna/validation-error" "6.4.1" + "@nrwl/devkit" ">=15.4.2 < 16" chalk "^4.1.0" dedent "^0.7.0" load-json-file "^6.2.0" minimatch "^3.0.4" - npmlog "^4.1.2" + npmlog "^6.0.2" p-map "^4.0.0" p-pipe "^3.1.0" p-reduce "^2.1.0" p-waterfall "^2.1.1" semver "^7.3.4" slash "^3.0.0" - temp-write "^4.0.0" write-json-file "^4.3.0" -"@lerna/write-log-file@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@lerna/write-log-file/-/write-log-file-4.0.0.tgz#18221a38a6a307d6b0a5844dd592ad53fa27091e" - integrity sha512-XRG5BloiArpXRakcnPHmEHJp+4AtnhRtpDIHSghmXD5EichI1uD73J7FgPp30mm2pDRq3FdqB0NbwSEsJ9xFQg== +"@lerna/write-log-file@6.4.1": + version "6.4.1" + resolved "https://registry.yarnpkg.com/@lerna/write-log-file/-/write-log-file-6.4.1.tgz#b9b959e4b853cdabf0309bc5da1513fa025117ec" + integrity sha512-LE4fueQSDrQo76F4/gFXL0wnGhqdG7WHVH8D8TrKouF2Afl4NHltObCm4WsSMPjcfciVnZQFfx1ruxU4r/enHQ== dependencies: - npmlog "^4.1.2" - write-file-atomic "^3.0.3" + npmlog "^6.0.2" + write-file-atomic "^4.0.1" 
"@liskhq/bignum@1.3.1": version "1.3.1" @@ -2536,21 +2560,61 @@ "@nodelib/fs.scandir" "2.1.3" fastq "^1.6.0" -"@npmcli/arborist@^2.2.2": - version "2.10.0" - resolved "https://registry.yarnpkg.com/@npmcli/arborist/-/arborist-2.10.0.tgz#424c2d73a7ae59c960b0cc7f74fed043e4316c2c" - integrity "sha1-Qkwtc6euWclgsMx/dP7QQ+QxbCw= sha512-CLnD+zXG9oijEEzViimz8fbOoFVb7hoypiaf7p6giJhvYtrxLAyY3cZAMPIFQvsG731+02eMDp3LqVBNo7BaZA==" +"@npmcli/arborist@5.3.0": + version "5.3.0" + resolved "https://registry.yarnpkg.com/@npmcli/arborist/-/arborist-5.3.0.tgz#321d9424677bfc08569e98a5ac445ee781f32053" + integrity sha512-+rZ9zgL1lnbl8Xbb1NQdMjveOMwj4lIYfcDtyJHHi5x4X8jtR6m8SXooJMZy5vmFVZ8w7A2Bnd/oX9eTuU8w5A== + dependencies: + "@isaacs/string-locale-compare" "^1.1.0" + "@npmcli/installed-package-contents" "^1.0.7" + "@npmcli/map-workspaces" "^2.0.3" + "@npmcli/metavuln-calculator" "^3.0.1" + "@npmcli/move-file" "^2.0.0" + "@npmcli/name-from-folder" "^1.0.1" + "@npmcli/node-gyp" "^2.0.0" + "@npmcli/package-json" "^2.0.0" + "@npmcli/run-script" "^4.1.3" + bin-links "^3.0.0" + cacache "^16.0.6" + common-ancestor-path "^1.0.1" + json-parse-even-better-errors "^2.3.1" + json-stringify-nice "^1.1.4" + mkdirp "^1.0.4" + mkdirp-infer-owner "^2.0.0" + nopt "^5.0.0" + npm-install-checks "^5.0.0" + npm-package-arg "^9.0.0" + npm-pick-manifest "^7.0.0" + npm-registry-fetch "^13.0.0" + npmlog "^6.0.2" + pacote "^13.6.1" + parse-conflict-json "^2.0.1" + proc-log "^2.0.0" + promise-all-reject-late "^1.0.0" + promise-call-limit "^1.0.1" + read-package-json-fast "^2.0.2" + readdir-scoped-modules "^1.1.0" + rimraf "^3.0.2" + semver "^7.3.7" + ssri "^9.0.0" + treeverse "^2.0.0" + walk-up-path "^1.0.0" + +"@npmcli/arborist@^4.0.4": + version "4.3.1" + resolved "https://registry.yarnpkg.com/@npmcli/arborist/-/arborist-4.3.1.tgz#a08cddce3339882f688c1dea1651f6971e781c44" + integrity sha512-yMRgZVDpwWjplorzt9SFSaakWx6QIK248Nw4ZFgkrAy/GvJaFRaSZzE6nD7JBK5r8g/+PTxFq5Wj/sfciE7x+A== dependencies: - 
"@isaacs/string-locale-compare" "^1.0.1" + "@isaacs/string-locale-compare" "^1.1.0" "@npmcli/installed-package-contents" "^1.0.7" - "@npmcli/map-workspaces" "^1.0.2" - "@npmcli/metavuln-calculator" "^1.1.0" + "@npmcli/map-workspaces" "^2.0.0" + "@npmcli/metavuln-calculator" "^2.0.0" "@npmcli/move-file" "^1.1.0" "@npmcli/name-from-folder" "^1.0.1" - "@npmcli/node-gyp" "^1.0.1" + "@npmcli/node-gyp" "^1.0.3" "@npmcli/package-json" "^1.0.1" - "@npmcli/run-script" "^1.8.2" - bin-links "^2.2.1" + "@npmcli/run-script" "^2.0.0" + bin-links "^3.0.0" cacache "^15.0.3" common-ancestor-path "^1.0.1" json-parse-even-better-errors "^2.3.1" @@ -2560,9 +2624,9 @@ npm-install-checks "^4.0.0" npm-package-arg "^8.1.5" npm-pick-manifest "^6.1.0" - npm-registry-fetch "^11.0.0" - pacote "^11.3.5" - parse-conflict-json "^1.1.1" + npm-registry-fetch "^12.0.1" + pacote "^12.0.2" + parse-conflict-json "^2.0.1" proc-log "^1.0.0" promise-all-reject-late "^1.0.0" promise-call-limit "^1.0.1" @@ -2574,11 +2638,6 @@ treeverse "^1.0.4" walk-up-path "^1.0.0" -"@npmcli/ci-detect@^1.0.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@npmcli/ci-detect/-/ci-detect-1.3.0.tgz#6c1d2c625fb6ef1b9dea85ad0a5afcbef85ef22a" - integrity sha512-oN3y7FAROHhrAt7Rr7PnTSwrHrZVRTS2ZbyxeQwSSYD0ifwM3YNgQqbaRmjcWoPyq77MjchusjJDspbzMmip1Q== - "@npmcli/fs@^1.0.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-1.1.0.tgz#bec1d1b89c170d40e1b73ad6c943b0b75e7d2951" @@ -2587,7 +2646,15 @@ "@gar/promisify" "^1.0.1" semver "^7.3.5" -"@npmcli/git@^2.0.1", "@npmcli/git@^2.1.0": +"@npmcli/fs@^2.1.0": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-2.1.2.tgz#a9e2541a4a2fec2e69c29b35e6060973da79b865" + integrity sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ== + dependencies: + "@gar/promisify" "^1.1.3" + semver "^7.3.5" + +"@npmcli/git@^2.1.0": version "2.1.0" resolved 
"https://registry.yarnpkg.com/@npmcli/git/-/git-2.1.0.tgz#2fbd77e147530247d37f325930d457b3ebe894f6" integrity "sha1-L7134UdTAkfTfzJZMNRXs+volPY= sha512-/hBFX/QG1b+N7PZBFs0bi+evgRZcK9nWBxQKZkGoXUT5hJSwl5c4d7y8/hm+NQZRPhQ67RzFaj5UM9YeyKoryw==" @@ -2601,6 +2668,21 @@ semver "^7.3.5" which "^2.0.2" +"@npmcli/git@^3.0.0": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@npmcli/git/-/git-3.0.2.tgz#5c5de6b4d70474cf2d09af149ce42e4e1dacb931" + integrity sha512-CAcd08y3DWBJqJDpfuVL0uijlq5oaXaOJEKHKc4wqrjd00gkvTZB+nFuLn+doOOKddaQS9JfqtNoFCO2LCvA3w== + dependencies: + "@npmcli/promise-spawn" "^3.0.0" + lru-cache "^7.4.4" + mkdirp "^1.0.4" + npm-pick-manifest "^7.0.0" + proc-log "^2.0.0" + promise-inflight "^1.0.1" + promise-retry "^2.0.1" + semver "^7.3.5" + which "^2.0.2" + "@npmcli/installed-package-contents@^1.0.6", "@npmcli/installed-package-contents@^1.0.7": version "1.0.7" resolved "https://registry.yarnpkg.com/@npmcli/installed-package-contents/-/installed-package-contents-1.0.7.tgz#ab7408c6147911b970a8abe261ce512232a3f4fa" @@ -2609,25 +2691,36 @@ npm-bundled "^1.1.1" npm-normalize-package-bin "^1.0.1" -"@npmcli/map-workspaces@^1.0.2": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@npmcli/map-workspaces/-/map-workspaces-1.0.3.tgz#6072a0794762cf8f572e6080fa66d1bbefa991d5" - integrity sha512-SdlRlOoQw4WKD4vtb/n5gUkobEABYBEOo8fRE4L8CtBkyWDSvIrReTfKvQ/Jc/LQqDaaZ5iv1iMSQzKCUr1n1A== +"@npmcli/map-workspaces@^2.0.0", "@npmcli/map-workspaces@^2.0.3": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@npmcli/map-workspaces/-/map-workspaces-2.0.4.tgz#9e5e8ab655215a262aefabf139782b894e0504fc" + integrity sha512-bMo0aAfwhVwqoVM5UzX1DJnlvVvzDCHae821jv48L1EsrYwfOZChlqWYXEtto/+BkBXetPbEWgau++/brh4oVg== dependencies: "@npmcli/name-from-folder" "^1.0.1" - glob "^7.1.6" - minimatch "^3.0.4" - read-package-json-fast "^2.0.1" + glob "^8.0.1" + minimatch "^5.0.1" + read-package-json-fast "^2.0.3" -"@npmcli/metavuln-calculator@^1.1.0": - version "1.1.1" - 
resolved "https://registry.yarnpkg.com/@npmcli/metavuln-calculator/-/metavuln-calculator-1.1.1.tgz#2f95ff3c6d88b366dd70de1c3f304267c631b458" - integrity sha512-9xe+ZZ1iGVaUovBVFI9h3qW+UuECUzhvZPxK9RaEA2mjU26o5D0JloGYWwLYvQELJNmBdQB6rrpuN8jni6LwzQ== +"@npmcli/metavuln-calculator@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/metavuln-calculator/-/metavuln-calculator-2.0.0.tgz#70937b8b5a5cad5c588c8a7b38c4a8bd6f62c84c" + integrity sha512-VVW+JhWCKRwCTE+0xvD6p3uV4WpqocNYYtzyvenqL/u1Q3Xx6fGTJ+6UoIoii07fbuEO9U3IIyuGY0CYHDv1sg== dependencies: cacache "^15.0.5" - pacote "^11.1.11" + json-parse-even-better-errors "^2.3.1" + pacote "^12.0.0" semver "^7.3.2" +"@npmcli/metavuln-calculator@^3.0.1": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@npmcli/metavuln-calculator/-/metavuln-calculator-3.1.1.tgz#9359bd72b400f8353f6a28a25c8457b562602622" + integrity sha512-n69ygIaqAedecLeVH3KnO39M6ZHiJ2dEv5A7DGvcqCB8q17BGUgW8QaanIkbWUo2aYGZqJaOORTLAlIvKjNDKA== + dependencies: + cacache "^16.0.0" + json-parse-even-better-errors "^2.3.1" + pacote "^13.0.3" + semver "^7.3.5" + "@npmcli/move-file@^1.0.1", "@npmcli/move-file@^1.1.0": version "1.1.2" resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.1.2.tgz#1a82c3e372f7cae9253eb66d72543d6b8685c674" @@ -2636,16 +2729,34 @@ mkdirp "^1.0.4" rimraf "^3.0.2" +"@npmcli/move-file@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-2.0.1.tgz#26f6bdc379d87f75e55739bab89db525b06100e4" + integrity sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ== + dependencies: + mkdirp "^1.0.4" + rimraf "^3.0.2" + "@npmcli/name-from-folder@^1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@npmcli/name-from-folder/-/name-from-folder-1.0.1.tgz#77ecd0a4fcb772ba6fe927e2e2e155fbec2e6b1a" integrity sha512-qq3oEfcLFwNfEYOQ8HLimRGKlD8WSeGEdtUa7hmzpR8Sa7haL1KVQrvgO6wqMjhWFFVjgtrh1gIxDz+P8sjUaA== 
-"@npmcli/node-gyp@^1.0.1", "@npmcli/node-gyp@^1.0.2": +"@npmcli/node-gyp@^1.0.2": version "1.0.2" resolved "https://registry.yarnpkg.com/@npmcli/node-gyp/-/node-gyp-1.0.2.tgz#3cdc1f30e9736dbc417373ed803b42b1a0a29ede" integrity sha512-yrJUe6reVMpktcvagumoqD9r08fH1iRo01gn1u0zoCApa9lnZGEigVKUd2hzsCId4gdtkZZIVscLhNxMECKgRg== +"@npmcli/node-gyp@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@npmcli/node-gyp/-/node-gyp-1.0.3.tgz#a912e637418ffc5f2db375e93b85837691a43a33" + integrity sha512-fnkhw+fmX65kiLqk6E3BFLXNC26rUhK90zVwe2yncPliVT/Qos3xjhTLE59Df8KnPlcwIERXKVlU1bXoUQ+liA== + +"@npmcli/node-gyp@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/node-gyp/-/node-gyp-2.0.0.tgz#8c20e53e34e9078d18815c1d2dda6f2420d75e35" + integrity sha512-doNI35wIe3bBaEgrlPfdJPaCpUR89pJWep4Hq3aRdh6gKazIVWfs0jHttvSSoq47ZXgC7h73kDsUl8AoIQUB+A== + "@npmcli/package-json@^1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@npmcli/package-json/-/package-json-1.0.1.tgz#1ed42f00febe5293c3502fd0ef785647355f6e89" @@ -2653,6 +2764,13 @@ dependencies: json-parse-even-better-errors "^2.3.1" +"@npmcli/package-json@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/package-json/-/package-json-2.0.0.tgz#3bbcf4677e21055adbe673d9f08c9f9cde942e4a" + integrity sha512-42jnZ6yl16GzjWSH7vtrmWyJDGVa/LXPdpN2rcUWolFjc9ON2N3uz0qdBbQACfmhuJZ2lbKYtmK5qx68ZPLHMA== + dependencies: + json-parse-even-better-errors "^2.3.1" + "@npmcli/promise-spawn@^1.2.0", "@npmcli/promise-spawn@^1.3.2": version "1.3.2" resolved "https://registry.yarnpkg.com/@npmcli/promise-spawn/-/promise-spawn-1.3.2.tgz#42d4e56a8e9274fba180dabc0aea6e38f29274f5" @@ -2660,17 +2778,59 @@ dependencies: infer-owner "^1.0.4" -"@npmcli/run-script@^1.8.2": - version "1.8.4" - resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-1.8.4.tgz#03ced92503a6fe948cbc0975ce39210bc5e824d6" - integrity 
sha512-Yd9HXTtF1JGDXZw0+SOn+mWLYS0e7bHBHVC/2C8yqs4wUrs/k8rwBSinD7rfk+3WG/MFGRZKxjyoD34Pch2E/A== +"@npmcli/promise-spawn@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/promise-spawn/-/promise-spawn-3.0.0.tgz#53283b5f18f855c6925f23c24e67c911501ef573" + integrity sha512-s9SgS+p3a9Eohe68cSI3fi+hpcZUmXq5P7w0kMlAsWVtR7XbK3ptkZqKT2cK1zLDObJ3sR+8P59sJE0w/KTL1g== + dependencies: + infer-owner "^1.0.4" + +"@npmcli/run-script@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-2.0.0.tgz#9949c0cab415b17aaac279646db4f027d6f1e743" + integrity sha512-fSan/Pu11xS/TdaTpTB0MRn9guwGU8dye+x56mEVgBEd/QsybBbYcAL0phPXi8SGWFEChkQd6M9qL4y6VOpFig== dependencies: "@npmcli/node-gyp" "^1.0.2" "@npmcli/promise-spawn" "^1.3.2" - infer-owner "^1.0.4" - node-gyp "^7.1.0" + node-gyp "^8.2.0" read-package-json-fast "^2.0.1" +"@npmcli/run-script@^4.1.0", "@npmcli/run-script@^4.1.3", "@npmcli/run-script@^4.1.7": + version "4.2.1" + resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-4.2.1.tgz#c07c5c71bc1c70a5f2a06b0d4da976641609b946" + integrity sha512-7dqywvVudPSrRCW5nTHpHgeWnbBtz8cFkOuKrecm6ih+oO9ciydhWt6OF7HlqupRRmB8Q/gECVdB9LMfToJbRg== + dependencies: + "@npmcli/node-gyp" "^2.0.0" + "@npmcli/promise-spawn" "^3.0.0" + node-gyp "^9.0.0" + read-package-json-fast "^2.0.3" + which "^2.0.2" + +"@nrwl/cli@15.6.3": + version "15.6.3" + resolved "https://registry.yarnpkg.com/@nrwl/cli/-/cli-15.6.3.tgz#999531d6efb30afc39373bdcbd7e78254a3a3fd3" + integrity sha512-K4E0spofThZXMnhA6R8hkUTdfqmwSnUE2+DlD5Y3jqsvKTAgwF5U41IFkEouFZCf+dWjy0RA20bWoX48EVFtmQ== + dependencies: + nx "15.6.3" + +"@nrwl/devkit@>=15.4.2 < 16": + version "15.6.3" + resolved "https://registry.yarnpkg.com/@nrwl/devkit/-/devkit-15.6.3.tgz#e4e96c53ba3304786a49034286c8511534b2b194" + integrity sha512-/JDvdzNxUM+C1PCZPCrvmFx+OfywqZdOq1GS9QR8C0VctTLG4D/SGSFD88O1SAdcbH/f1mMiBGfEYZYd23fghQ== + dependencies: + "@phenomnomnominal/tsquery" "4.1.1" + ejs 
"^3.1.7" + ignore "^5.0.4" + semver "7.3.4" + tslib "^2.3.0" + +"@nrwl/tao@15.6.3": + version "15.6.3" + resolved "https://registry.yarnpkg.com/@nrwl/tao/-/tao-15.6.3.tgz#b24e11345375dea96bc386c60b9b1102a7584932" + integrity sha512-bDZbPIbU5Mf2BvX0q8GjPxrm1WkYyfW+gp7mLuuJth2sEpZiCr47mSwuGko/y4CKXvIX46VQcAS0pKQMKugXsg== + dependencies: + nx "15.6.3" + "@oclif/command@1.8.16", "@oclif/command@^1.8.14", "@oclif/command@^1.8.15": version "1.8.16" resolved "https://registry.yarnpkg.com/@oclif/command/-/command-1.8.16.tgz#bea46f81b2061b47e1cda318a0b923e62ca4cc0c" @@ -2707,7 +2867,7 @@ is-wsl "^2.1.1" tslib "^2.3.1" -"@oclif/core@^1.2.0", "@oclif/core@^1.3.6": +"@oclif/core@^1.3.6": version "1.6.0" resolved "https://registry.yarnpkg.com/@oclif/core/-/core-1.6.0.tgz#a91333275cd43a49097158f4ae8e15ccf718bd48" integrity sha512-JHerjgRd29EtUVpDIrzohq2XdxJfgmZVGHAFlf75QVhLGFaleopZAQNBXkHkxG//kGib0LhyVGW7azcFKzr1eQ== @@ -2742,6 +2902,40 @@ widest-line "^3.1.0" wrap-ansi "^7.0.0" +"@oclif/core@^2.0.7": + version "2.0.9" + resolved "https://registry.yarnpkg.com/@oclif/core/-/core-2.0.9.tgz#f38c7260653a60698772ff09c5cf91febbfe003b" + integrity sha512-SLwSQa1No4br0D9cLY8lleXvcs3K/YpSQdo0md6z8AHx3P+l0/fv9dtIlYYD8MqsRUcNPxshJ3ZkN2sNNM5VjQ== + dependencies: + "@types/cli-progress" "^3.11.0" + ansi-escapes "^4.3.2" + ansi-styles "^4.3.0" + cardinal "^2.1.1" + chalk "^4.1.2" + clean-stack "^3.0.1" + cli-progress "^3.10.0" + debug "^4.3.4" + ejs "^3.1.6" + fs-extra "^9.1.0" + get-package-type "^0.1.0" + globby "^11.1.0" + hyperlinker "^1.0.0" + indent-string "^4.0.0" + is-wsl "^2.2.0" + js-yaml "^3.14.1" + natural-orderby "^2.0.3" + object-treeify "^1.1.33" + password-prompt "^1.1.2" + semver "^7.3.7" + string-width "^4.2.3" + strip-ansi "^6.0.1" + supports-color "^8.1.1" + supports-hyperlinks "^2.2.0" + tslib "^2.4.1" + widest-line "^3.1.0" + wordwrap "^1.0.0" + wrap-ansi "^7.0.0" + "@oclif/dev-cli@1.26.10": version "1.26.10" resolved 
"https://registry.yarnpkg.com/@oclif/dev-cli/-/dev-cli-1.26.10.tgz#d8df3a79009b68552f5e7f249d1d19ca52278382" @@ -2822,14 +3016,14 @@ chalk "^2.4.2" tslib "^1.9.3" -"@oclif/plugin-autocomplete@1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@oclif/plugin-autocomplete/-/plugin-autocomplete-1.2.0.tgz#c807d4ee0fd745296ea745c0c8ca28a0c6233bf3" - integrity sha512-Y64uhbhQLcLms2N6kvoIb40s2czOECeMzGs0ATf/3kNojY2nsYaQ0mI6PghQs/JgpVg4DnZOJivleYBr+XPn7Q== +"@oclif/plugin-autocomplete@1.4.4": + version "1.4.4" + resolved "https://registry.yarnpkg.com/@oclif/plugin-autocomplete/-/plugin-autocomplete-1.4.4.tgz#c2e5fbcfbabc0a92df12f29d8a3e4a34fa7756e8" + integrity sha512-8Bcn1h1H5EORJ3UMYS91AE3lQh7Ks5u4na7hPXS1GsnWaN1MVdpipvEAPV2Uj5bUUC+xj/v0k7N4ld0BWkEx+w== dependencies: - "@oclif/core" "^1.2.0" + "@oclif/core" "^2.0.7" chalk "^4.1.0" - debug "^4.0.0" + debug "^4.3.4" fs-extra "^9.0.1" "@oclif/plugin-help@3.2.18": @@ -2880,6 +3074,13 @@ dependencies: "@octokit/types" "^6.0.3" +"@octokit/auth-token@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-3.0.3.tgz#ce7e48a3166731f26068d7a7a7996b5da58cbe0c" + integrity sha512-/aFM2M4HVDBT/jjDBa84sJniv1t9Gm/rLkalaz9htOm+L+8JMj1k9w0CkUdcxNyNxZPlTxKPVko+m1VlM58ZVA== + dependencies: + "@octokit/types" "^9.0.0" + "@octokit/core@^3.2.3": version "3.3.1" resolved "https://registry.yarnpkg.com/@octokit/core/-/core-3.3.1.tgz#c6bb6ba171ad84a5f430853a98892cfe8f93d8cd" @@ -2893,6 +3094,19 @@ before-after-hook "^2.2.0" universal-user-agent "^6.0.0" +"@octokit/core@^4.1.0": + version "4.2.0" + resolved "https://registry.yarnpkg.com/@octokit/core/-/core-4.2.0.tgz#8c253ba9605aca605bc46187c34fcccae6a96648" + integrity sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg== + dependencies: + "@octokit/auth-token" "^3.0.0" + "@octokit/graphql" "^5.0.0" + "@octokit/request" "^6.0.0" + "@octokit/request-error" "^3.0.0" + "@octokit/types" "^9.0.0" + 
before-after-hook "^2.2.0" + universal-user-agent "^6.0.0" + "@octokit/endpoint@^6.0.1": version "6.0.11" resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-6.0.11.tgz#082adc2aebca6dcefa1fb383f5efb3ed081949d1" @@ -2902,6 +3116,15 @@ is-plain-object "^5.0.0" universal-user-agent "^6.0.0" +"@octokit/endpoint@^7.0.0": + version "7.0.5" + resolved "https://registry.yarnpkg.com/@octokit/endpoint/-/endpoint-7.0.5.tgz#2bb2a911c12c50f10014183f5d596ce30ac67dd1" + integrity sha512-LG4o4HMY1Xoaec87IqQ41TQ+glvIeTKqfjkCEmt5AIwDZJwQeVZFIEYXrYY6yLwK+pAScb9Gj4q+Nz2qSw1roA== + dependencies: + "@octokit/types" "^9.0.0" + is-plain-object "^5.0.0" + universal-user-agent "^6.0.0" + "@octokit/graphql@^4.5.8": version "4.6.1" resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-4.6.1.tgz#f975486a46c94b7dbe58a0ca751935edc7e32cc9" @@ -2911,6 +3134,20 @@ "@octokit/types" "^6.0.3" universal-user-agent "^6.0.0" +"@octokit/graphql@^5.0.0": + version "5.0.5" + resolved "https://registry.yarnpkg.com/@octokit/graphql/-/graphql-5.0.5.tgz#a4cb3ea73f83b861893a6370ee82abb36e81afd2" + integrity sha512-Qwfvh3xdqKtIznjX9lz2D458r7dJPP8l6r4GQkIdWQouZwHQK0mVT88uwiU2bdTU2OtT1uOlKpRciUWldpG0yQ== + dependencies: + "@octokit/request" "^6.0.0" + "@octokit/types" "^9.0.0" + universal-user-agent "^6.0.0" + +"@octokit/openapi-types@^16.0.0": + version "16.0.0" + resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-16.0.0.tgz#d92838a6cd9fb4639ca875ddb3437f1045cc625e" + integrity sha512-JbFWOqTJVLHZSUUoF4FzAZKYtqdxWu9Z5m2QQnOyEa04fOFljvyh7D3GYKbfuaSWisqehImiVIMG4eyJeP5VEA== + "@octokit/openapi-types@^6.0.0": version "6.0.0" resolved "https://registry.yarnpkg.com/@octokit/openapi-types/-/openapi-types-6.0.0.tgz#7da8d7d5a72d3282c1a3ff9f951c8133a707480d" @@ -2928,18 +3165,22 @@ dependencies: "@octokit/types" "^6.11.0" +"@octokit/plugin-paginate-rest@^6.0.0": + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.0.0.tgz#f34b5a7d9416019126042cd7d7b811e006c0d561" + integrity sha512-Sq5VU1PfT6/JyuXPyt04KZNVsFOSBaYOAq2QRZUwzVlI10KFvcbUo8lR258AAQL1Et60b0WuVik+zOWKLuDZxw== + dependencies: + "@octokit/types" "^9.0.0" + "@octokit/plugin-request-log@^1.0.2": version "1.0.3" resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.3.tgz#70a62be213e1edc04bb8897ee48c311482f9700d" integrity sha512-4RFU4li238jMJAzLgAwkBAw+4Loile5haQMQr+uhFq27BmyJXcXSKvoQKqh0agsZEiUlW6iSv3FAgvmGkur7OQ== -"@octokit/plugin-rest-endpoint-methods@4.14.0": - version "4.14.0" - resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.14.0.tgz#1e76439897ea02d71379a1b06885c4f49378b8e8" - integrity sha512-QoZ469GDvFALHuxhcRA4KFGTaPeu5Z0MILGPa7irTGfYE0WfL+LFqWmULm9tuFKaKNlTcEQ7c5uJ0p4k5uvmNQ== - dependencies: - "@octokit/types" "^6.13.0" - deprecation "^2.3.1" +"@octokit/plugin-request-log@^1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz#5e50ed7083a613816b1e4a28aeec5fb7f1462e85" + integrity sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA== "@octokit/plugin-rest-endpoint-methods@5.0.0": version "5.0.0" @@ -2949,6 +3190,14 @@ "@octokit/types" "^6.13.0" deprecation "^2.3.1" +"@octokit/plugin-rest-endpoint-methods@^7.0.0": + version "7.0.1" + resolved "https://registry.yarnpkg.com/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.0.1.tgz#f7ebe18144fd89460f98f35a587b056646e84502" + integrity sha512-pnCaLwZBudK5xCdrR823xHGNgqOzRnJ/mpC/76YPpNP7DybdsJtP7mdOwh+wYZxK5jqeQuhu59ogMI4NRlBUvA== + dependencies: + "@octokit/types" "^9.0.0" + deprecation "^2.3.1" + "@octokit/request-error@^2.0.0", "@octokit/request-error@^2.0.5": version "2.0.5" resolved 
"https://registry.yarnpkg.com/@octokit/request-error/-/request-error-2.0.5.tgz#72cc91edc870281ad583a42619256b380c600143" @@ -2958,6 +3207,15 @@ deprecation "^2.0.0" once "^1.4.0" +"@octokit/request-error@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@octokit/request-error/-/request-error-3.0.3.tgz#ef3dd08b8e964e53e55d471acfe00baa892b9c69" + integrity sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ== + dependencies: + "@octokit/types" "^9.0.0" + deprecation "^2.0.0" + once "^1.4.0" + "@octokit/request@^5.3.0", "@octokit/request@^5.4.12": version "5.4.14" resolved "https://registry.yarnpkg.com/@octokit/request/-/request-5.4.14.tgz#ec5f96f78333bb2af390afa5ff66f114b063bc96" @@ -2972,6 +3230,18 @@ once "^1.4.0" universal-user-agent "^6.0.0" +"@octokit/request@^6.0.0": + version "6.2.3" + resolved "https://registry.yarnpkg.com/@octokit/request/-/request-6.2.3.tgz#76d5d6d44da5c8d406620a4c285d280ae310bdb4" + integrity sha512-TNAodj5yNzrrZ/VxP+H5HiYaZep0H3GU0O7PaF+fhDrt8FPrnkei9Aal/txsN/1P7V3CPiThG0tIvpPDYUsyAA== + dependencies: + "@octokit/endpoint" "^7.0.0" + "@octokit/request-error" "^3.0.0" + "@octokit/types" "^9.0.0" + is-plain-object "^5.0.0" + node-fetch "^2.6.7" + universal-user-agent "^6.0.0" + "@octokit/rest@^18.0.6": version "18.5.2" resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-18.5.2.tgz#0369e554b7076e3749005147be94c661c7a5a74b" @@ -2982,15 +3252,15 @@ "@octokit/plugin-request-log" "^1.0.2" "@octokit/plugin-rest-endpoint-methods" "5.0.0" -"@octokit/rest@^18.1.0": - version "18.4.0" - resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-18.4.0.tgz#b12ec99ca8a46d5f01ebe6e99947a5cbf9e007fb" - integrity sha512-3bFg0vyD3O+6EukYzLTu4tUapMofSR4nYgMEOJc25sefippsatiWfNoOnx0QNj3PIXVJdW0riUjQnDwgS0JNWA== +"@octokit/rest@^19.0.3": + version "19.0.7" + resolved "https://registry.yarnpkg.com/@octokit/rest/-/rest-19.0.7.tgz#d2e21b4995ab96ae5bfae50b4969da7e04e0bb70" + integrity 
sha512-HRtSfjrWmWVNp2uAkEpQnuGMJsu/+dBr47dRc5QVgsCbnIc1+GFEaoKBWkYG+zjrsHpSqcAElMio+n10c0b5JA== dependencies: - "@octokit/core" "^3.2.3" - "@octokit/plugin-paginate-rest" "^2.6.2" - "@octokit/plugin-request-log" "^1.0.2" - "@octokit/plugin-rest-endpoint-methods" "4.14.0" + "@octokit/core" "^4.1.0" + "@octokit/plugin-paginate-rest" "^6.0.0" + "@octokit/plugin-request-log" "^1.0.4" + "@octokit/plugin-rest-endpoint-methods" "^7.0.0" "@octokit/types@^6.0.3", "@octokit/types@^6.11.0", "@octokit/types@^6.13.0", "@octokit/types@^6.7.1": version "6.13.0" @@ -2999,6 +3269,28 @@ dependencies: "@octokit/openapi-types" "^6.0.0" +"@octokit/types@^9.0.0": + version "9.0.0" + resolved "https://registry.yarnpkg.com/@octokit/types/-/types-9.0.0.tgz#6050db04ddf4188ec92d60e4da1a2ce0633ff635" + integrity sha512-LUewfj94xCMH2rbD5YJ+6AQ4AVjFYTgpp6rboWM5T7N3IsIF65SBEOVcYMGAEzO/kKNiNaW4LoWtoThOhH06gw== + dependencies: + "@octokit/openapi-types" "^16.0.0" + +"@parcel/watcher@2.0.4": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@parcel/watcher/-/watcher-2.0.4.tgz#f300fef4cc38008ff4b8c29d92588eced3ce014b" + integrity sha512-cTDi+FUDBIUOBKEtj+nhiJ71AZVlkAsQFuGQTun5tV9mwQBQgZvhCzG+URPQc8myeN32yRVZEfVAPCs1RW+Jvg== + dependencies: + node-addon-api "^3.2.1" + node-gyp-build "^4.3.0" + +"@phenomnomnominal/tsquery@4.1.1": + version "4.1.1" + resolved "https://registry.yarnpkg.com/@phenomnomnominal/tsquery/-/tsquery-4.1.1.tgz#42971b83590e9d853d024ddb04a18085a36518df" + integrity sha512-jjMmK1tnZbm1Jq5a7fBliM4gQwjxMU7TFoRNwIyzwlO+eHPRCFv/Nv+H/Gi1jc3WR7QURG8D5d0Tn12YGrUqBQ== + dependencies: + esquery "^1.0.1" + "@pmmmwh/react-refresh-webpack-plugin@0.4.3": version "0.4.3" resolved "https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.4.3.tgz#1eec460596d200c0236bf195b078a5d1df89b766" @@ -3246,6 +3538,11 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" integrity 
sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== +"@tootallnate/once@2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" + integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== + "@ts-morph/common@~0.7.0": version "0.7.3" resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.7.3.tgz#380020c278e4aa6cecedf362a1157591d1003267" @@ -3340,6 +3637,13 @@ resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.8.tgz#c8d645506db0d15f4aafd4dfa873f443ad87ea59" integrity sha512-U1bQiWbln41Yo6EeHMr+34aUhvrMVyrhn9lYfPSpLTCrZlGxU4Rtn1bocX+0p2Fc/Jkd2FanCEXdw0WNfHHM0w== +"@types/cli-progress@^3.11.0": + version "3.11.0" + resolved "https://registry.yarnpkg.com/@types/cli-progress/-/cli-progress-3.11.0.tgz#ec79df99b26757c3d1c7170af8422e0fc95eef7e" + integrity sha512-XhXhBv1R/q2ahF3BM7qT5HLzJNlIL0wbcGyZVjqOTqAybAnsLisd7gy1UCyIqpL+5Iv6XhlSyzjLCnI2sIdbCg== + dependencies: + "@types/node" "*" + "@types/component-emitter@*": version "1.2.7" resolved "https://registry.yarnpkg.com/@types/component-emitter/-/component-emitter-1.2.7.tgz#d49a2c65a89c8b594e7355a92e43cf1d278b577c" @@ -4452,6 +4756,26 @@ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== +"@yarnpkg/lockfile@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" + integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== + +"@yarnpkg/parsers@^3.0.0-rc.18": + version "3.0.0-rc.38" + resolved "https://registry.yarnpkg.com/@yarnpkg/parsers/-/parsers-3.0.0-rc.38.tgz#91b393554017016e12d2f4ea33f589dcfe7d5670" + integrity 
sha512-YqkUSOZSBjbhzvU/ZbK6yoE70L/KVXAQTyUMaKAFoHEpy7csAljivTBu0C3SZKbDxMRjFWAvnLS8US7W3hFLow== + dependencies: + js-yaml "^3.10.0" + tslib "^2.4.0" + +"@zkochan/js-yaml@0.0.6": + version "0.0.6" + resolved "https://registry.yarnpkg.com/@zkochan/js-yaml/-/js-yaml-0.0.6.tgz#975f0b306e705e28b8068a07737fa46d3fc04826" + integrity sha512-nzvgl3VfhcELQ8LyVrYOru+UtAy1nrygk2+AGbTm8a5YcO6o8lSjAT+pfg3vJWxIoZKOUhrK6UU7xW/+00kQrg== + dependencies: + argparse "^2.0.1" + JSONStream@^1.0.3, JSONStream@^1.0.4: version "1.3.5" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0" @@ -4465,7 +4789,7 @@ abab@^2.0.3: resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.3.tgz#623e2075e02eb2d3f2475e49f99c91846467907a" integrity sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg== -abbrev@1: +abbrev@1, abbrev@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== @@ -4604,6 +4928,15 @@ agentkeepalive@^4.1.3: depd "^1.1.2" humanize-ms "^1.2.1" +agentkeepalive@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.2.1.tgz#a7975cbb9f83b367f06c90cc51ff28fe7d499717" + integrity sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA== + dependencies: + debug "^4.1.0" + depd "^1.1.2" + humanize-ms "^1.2.1" + aggregate-error@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.0.1.tgz#db2fe7246e536f40d9b5442a39e117d7dd6a24e0" @@ -4785,28 +5118,36 @@ anymatch@^3.0.3, anymatch@~3.1.1: normalize-path "^3.0.0" picomatch "^2.0.4" -aproba@^1.0.3, aproba@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - 
integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== - -aproba@^2.0.0: +"aproba@^1.0.3 || ^2.0.0", aproba@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ== +aproba@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== + arch@^2.1.2: version "2.2.0" resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11" integrity sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ== -are-we-there-yet@^1.1.5, are-we-there-yet@~1.1.2: - version "1.1.5" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" - integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w== +are-we-there-yet@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz#372e0e7bd279d8e94c653aaa1f67200884bf3e1c" + integrity sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw== + dependencies: + delegates "^1.0.0" + readable-stream "^3.6.0" + +are-we-there-yet@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz#679df222b278c64f2cdba1175cdc00b0d96164bd" + integrity sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg== dependencies: delegates "^1.0.0" - readable-stream "^2.0.6" + readable-stream "^3.6.0" arg@^4.1.0: version "4.1.3" @@ -4820,6 +5161,11 @@ argparse@^1.0.7: dependencies: 
sprintf-js "~1.0.2" +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + aria-query@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" @@ -4853,11 +5199,6 @@ array-differ@^3.0.0: resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b" integrity sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg== -array-find-index@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" - integrity sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E= - array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" @@ -5007,11 +5348,6 @@ async-limiter@^1.0.0, async-limiter@~1.0.0: resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== -async@0.9.x: - version "0.9.2" - resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d" - integrity sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0= - async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" @@ -5024,6 +5360,11 @@ async@^3.1.0, async@^3.2.0: resolved "https://registry.yarnpkg.com/async/-/async-3.2.3.tgz#ac53dafd3f4720ee9e8a160628f18ea91df196c9" integrity sha512-spZRyzKL5l5BZQrr/6m/SqFdBN0q3OCI0f9rjfBzCMBIP4p75P620rR3gTmaksNOhmzgdxcaxdNfMy6anrbM0g== +async@^3.2.3: + version "3.2.4" + resolved 
"https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" + integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== + asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" @@ -5067,19 +5408,14 @@ axe-core@^4.0.2: resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.1.2.tgz#7cf783331320098bfbef620df3b3c770147bc224" integrity sha512-V+Nq70NxKhYt89ArVcaNL9FDryB3vQOd+BFXZIfO3RP6rwtj+2yqqqdHEkacutglPaZLkJeuXKCjCJDMGPtPqg== -axios@0.21.1: - version "0.21.1" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.1.tgz#22563481962f4d6bde9a76d516ef0e5d3c09b2b8" - integrity sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA== - dependencies: - follow-redirects "^1.10.0" - -axios@0.26.1: - version "0.26.1" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9" - integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA== +axios@1.3.2, axios@^1.0.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.3.2.tgz#7ac517f0fa3ec46e0e636223fd973713a09c72b3" + integrity sha512-1M3O703bYqYuPhbHeya5bnhpYVsDDRyQSabNja04mZtboLNSuZ4YrltestrLXfHgmzua4TpUqRiVKbiQuo2epw== dependencies: - follow-redirects "^1.14.8" + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" axios@^0.19.2: version "0.19.2" @@ -5330,17 +5666,17 @@ big.js@^5.2.2: resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== -bin-links@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/bin-links/-/bin-links-2.2.1.tgz#347d9dbb48f7d60e6c11fe68b77a424bee14d61b" - integrity 
sha512-wFzVTqavpgCCYAh8SVBdnZdiQMxTkGR+T3b14CNpBXIBe2neJWaMGAZ55XWWHELJJ89dscuq0VCBqcVaIOgCMg== +bin-links@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/bin-links/-/bin-links-3.0.3.tgz#3842711ef3db2cd9f16a5f404a996a12db355a6e" + integrity sha512-zKdnMPWEdh4F5INR07/eBrodC7QrF5JKvqskjz/ZZRXg5YSAZIbn8zGhbhUrElzHBZ2fvEQdOU59RHcTG3GiwA== dependencies: - cmd-shim "^4.0.1" - mkdirp "^1.0.3" - npm-normalize-package-bin "^1.0.0" - read-cmd-shim "^2.0.0" + cmd-shim "^5.0.0" + mkdirp-infer-owner "^2.0.0" + npm-normalize-package-bin "^2.0.0" + read-cmd-shim "^3.0.0" rimraf "^3.0.0" - write-file-atomic "^3.0.3" + write-file-atomic "^4.0.0" binary-extensions@^1.0.0: version "1.13.1" @@ -5391,6 +5727,15 @@ bl@^3.0.0: dependencies: readable-stream "^3.0.1" +bl@^4.0.3, bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + blob-util@2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/blob-util/-/blob-util-2.0.2.tgz#3b4e3c281111bb7f11128518006cdc60b403a1eb" @@ -5463,6 +5808,13 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + braces@^2.3.1, braces@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" @@ -5740,6 +6092,13 @@ builtins@^1.0.3: resolved "https://registry.yarnpkg.com/builtins/-/builtins-1.0.3.tgz#cb94faeb61c8696451db36534e1422f94f0aee88" integrity 
sha1-y5T662HIaWRR2zZTThQi+U8K7og= +builtins@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/builtins/-/builtins-5.0.1.tgz#87f6db9ab0458be728564fa81d876d8d74552fa9" + integrity sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ== + dependencies: + semver "^7.0.0" + bunyan@1.8.15: version "1.8.15" resolved "https://registry.yarnpkg.com/bunyan/-/bunyan-1.8.15.tgz#8ce34ca908a17d0776576ca1b2f6cbd916e93b46" @@ -5760,11 +6119,6 @@ bunyan@^1.8.12: mv "~2" safe-json-stringify "~1" -byline@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1" - integrity sha1-dBxSFkaOrcRXsDQQEYrXfejB3bE= - byte-size@^7.0.0: version "7.0.1" resolved "https://registry.yarnpkg.com/byte-size/-/byte-size-7.0.1.tgz#b1daf3386de7ab9d706b941a748dbfc71130dee3" @@ -5876,6 +6230,30 @@ cacache@^15.2.0: tar "^6.0.2" unique-filename "^1.1.1" +cacache@^16.0.0, cacache@^16.0.6, cacache@^16.1.0: + version "16.1.3" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-16.1.3.tgz#a02b9f34ecfaf9a78c9f4bc16fceb94d5d67a38e" + integrity sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ== + dependencies: + "@npmcli/fs" "^2.1.0" + "@npmcli/move-file" "^2.0.0" + chownr "^2.0.0" + fs-minipass "^2.1.0" + glob "^8.0.1" + infer-owner "^1.0.4" + lru-cache "^7.7.1" + minipass "^3.1.6" + minipass-collect "^1.0.2" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + mkdirp "^1.0.4" + p-map "^4.0.0" + promise-inflight "^1.0.1" + rimraf "^3.0.2" + ssri "^9.0.0" + tar "^6.1.11" + unique-filename "^2.0.0" + cache-base@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" @@ -5941,14 +6319,6 @@ camel-case@^4.1.1: pascal-case "^3.1.2" tslib "^2.0.3" -camelcase-keys@^2.0.0: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" - integrity sha1-MIvur/3ygRkFHvodkyITyRuPkuc= - dependencies: - camelcase "^2.0.0" - map-obj "^1.0.0" - camelcase-keys@^6.2.2: version "6.2.2" resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-6.2.2.tgz#5e755d6ba51aa223ec7d3d52f25778210f9dc3c0" @@ -5963,11 +6333,6 @@ camelcase@5.3.1, camelcase@^5.0.0, camelcase@^5.3.1: resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" - integrity sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8= - camelcase@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.0.0.tgz#5259f7c30e35e278f1bdc2a4d91230b37cad981e" @@ -6054,7 +6419,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.1.2: +chalk@^4.0.2, chalk@^4.1.1, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -6116,7 +6481,7 @@ chokidar@^2.1.8: optionalDependencies: fsevents "^1.2.7" -chownr@^1.1.1, chownr@^1.1.4: +chownr@^1.1.1: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== @@ -6180,6 +6545,13 @@ clean-stack@^3.0.0, clean-stack@^3.0.1: dependencies: escape-string-regexp "4.0.0" +cli-cursor@3.1.0, cli-cursor@^3.1.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== + dependencies: + restore-cursor "^3.1.0" + cli-cursor@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" @@ -6194,13 +6566,6 @@ cli-cursor@^2.0.0, cli-cursor@^2.1.0: dependencies: restore-cursor "^2.0.0" -cli-cursor@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" - integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== - dependencies: - restore-cursor "^3.1.0" - cli-progress@^3.10.0: version "3.10.0" resolved "https://registry.yarnpkg.com/cli-progress/-/cli-progress-3.10.0.tgz#63fd9d6343c598c93542fdfa3563a8b59887d78a" @@ -6216,6 +6581,16 @@ cli-progress@^3.4.0: colors "^1.1.2" string-width "^2.1.1" +cli-spinners@2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.1.tgz#adc954ebe281c37a6319bfa401e6dd2488ffb70d" + integrity sha512-x/5fWmGMnbKQAaNwN+UZlV79qBLM9JFnJuJ03gIi5whrob0xV0ofNVHy9DhwGdsMJQc2OKv0oGmLzvaqvAVv+g== + +cli-spinners@^2.5.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.7.0.tgz#f815fd30b5f9eaac02db604c7a231ed7cb2f797a" + integrity sha512-qu3pN8Y3qHNgE2AFweciB1IfMnmZ/fsNTEE+NOFjmGB2F/7rLhnhzppvpCnN4FovtP26k8lHyy9ptEbNwWFLzw== + cli-table3@0.6.0, cli-table3@~0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.0.tgz#b7b1bc65ca8e7b5cef9124e13dc2b21e2ce4faee" @@ -6314,6 +6689,15 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity 
sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clone-buffer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58" @@ -6357,10 +6741,10 @@ cloneable-readable@^1.0.0: process-nextick-args "^2.0.0" readable-stream "^2.3.5" -cmd-shim@^4.0.1, cmd-shim@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/cmd-shim/-/cmd-shim-4.1.0.tgz#b3a904a6743e9fede4148c6f3800bf2a08135bdd" - integrity sha512-lb9L7EM4I/ZRVuljLPEtUJOP+xiQVknZ4ZMpMgEp4JzNldPb27HU03hi6K1/6CoIuit/Zm/LQXySErFeXxDprw== +cmd-shim@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/cmd-shim/-/cmd-shim-5.0.0.tgz#8d0aaa1a6b0708630694c4dbde070ed94c707724" + integrity sha512-qkCtZ59BidfEwHltnJwkyVZn+XQojdAySM1D1gSeh11Z4pW1Kpolkyo53L5noc0nrxmIvyFwTmJRo4xs7FFLPw== dependencies: mkdirp-infer-owner "^2.0.0" @@ -6433,6 +6817,11 @@ color-string@^1.5.4: color-name "^1.0.0" simple-swizzle "^0.2.2" +color-support@^1.1.2, color-support@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== + color@^3.0.0: version "3.1.3" resolved "https://registry.yarnpkg.com/color/-/color-3.1.3.tgz#ca67fb4e7b97d611dcde39eceed422067d91596e" @@ -6451,12 +6840,12 @@ colors@^1.1.2: resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== -columnify@^1.5.4: - version "1.5.4" - resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.5.4.tgz#4737ddf1c7b69a8a7c340570782e947eec8e78bb" - integrity sha1-Rzfd8ce2mop8NAVweC6UfuyOeLs= 
+columnify@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.6.0.tgz#6989531713c9008bb29735e61e37acf5bd553cf3" + integrity sha512-lomjuFZKfM6MSAnV9aCZC9sc0qGbmZdfygNv+nCpqVkSKdCxCklLtd16O0EILGkImHw9ZpHkAnHaB+8Zxq5W6Q== dependencies: - strip-ansi "^3.0.0" + strip-ansi "^6.0.1" wcwidth "^1.0.0" combine-source-map@^0.8.0, combine-source-map@~0.8.0: @@ -6469,7 +6858,7 @@ combine-source-map@^0.8.0, combine-source-map@~0.8.0: lodash.memoize "~3.0.3" source-map "~0.5.3" -combined-stream@^1.0.6, combined-stream@~1.0.6: +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== @@ -6609,7 +6998,7 @@ console-browserify@^1.1.0: resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== -console-control-strings@^1.0.0, console-control-strings@~1.1.0: +console-control-strings@^1.0.0, console-control-strings@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= @@ -6651,16 +7040,16 @@ conventional-changelog-angular@^5.0.12: compare-func "^2.0.0" q "^1.5.1" -conventional-changelog-core@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/conventional-changelog-core/-/conventional-changelog-core-4.2.2.tgz#f0897df6d53b5d63dec36b9442bd45354f8b3ce5" - integrity sha512-7pDpRUiobQDNkwHyJG7k9f6maPo9tfPzkSWbRq97GGiZqisElhnvUZSvyQH20ogfOjntB5aadvv6NNcKL1sReg== +conventional-changelog-core@^4.2.4: + version "4.2.4" + resolved 
"https://registry.yarnpkg.com/conventional-changelog-core/-/conventional-changelog-core-4.2.4.tgz#e50d047e8ebacf63fac3dc67bf918177001e1e9f" + integrity sha512-gDVS+zVJHE2v4SLc6B0sLsPiloR0ygU7HaDW14aNJE1v4SlqJPILPl/aJC7YdtRE4CybBf8gDwObBvKha8Xlyg== dependencies: add-stream "^1.0.0" - conventional-changelog-writer "^4.0.18" + conventional-changelog-writer "^5.0.0" conventional-commits-parser "^3.2.0" dateformat "^3.0.0" - get-pkg-repo "^1.0.0" + get-pkg-repo "^4.0.0" git-raw-commits "^2.0.8" git-remote-origin-url "^2.0.0" git-semver-tags "^4.1.1" @@ -6669,7 +7058,6 @@ conventional-changelog-core@^4.2.2: q "^1.5.1" read-pkg "^3.0.0" read-pkg-up "^3.0.0" - shelljs "^0.8.3" through2 "^4.0.0" conventional-changelog-preset-loader@^2.3.4: @@ -6677,15 +7065,14 @@ conventional-changelog-preset-loader@^2.3.4: resolved "https://registry.yarnpkg.com/conventional-changelog-preset-loader/-/conventional-changelog-preset-loader-2.3.4.tgz#14a855abbffd59027fd602581f1f34d9862ea44c" integrity sha512-GEKRWkrSAZeTq5+YjUZOYxdHq+ci4dNwHvpaBC3+ENalzFWuCWa9EZXSuZBpkr72sMdKB+1fyDV4takK1Lf58g== -conventional-changelog-writer@^4.0.18: - version "4.1.0" - resolved "https://registry.yarnpkg.com/conventional-changelog-writer/-/conventional-changelog-writer-4.1.0.tgz#1ca7880b75aa28695ad33312a1f2366f4b12659f" - integrity sha512-WwKcUp7WyXYGQmkLsX4QmU42AZ1lqlvRW9mqoyiQzdD+rJWbTepdWoKJuwXTS+yq79XKnQNa93/roViPQrAQgw== +conventional-changelog-writer@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/conventional-changelog-writer/-/conventional-changelog-writer-5.0.1.tgz#e0757072f045fe03d91da6343c843029e702f359" + integrity sha512-5WsuKUfxW7suLblAbFnxAcrvf6r+0b7GvNaWUwUIk0bXMnENP/PEieGKVUQrjPqwPT4o3EPAASBXiY6iHooLOQ== dependencies: - compare-func "^2.0.0" conventional-commits-filter "^2.0.7" dateformat "^3.0.0" - handlebars "^4.7.6" + handlebars "^4.7.7" json-stringify-safe "^5.0.1" lodash "^4.17.15" meow "^8.0.0" @@ -7168,13 +7555,6 @@ csstype@^3.0.2: resolved 
"https://registry.yarnpkg.com/csstype/-/csstype-3.0.6.tgz#865d0b5833d7d8d40f4e5b8a6d76aea3de4725ef" integrity sha512-+ZAmfyWMT7TiIlzdqJgjMb7S4f1beorDbWbsocyK4RaiqA5RTX3K14bnBWmmA9QEM0gRdsjyyrEmcyga8Zsxmw== -currently-unhandled@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" - integrity sha1-mI3zP+qxke95mmE2nddsF635V+o= - dependencies: - array-find-index "^1.0.1" - cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" @@ -7295,14 +7675,14 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.9: dependencies: ms "2.0.0" -debug@4, debug@4.3.1, debug@^4.0.0, debug@^4.2.0, debug@^4.3.1: +debug@4, debug@4.3.1, debug@^4.2.0, debug@^4.3.1: version "4.3.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== dependencies: ms "2.1.2" -debug@4.3.4, debug@^4.3.3: +debug@4.3.4, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -7343,7 +7723,7 @@ decamelize-keys@^1.1.0: decamelize "^1.1.0" map-obj "^1.0.0" -decamelize@^1.1.0, decamelize@^1.1.2, decamelize@^1.2.0: +decamelize@^1.1.0, decamelize@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= @@ -7413,6 +7793,11 @@ deferred-leveldown@~5.3.0: abstract-leveldown "~6.2.1" inherits "^2.0.3" +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" @@ -7723,6 +8108,11 @@ dotenv@8.2.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== +dotenv@~10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" + integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== + dtrace-provider@~0.8: version "0.8.8" resolved "https://registry.yarnpkg.com/dtrace-provider/-/dtrace-provider-0.8.8.tgz#2996d5490c37e1347be263b423ed7b297fb0d97e" @@ -7784,12 +8174,12 @@ ejs@^2.6.1: resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba" integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA== -ejs@^3.1.6: - version "3.1.6" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.6.tgz#5bfd0a0689743bb5268b3550cceeebbc1702822a" - integrity sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw== +ejs@^3.1.6, ejs@^3.1.7: + version "3.1.8" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" + integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== dependencies: - jake "^10.6.1" + jake "^10.8.5" electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.649: version "1.3.669" @@ -7854,7 +8244,7 @@ 
encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= -encoding@^0.1.12: +encoding@^0.1.12, encoding@^0.1.13: version "0.1.13" resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== @@ -7884,7 +8274,7 @@ enquirer@^2.3.5: dependencies: ansi-colors "^3.2.1" -enquirer@^2.3.6: +enquirer@^2.3.6, enquirer@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== @@ -8328,7 +8718,7 @@ esprima@^4.0.0, esprima@^4.0.1, esprima@~4.0.0: resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -esquery@^1.4.0: +esquery@^1.0.1, esquery@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== @@ -8771,6 +9161,17 @@ fast-deep-equal@^3.1.1: resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== +fast-glob@3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1" + integrity sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" 
"^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + fast-glob@^3.0.3: version "3.1.1" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.1.1.tgz#87ee30e9e9f3eb40d6f254a7997655da753d7c82" @@ -8872,6 +9273,13 @@ figgy-pudding@^3.5.1: resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e" integrity sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw== +figures@3.2.0, figures@^3.0.0, figures@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + figures@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" @@ -8887,13 +9295,6 @@ figures@^2.0.0: dependencies: escape-string-regexp "^1.0.5" -figures@^3.0.0, figures@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" - integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== - dependencies: - escape-string-regexp "^1.0.5" - file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" @@ -8943,11 +9344,6 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -filter-obj@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b" - integrity sha1-mzERErxsYSehbgFsbF1/GeCAXFs= - finalhandler@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" @@ -8987,14 +9383,6 
@@ find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - integrity sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8= - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - find-up@^2.0.0, find-up@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" @@ -9047,6 +9435,11 @@ flat-cache@^3.0.4: flatted "^3.1.0" rimraf "^3.0.2" +flat@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" + integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== + flatted@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" @@ -9072,11 +9465,16 @@ follow-redirects@1.5.10: dependencies: debug "=3.1.0" -follow-redirects@^1.0.0, follow-redirects@^1.10.0, follow-redirects@^1.14.8: +follow-redirects@^1.0.0: version "1.14.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.9.tgz#dd4ea157de7bfaf9ea9b3fbd85aa16951f78d8d7" integrity sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w== +follow-redirects@^1.15.0: + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -9100,6 +9498,15 @@ fork-ts-checker-webpack-plugin@4.1.6: tapable "^1.0.0" worker-rpc "^0.1.0" +form-data@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + form-data@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" @@ -9159,6 +9566,15 @@ fs-extra@9.1.0, fs-extra@^9.1.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs-extra@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.1.0.tgz#5784b102104433bb0e090f48bfc4a30742c357ed" + integrity sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-extra@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-6.0.1.tgz#8abc128f7946e310135ddc93b98bddb410e7a34b" @@ -9196,13 +9612,6 @@ fs-extra@^9.0.1: jsonfile "^6.0.1" universalify "^1.0.0" -fs-minipass@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" - integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== - dependencies: - minipass "^2.6.0" - fs-minipass@^2.0.0, fs-minipass@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" @@ -9253,19 +9662,34 @@ functional-red-black-tree@^1.0.1: resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= -gauge@~2.7.3: - version "2.7.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" - integrity 
sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= +gauge@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-3.0.2.tgz#03bf4441c044383908bcfa0656ad91803259b395" + integrity sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q== dependencies: - aproba "^1.0.3" + aproba "^1.0.3 || ^2.0.0" + color-support "^1.1.2" console-control-strings "^1.0.0" - has-unicode "^2.0.0" - object-assign "^4.1.0" + has-unicode "^2.0.1" + object-assign "^4.1.1" signal-exit "^3.0.0" - string-width "^1.0.1" - strip-ansi "^3.0.1" - wide-align "^1.1.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + wide-align "^1.1.2" + +gauge@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce" + integrity sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg== + dependencies: + aproba "^1.0.3 || ^2.0.0" + color-support "^1.1.3" + console-control-strings "^1.1.0" + has-unicode "^2.0.1" + signal-exit "^3.0.7" + string-width "^4.2.3" + strip-ansi "^6.0.1" + wide-align "^1.1.5" gensync@^1.0.0-beta.1: version "1.0.0-beta.1" @@ -9301,27 +9725,21 @@ get-package-type@^0.1.0: resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-pkg-repo@^1.0.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-1.4.0.tgz#c73b489c06d80cc5536c2c853f9e05232056972d" - integrity sha1-xztInAbYDMVTbCyFP54FIyBWly0= +get-pkg-repo@^4.0.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-4.2.1.tgz#75973e1c8050c73f48190c52047c4cee3acbf385" + integrity sha512-2+QbHjFRfGB74v/pYWjd5OhU3TDIC2Gv/YKUTk/tCvAz0pkn/Mz6P3uByuBimLOcPvN2jYdScl3xGFSrx0jEcA== dependencies: - hosted-git-info "^2.1.4" - meow "^3.3.0" - 
normalize-package-data "^2.3.0" - parse-github-repo-url "^1.3.0" + "@hutson/parse-repository-url" "^3.0.0" + hosted-git-info "^4.0.0" through2 "^2.0.0" + yargs "^16.2.0" get-port@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== -get-stdin@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" - integrity sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4= - get-stream@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" @@ -9392,20 +9810,20 @@ git-semver-tags@^4.1.1: meow "^8.0.0" semver "^6.0.0" -git-up@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/git-up/-/git-up-4.0.2.tgz#10c3d731051b366dc19d3df454bfca3f77913a7c" - integrity sha512-kbuvus1dWQB2sSW4cbfTeGpCMd8ge9jx9RKnhXhuJ7tnvT+NIrTVfYZxjtflZddQYcmdOTlkAcjmx7bor+15AQ== +git-up@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/git-up/-/git-up-7.0.0.tgz#bace30786e36f56ea341b6f69adfd83286337467" + integrity sha512-ONdIrbBCFusq1Oy0sC71F5azx8bVkvtZtMJAsv+a6lz5YAmbNnLD6HAB4gptHZVLPR8S2/kVN6Gab7lryq5+lQ== dependencies: - is-ssh "^1.3.0" - parse-url "^5.0.0" + is-ssh "^1.4.0" + parse-url "^8.1.0" -git-url-parse@^11.4.4: - version "11.4.4" - resolved "https://registry.yarnpkg.com/git-url-parse/-/git-url-parse-11.4.4.tgz#5d747debc2469c17bc385719f7d0427802d83d77" - integrity sha512-Y4o9o7vQngQDIU9IjyCmRJBin5iYjI5u9ZITnddRZpD7dcCFQj2sL2XuMNbLRE4b4B/4ENPsp2Q8P44fjAZ0Pw== +git-url-parse@^13.1.0: + version "13.1.0" + resolved "https://registry.yarnpkg.com/git-url-parse/-/git-url-parse-13.1.0.tgz#07e136b5baa08d59fabdf0e33170de425adf07b4" + integrity sha512-5FvPJP/70WkIprlUZ33bm4UAaFdjcLkJLpWft1BeZKqwR0uhhNGoKwlUaPtVb4LxCSQ++erHapRak9kWGj+FCA== 
dependencies: - git-up "^4.0.0" + git-up "^7.0.0" gitconfiglocal@^1.0.0: version "1.0.0" @@ -9443,6 +9861,18 @@ glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@^5.1.1, glob-parent@^5.1.2, dependencies: is-glob "^4.0.1" +glob@7.1.4: + version "7.1.4" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" + integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + glob@^6.0.1: version "6.0.4" resolved "https://registry.yarnpkg.com/glob/-/glob-6.0.4.tgz#0f08860f6a155127b2fadd4f9ce24b1aab6e4d22" @@ -9466,6 +9896,17 @@ glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.0, glob@^7.1.1, glob@^7.1.2, gl once "^1.3.0" path-is-absolute "^1.0.0" +glob@^8.0.1: + version "8.1.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e" + integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^5.0.1" + once "^1.3.0" + global-dirs@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-2.0.1.tgz#acdf3bb6685bcd55cb35e8a052266569e9469201" @@ -9569,7 +10010,7 @@ globby@^6.1.0: pify "^2.0.0" pinkie-promise "^2.0.0" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.5, graceful-fs@^4.2.2: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.5: version "4.2.6" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== @@ -9589,6 +10030,11 @@ graceful-fs@^4.2.4: resolved 
"https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== +graceful-fs@^4.2.6: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + grouped-queue@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/grouped-queue/-/grouped-queue-2.0.0.tgz#a2c6713f2171e45db2c300a3a9d7c119d694dac8" @@ -9612,7 +10058,7 @@ handle-thing@^2.0.0: resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== -handlebars@^4.7.6: +handlebars@^4.7.7: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== @@ -9674,7 +10120,7 @@ has-symbols@^1.0.0, has-symbols@^1.0.1, has-symbols@^1.0.2: resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== -has-unicode@^2.0.0, has-unicode@^2.0.1: +has-unicode@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= @@ -9781,6 +10227,20 @@ hosted-git-info@^2.1.4: resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" integrity 
sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== +hosted-git-info@^3.0.6: + version "3.0.8" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-3.0.8.tgz#6e35d4cc87af2c5f816e4cb9ce350ba87a3f370d" + integrity sha512-aXpmwoOhRBrw6X3j0h5RloK4x1OzsxMPyxqIHyNfSe2pypkVTZFpEiRoSipPEPlMrh0HW/XsjkJ5WgnCirpNUw== + dependencies: + lru-cache "^6.0.0" + +hosted-git-info@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" + integrity sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA== + dependencies: + lru-cache "^6.0.0" + hosted-git-info@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.0.2.tgz#5e425507eede4fea846b7262f0838456c4209961" @@ -9788,6 +10248,13 @@ hosted-git-info@^4.0.1: dependencies: lru-cache "^6.0.0" +hosted-git-info@^5.0.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-5.2.1.tgz#0ba1c97178ef91f3ab30842ae63d6a272341156f" + integrity sha512-xIcQYMnhcx2Nr4JTjsFmwwnr9vldugPy9uVm0o87bjqqWMv9GaqsTeT+i99wTl0mk1uLxJtHxLb8kymqTENQsw== + dependencies: + lru-cache "^7.5.1" + hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" @@ -9876,9 +10343,9 @@ htmlparser2@^3.10.1: readable-stream "^3.1.1" http-cache-semantics@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" - integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== + version "4.1.1" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" + integrity 
sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== http-call@^5.1.2: version "5.3.0" @@ -9954,6 +10421,15 @@ http-proxy-agent@^4.0.1: agent-base "6" debug "4" +http-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43" + integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w== + dependencies: + "@tootallnate/once" "2" + agent-base "6" + debug "4" + http-proxy-middleware@0.19.1: version "0.19.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" @@ -10065,18 +10541,30 @@ iferr@^0.1.5: resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501" integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE= -ignore-walk@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37" - integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw== +ignore-walk@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-4.0.1.tgz#fc840e8346cf88a3a9380c5b17933cd8f4d39fa3" + integrity sha512-rzDQLaW4jQbh2YrOFlJdCtX8qgJTehFRYiUB2r1osqTeDzV/3+Jh8fz1oAPzUThf3iku8Ds4IDqawI5d8mUiQw== dependencies: minimatch "^3.0.4" +ignore-walk@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-5.0.1.tgz#5f199e23e1288f518d90358d461387788a154776" + integrity sha512-yemi4pMf51WKT7khInJqAvsIGzoqYXblnsz0ql8tM+yi1EKYTY1evX4NAbJrLL/Aanr2HyZeluqU+Oi7MGHokw== + dependencies: + minimatch "^5.0.1" + ignore@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" integrity 
sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== +ignore@^5.0.4: + version "5.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== + ignore@^5.1.1: version "5.1.4" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf" @@ -10161,13 +10649,6 @@ imurmurhash@^0.1.4: resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= -indent-string@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" - integrity sha1-ji1INIdCEhtKghi3oTfppSBJ3IA= - dependencies: - repeating "^2.0.0" - indent-string@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-3.2.0.tgz#4a5fd6d27cc332f37e5419a504dbb837105c9289" @@ -10216,19 +10697,18 @@ ini@^1.3.2, ini@^1.3.4, ini@^1.3.5: resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== -init-package-json@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/init-package-json/-/init-package-json-2.0.2.tgz#d81a7e6775af9b618f20bba288e440b8d1ce05f3" - integrity sha512-PO64kVeArePvhX7Ff0jVWkpnE1DfGRvaWcStYrPugcJz9twQGYibagKJuIMHCX7ENcp0M6LJlcjLBuLD5KeJMg== +init-package-json@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/init-package-json/-/init-package-json-3.0.2.tgz#f5bc9bac93f2bdc005778bc2271be642fecfcd69" + integrity sha512-YhlQPEjNFqlGdzrBfDNRLhvoSgX7iQRgSxgsNknRQ9ITXFT7UMfVMWhBTOh2Y+25lRnGrv5Xz8yZwQ3ACR6T3A== dependencies: - glob "^7.1.1" - npm-package-arg "^8.1.0" + 
npm-package-arg "^9.0.1" promzard "^0.3.0" - read "~1.0.1" - read-package-json "^3.0.0" - semver "^7.3.2" + read "^1.0.7" + read-package-json "^5.0.0" + semver "^7.3.5" validate-npm-package-license "^3.0.4" - validate-npm-package-name "^3.0.0" + validate-npm-package-name "^4.0.0" inline-source-map@~0.6.0: version "0.6.2" @@ -10256,24 +10736,26 @@ inquirer@8.0.0, inquirer@^8.0.0: strip-ansi "^6.0.0" through "^2.3.6" -inquirer@^7.3.3: - version "7.3.3" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" - integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== +inquirer@^8.2.4: + version "8.2.5" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.5.tgz#d8654a7542c35a9b9e069d27e2df4858784d54f8" + integrity sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ== dependencies: ansi-escapes "^4.2.1" - chalk "^4.1.0" + chalk "^4.1.1" cli-cursor "^3.1.0" cli-width "^3.0.0" external-editor "^3.0.3" figures "^3.0.0" - lodash "^4.17.19" + lodash "^4.17.21" mute-stream "0.0.8" + ora "^5.4.1" run-async "^2.4.0" - rxjs "^6.6.0" + rxjs "^7.5.5" string-width "^4.1.0" strip-ansi "^6.0.0" through "^2.3.6" + wrap-ansi "^7.0.0" insert-module-globals@^7.0.0: version "7.2.0" @@ -10323,6 +10805,11 @@ ip@1.1.5, ip@^1.1.0, ip@^1.1.5: resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= +ip@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da" + integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ== + ipaddr.js@1.9.1, ipaddr.js@^1.9.0: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" @@ -10449,6 +10936,13 @@ is-core-module@^2.0.0, 
is-core-module@^2.1.0, is-core-module@^2.2.0: dependencies: has "^1.0.3" +is-core-module@^2.8.1: + version "2.11.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== + dependencies: + has "^1.0.3" + is-data-descriptor@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" @@ -10496,6 +10990,11 @@ is-docker@^2.0.0: resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== +is-docker@^2.1.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" @@ -10513,11 +11012,6 @@ is-extglob@^2.1.0, is-extglob@^2.1.1: resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= -is-finite@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.1.0.tgz#904135c77fb42c0641d6aa1bcdbc4daa8da082f3" - integrity sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w== - is-fullwidth-code-point@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" @@ -10562,6 +11056,11 @@ is-installed-globally@^0.3.2: 
global-dirs "^2.0.1" is-path-inside "^3.0.1" +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + is-lambda@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5" @@ -10726,12 +11225,12 @@ is-scoped@^2.1.0: dependencies: scoped-regex "^2.0.0" -is-ssh@^1.3.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/is-ssh/-/is-ssh-1.3.2.tgz#a4b82ab63d73976fd8263cceee27f99a88bdae2b" - integrity sha512-elEw0/0c2UscLrNG+OAorbP539E3rhliKPg+hDMWN9VwrDXfYK+4PBEykDPfxlYYtQvl84TascnQyobfQLHEhQ== +is-ssh@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/is-ssh/-/is-ssh-1.4.0.tgz#4f8220601d2839d8fa624b3106f8e8884f01b8b2" + integrity sha512-x7+VxdxOdlV3CYpjvRLBv5Lo9OJerlYanjwFrPR9fuGPjCiNiCzFgAWpiLAohSbsnH4ZAys3SBh+hq5rJosxUQ== dependencies: - protocols "^1.1.0" + protocols "^2.0.1" is-stream@^1.1.0: version "1.1.0" @@ -10781,6 +11280,11 @@ is-unc-path@^1.0.0: dependencies: unc-path-regex "^0.1.2" +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + is-utf8@^0.2.0, is-utf8@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" @@ -10818,6 +11322,11 @@ isbinaryfile@^4.0.0: resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-4.0.6.tgz#edcb62b224e2b4710830b67498c8e4e5a4d2610b" integrity sha512-ORrEy+SNVqUhrCaal4hA4fBzhggQQ+BaLntyPOdoEiwlKZW9BZiJXjg3RMiruE4tPEI3pyVPpySHQF/dKWperg== +isbinaryfile@^4.0.10: + version "4.0.10" + resolved 
"https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-4.0.10.tgz#0c5b5e30c2557a2f06febd37b7322946aaee42b3" + integrity sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw== + isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" @@ -10907,13 +11416,13 @@ istanbul-reports@^3.0.2: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -jake@^10.6.1: - version "10.8.2" - resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.2.tgz#ebc9de8558160a66d82d0eadc6a2e58fbc500a7b" - integrity sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A== +jake@^10.8.5: + version "10.8.5" + resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" + integrity sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw== dependencies: - async "0.9.x" - chalk "^2.4.2" + async "^3.2.3" + chalk "^4.0.2" filelist "^1.0.1" minimatch "^3.0.4" @@ -11484,7 +11993,14 @@ jquery@^3.4.0: resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@^3.13.0, js-yaml@^3.13.1, js-yaml@^3.14.1: +js-yaml@4.1.0, js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +js-yaml@^3.10.0, js-yaml@^3.13.0, js-yaml@^3.13.1, js-yaml@^3.14.1: version "3.14.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== @@ 
-11604,9 +12120,9 @@ json5@2.x, json5@^2.1.0: minimist "^1.2.0" json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" @@ -11617,6 +12133,16 @@ json5@^2.1.2: dependencies: minimist "^1.2.5" +json5@^2.2.2: + version "2.2.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonc-parser@3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" + integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== + jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" @@ -11677,15 +12203,15 @@ jsprim@^1.2.2: array-includes "^3.1.2" object.assign "^4.1.2" -just-diff-apply@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/just-diff-apply/-/just-diff-apply-3.0.0.tgz#a77348d24f0694e378b57293dceb65bdf5a91c4f" - integrity sha512-K2MLc+ZC2DVxX4V61bIKPeMUUfj1YYZ3h0myhchDXOW1cKoPZMnjIoNCqv9bF2n5Oob1PFxuR2gVJxkxz4e58w== +just-diff-apply@^5.2.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/just-diff-apply/-/just-diff-apply-5.5.0.tgz#771c2ca9fa69f3d2b54e7c3f5c1dfcbcc47f9f0f" + integrity sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw== -just-diff@^3.0.1: - version "3.1.1" - 
resolved "https://registry.yarnpkg.com/just-diff/-/just-diff-3.1.1.tgz#d50c597c6fd4776495308c63bdee1b6839082647" - integrity sha512-sdMWKjRq8qWZEjDcVA6llnUT8RDEBIfOiGpYFPYa9u+2c39JCsejktSP7mj5eRid5EIvTzIpQ2kDOCw1Nq9BjQ== +just-diff@^5.0.1: + version "5.2.0" + resolved "https://registry.yarnpkg.com/just-diff/-/just-diff-5.2.0.tgz#60dca55891cf24cd4a094e33504660692348a241" + integrity sha512-6ufhP9SHjb7jibNFrNxyFZ6od3g+An6Ai9mhGRvcYe8UJlH0prseN64M+6ZBBUoKYHZsitDP42gAJ8+eVWr3lw== jwa@^1.4.1: version "1.4.1" @@ -11776,29 +12302,36 @@ lazy-ass@1.6.0, lazy-ass@^1.6.0: resolved "https://registry.yarnpkg.com/lazy-ass/-/lazy-ass-1.6.0.tgz#7999655e8646c17f089fdd187d150d3324d54513" integrity sha1-eZllXoZGwX8In90YfRUNMyTVRRM= -lerna@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/lerna/-/lerna-4.0.0.tgz#b139d685d50ea0ca1be87713a7c2f44a5b678e9e" - integrity sha512-DD/i1znurfOmNJb0OBw66NmNqiM8kF6uIrzrJ0wGE3VNdzeOhz9ziWLYiRaZDGGwgbcjOo6eIfcx9O5Qynz+kg== - dependencies: - "@lerna/add" "4.0.0" - "@lerna/bootstrap" "4.0.0" - "@lerna/changed" "4.0.0" - "@lerna/clean" "4.0.0" - "@lerna/cli" "4.0.0" - "@lerna/create" "4.0.0" - "@lerna/diff" "4.0.0" - "@lerna/exec" "4.0.0" - "@lerna/import" "4.0.0" - "@lerna/info" "4.0.0" - "@lerna/init" "4.0.0" - "@lerna/link" "4.0.0" - "@lerna/list" "4.0.0" - "@lerna/publish" "4.0.0" - "@lerna/run" "4.0.0" - "@lerna/version" "4.0.0" +lerna@6.4.1: + version "6.4.1" + resolved "https://registry.yarnpkg.com/lerna/-/lerna-6.4.1.tgz#a1e5abcb6c00de3367f50d75eca449e382525e0f" + integrity sha512-0t8TSG4CDAn5+vORjvTFn/ZEGyc4LOEsyBUpzcdIxODHPKM4TVOGvbW9dBs1g40PhOrQfwhHS+3fSx/42j42dQ== + dependencies: + "@lerna/add" "6.4.1" + "@lerna/bootstrap" "6.4.1" + "@lerna/changed" "6.4.1" + "@lerna/clean" "6.4.1" + "@lerna/cli" "6.4.1" + "@lerna/command" "6.4.1" + "@lerna/create" "6.4.1" + "@lerna/diff" "6.4.1" + "@lerna/exec" "6.4.1" + "@lerna/filter-options" "6.4.1" + "@lerna/import" "6.4.1" + "@lerna/info" "6.4.1" + "@lerna/init" "6.4.1" + 
"@lerna/link" "6.4.1" + "@lerna/list" "6.4.1" + "@lerna/publish" "6.4.1" + "@lerna/run" "6.4.1" + "@lerna/validation-error" "6.4.1" + "@lerna/version" "6.4.1" + "@nrwl/devkit" ">=15.4.2 < 16" import-local "^3.0.2" - npmlog "^4.1.2" + inquirer "^8.2.4" + npmlog "^6.0.2" + nx ">=15.4.2 < 16" + typescript "^3 || ^4" level-concat-iterator@^3.0.0: version "3.0.0" @@ -11879,32 +12412,37 @@ levn@~0.3.0: prelude-ls "~1.1.2" type-check "~0.3.2" -libnpmaccess@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/libnpmaccess/-/libnpmaccess-4.0.1.tgz#17e842e03bef759854adf6eb6c2ede32e782639f" - integrity sha512-ZiAgvfUbvmkHoMTzdwmNWCrQRsDkOC+aM5BDfO0C9aOSwF3R1LdFDBD+Rer1KWtsoQYO35nXgmMR7OUHpDRxyA== +libnpmaccess@^6.0.3: + version "6.0.4" + resolved "https://registry.yarnpkg.com/libnpmaccess/-/libnpmaccess-6.0.4.tgz#2dd158bd8a071817e2207d3b201d37cf1ad6ae6b" + integrity sha512-qZ3wcfIyUoW0+qSFkMBovcTrSGJ3ZeyvpR7d5N9pEYv/kXs8sHP2wiqEIXBKLFrZlmM0kR0RJD7mtfLngtlLag== dependencies: aproba "^2.0.0" minipass "^3.1.1" - npm-package-arg "^8.0.0" - npm-registry-fetch "^9.0.0" + npm-package-arg "^9.0.1" + npm-registry-fetch "^13.0.0" -libnpmpublish@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/libnpmpublish/-/libnpmpublish-4.0.0.tgz#ad6413914e0dfd78df868ce14ba3d3a4cc8b385b" - integrity sha512-2RwYXRfZAB1x/9udKpZmqEzSqNd7ouBRU52jyG14/xG8EF+O9A62d7/XVR3iABEQHf1iYhkm0Oq9iXjrL3tsXA== +libnpmpublish@^6.0.4: + version "6.0.5" + resolved "https://registry.yarnpkg.com/libnpmpublish/-/libnpmpublish-6.0.5.tgz#5a894f3de2e267d62f86be2a508e362599b5a4b1" + integrity sha512-LUR08JKSviZiqrYTDfywvtnsnxr+tOvBU0BF8H+9frt7HMvc6Qn6F8Ubm72g5hDTHbq8qupKfDvDAln2TVPvFg== dependencies: - normalize-package-data "^3.0.0" - npm-package-arg "^8.1.0" - npm-registry-fetch "^9.0.0" - semver "^7.1.3" - ssri "^8.0.0" + normalize-package-data "^4.0.0" + npm-package-arg "^9.0.1" + npm-registry-fetch "^13.0.0" + semver "^7.3.7" + ssri "^9.0.0" lines-and-columns@^1.1.6: version "1.1.6" 
resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= +lines-and-columns@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-2.0.3.tgz#b2f0badedb556b747020ab8ea7f0373e22efac1b" + integrity sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w== + linked-list@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/linked-list/-/linked-list-0.1.0.tgz#798b0ff97d1b92a4fd08480f55aea4e9d49d37bf" @@ -11990,17 +12528,6 @@ listr@0.14.3, listr@^0.14.3: p-map "^2.0.0" rxjs "^6.3.3" -load-json-file@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" - integrity sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA= - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - pinkie-promise "^2.0.0" - strip-bom "^2.0.0" - load-json-file@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" @@ -12193,7 +12720,7 @@ lodash.uniq@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= -lodash@4.x, "lodash@>=3.5 <5", lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.7.0: +lodash@4.x, "lodash@>=3.5 <5", lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.17.5: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity 
sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -12212,6 +12739,14 @@ log-symbols@^4.0.0: dependencies: chalk "^4.0.0" +log-symbols@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + log-update@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/log-update/-/log-update-2.3.0.tgz#88328fd7d1ce7938b29283746f0b1bc126b24708" @@ -12248,14 +12783,6 @@ loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4 dependencies: js-tokens "^3.0.0 || ^4.0.0" -loud-rejection@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" - integrity sha1-W0b4AUft7leIcPCG0Eghz5mOVR8= - dependencies: - currently-unhandled "^0.4.1" - signal-exit "^3.0.0" - lower-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" @@ -12277,6 +12804,11 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" +lru-cache@^7.4.4, lru-cache@^7.5.1, lru-cache@^7.7.1: + version "7.14.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea" + integrity sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA== + magic-string@^0.25.0, magic-string@^0.25.7: version "0.25.7" resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" @@ -12311,31 +12843,32 @@ make-error@1.x, make-error@^1.1.1: resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity 
sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== -make-fetch-happen@^8.0.9: - version "8.0.14" - resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-8.0.14.tgz#aaba73ae0ab5586ad8eaa68bd83332669393e222" - integrity sha512-EsS89h6l4vbfJEtBZnENTOFk8mCRpY5ru36Xe5bcX1KYIli2mkSHqoFsp5O1wMDvTJJzxe/4THpCTtygjeeGWQ== +make-fetch-happen@^10.0.1, make-fetch-happen@^10.0.3, make-fetch-happen@^10.0.6: + version "10.2.1" + resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz#f5e3835c5e9817b617f2770870d9492d28678164" + integrity sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w== dependencies: - agentkeepalive "^4.1.3" - cacache "^15.0.5" + agentkeepalive "^4.2.1" + cacache "^16.1.0" http-cache-semantics "^4.1.0" - http-proxy-agent "^4.0.1" + http-proxy-agent "^5.0.0" https-proxy-agent "^5.0.0" is-lambda "^1.0.1" - lru-cache "^6.0.0" - minipass "^3.1.3" + lru-cache "^7.7.1" + minipass "^3.1.6" minipass-collect "^1.0.2" - minipass-fetch "^1.3.2" + minipass-fetch "^2.0.3" minipass-flush "^1.0.5" minipass-pipeline "^1.2.4" + negotiator "^0.6.3" promise-retry "^2.0.1" - socks-proxy-agent "^5.0.0" - ssri "^8.0.0" + socks-proxy-agent "^7.0.0" + ssri "^9.0.0" -make-fetch-happen@^9.0.1: +make-fetch-happen@^9.1.0: version "9.1.0" resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz#53085a09e7971433e6765f7971bf63f4e05cb968" - integrity "sha1-UwhaCeeXFDPmdl95cb9j9OBcuWg= sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==" + integrity sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg== dependencies: agentkeepalive "^4.1.3" cacache "^15.2.0" @@ -12366,7 +12899,7 @@ map-cache@^0.2.2: resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity 
sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= -map-obj@^1.0.0, map-obj@^1.0.1: +map-obj@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" integrity sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0= @@ -12461,22 +12994,6 @@ memory-fs@^0.5.0: errno "^0.1.3" readable-stream "^2.0.1" -meow@^3.3.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" - integrity sha1-cstmi0JSKCkKu/qFaJJYcwioAfs= - dependencies: - camelcase-keys "^2.0.0" - decamelize "^1.1.2" - loud-rejection "^1.0.0" - map-obj "^1.0.1" - minimist "^1.1.3" - normalize-package-data "^2.3.4" - object-assign "^4.0.1" - read-pkg-up "^1.0.1" - redent "^1.0.0" - trim-newlines "^1.0.0" - meow@^8.0.0: version "8.1.2" resolved "https://registry.yarnpkg.com/meow/-/meow-8.1.2.tgz#bcbe45bda0ee1729d350c03cffc8395a36c4e897" @@ -12656,13 +13173,34 @@ minimalistic-crypto-utils@^1.0.1: resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= -"minimatch@2 || 3", minimatch@3.0.4, minimatch@^3.0.3, minimatch@^3.0.4: +"minimatch@2 || 3", minimatch@^3.0.3, minimatch@^3.0.4: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" +minimatch@3.0.5: + version "3.0.5" + resolved 
"https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" + integrity sha512-tUpxzX0VAzJHjLu0xUfFv1gwVp9ba3IOuRAVH2EGuRW8a5emA2FlACLqiT/lDVtS1W+TGNwqz3sWaNyLgDJWuw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.0.1: + version "5.1.6" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96" + integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g== + dependencies: + brace-expansion "^2.0.1" + minimist-options@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" @@ -12672,7 +13210,7 @@ minimist-options@4.1.0: is-plain-obj "^1.1.0" kind-of "^6.0.3" -minimist@^1.1.0, minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.1.0, minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.6" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== @@ -12684,7 +13222,7 @@ minipass-collect@^1.0.2: dependencies: minipass "^3.0.0" -minipass-fetch@^1.3.0, minipass-fetch@^1.3.2: +minipass-fetch@^1.3.2: version "1.3.3" resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-1.3.3.tgz#34c7cea038c817a8658461bf35174551dce17a0a" integrity sha512-akCrLDWfbdAWkMLBxJEeWTdNsjML+dt5YgOI4gJ53vuO0vrmYQkUPxa6j6V65s9CcePIr2SSWqjT2EcrNseryQ== @@ -12695,6 +13233,28 @@ minipass-fetch@^1.3.0, minipass-fetch@^1.3.2: optionalDependencies: encoding "^0.1.12" +minipass-fetch@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-1.4.1.tgz#d75e0091daac1b0ffd7e9d41629faff7d0c1f1b6" + integrity 
sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw== + dependencies: + minipass "^3.1.0" + minipass-sized "^1.0.3" + minizlib "^2.0.0" + optionalDependencies: + encoding "^0.1.12" + +minipass-fetch@^2.0.3: + version "2.1.2" + resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-2.1.2.tgz#95560b50c472d81a3bc76f20ede80eaed76d8add" + integrity sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA== + dependencies: + minipass "^3.1.6" + minipass-sized "^1.0.3" + minizlib "^2.1.2" + optionalDependencies: + encoding "^0.1.13" + minipass-flush@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" @@ -12724,14 +13284,6 @@ minipass-sized@^1.0.3: dependencies: minipass "^3.0.0" -minipass@^2.6.0, minipass@^2.9.0: - version "2.9.0" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" - integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== - dependencies: - safe-buffer "^5.1.2" - yallist "^3.0.0" - minipass@^3.0.0, minipass@^3.1.0, minipass@^3.1.1, minipass@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" @@ -12739,14 +13291,19 @@ minipass@^3.0.0, minipass@^3.1.0, minipass@^3.1.1, minipass@^3.1.3: dependencies: yallist "^4.0.0" -minizlib@^1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" - integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== +minipass@^3.1.6: + version "3.3.6" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" + integrity 
sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== dependencies: - minipass "^2.9.0" + yallist "^4.0.0" -minizlib@^2.0.0, minizlib@^2.1.1: +minipass@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-4.0.2.tgz#26fc3364d5ea6cb971c6e5259eac67a0887510d1" + integrity sha512-4Hbzei7ZyBp+1aw0874YWpKOubZd/jc53/XU+gkYry1QV+VvrbO8icLM5CUtm4F0hyXn85DXYKEMIS26gitD3A== + +minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== @@ -12843,9 +13400,9 @@ module-deps@^6.0.0: xtend "^4.0.0" moment@^2.10.6, moment@^2.19.3, moment@^2.27.0: - version "2.29.3" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.3.tgz#edd47411c322413999f7a5940d526de183c031f3" - integrity "sha1-7dR0EcMiQTmZ96WUDVJt4YPAMfM= sha512-c6YRvhEo//6T2Jz/vVtYzqBzwvPT95JBQ+smCytzf7c50oMZRsR/a4w88aD34I+/QVSfnoAnSBFPJHItlOMJVw==" + version "2.29.4" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" + integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== move-concurrently@^1.0.1: version "1.0.1" @@ -12981,7 +13538,7 @@ negotiator@0.6.2, negotiator@^0.6.2: resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== -negotiator@0.6.3: +negotiator@0.6.3, negotiator@^0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== @@ -13009,6 +13566,11 @@ 
no-case@^3.0.4: lower-case "^2.0.2" tslib "^2.0.3" +node-addon-api@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" + integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A== + node-fetch@^2.6.1: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -13016,6 +13578,13 @@ node-fetch@^2.6.1: dependencies: whatwg-url "^5.0.0" +node-fetch@^2.6.7: + version "2.6.9" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6" + integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg== + dependencies: + whatwg-url "^5.0.0" + node-forge@^0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" @@ -13031,37 +13600,36 @@ node-gyp-build@^4.3.0: resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.3.0.tgz#9f256b03e5826150be39c764bf51e993946d71a3" integrity sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q== -node-gyp@^5.0.2: - version "5.1.1" - resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-5.1.1.tgz#eb915f7b631c937d282e33aed44cb7a025f62a3e" - integrity sha512-WH0WKGi+a4i4DUt2mHnvocex/xPLp9pYt5R6M2JdFB7pJ7Z34hveZ4nDTGTiLXCkitA9T8HFZjhinBCiVHYcWw== +node-gyp@^8.2.0: + version "8.4.1" + resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-8.4.1.tgz#3d49308fc31f768180957d6b5746845fbd429937" + integrity sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w== dependencies: env-paths "^2.2.0" glob "^7.1.4" - graceful-fs "^4.2.2" - mkdirp "^0.5.1" - nopt "^4.0.1" - npmlog "^4.1.2" - request "^2.88.0" - rimraf "^2.6.3" - semver "^5.7.1" - tar "^4.4.12" - 
which "^1.3.1" + graceful-fs "^4.2.6" + make-fetch-happen "^9.1.0" + nopt "^5.0.0" + npmlog "^6.0.0" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.2" + which "^2.0.2" -node-gyp@^7.1.0: - version "7.1.2" - resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-7.1.2.tgz#21a810aebb187120251c3bcec979af1587b188ae" - integrity sha512-CbpcIo7C3eMu3dL1c3d0xw449fHIGALIJsRP4DDPHpyiW8vcriNY7ubh9TE4zEKfSxscY7PjeFnshE7h75ynjQ== +node-gyp@^9.0.0: + version "9.3.1" + resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-9.3.1.tgz#1e19f5f290afcc9c46973d68700cbd21a96192e4" + integrity sha512-4Q16ZCqq3g8awk6UplT7AuxQ35XN4R/yf/+wSAwcBUAjg7l58RTactWaP8fIDTi0FzI7YcVLujwExakZlfWkXg== dependencies: env-paths "^2.2.0" glob "^7.1.4" - graceful-fs "^4.2.3" - nopt "^5.0.0" - npmlog "^4.1.2" - request "^2.88.2" + graceful-fs "^4.2.6" + make-fetch-happen "^10.0.3" + nopt "^6.0.0" + npmlog "^6.0.0" rimraf "^3.0.2" - semver "^7.3.2" - tar "^6.0.2" + semver "^7.3.5" + tar "^6.1.2" which "^2.0.2" node-int64@^0.4.0: @@ -13128,14 +13696,6 @@ noms@0.0.0: inherits "^2.0.1" readable-stream "~1.0.31" -nopt@^4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48" - integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg== - dependencies: - abbrev "1" - osenv "^0.1.4" - nopt@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88" @@ -13143,7 +13703,14 @@ nopt@^5.0.0: dependencies: abbrev "1" -normalize-package-data@^2.0.0, normalize-package-data@^2.3.0, normalize-package-data@^2.3.2, normalize-package-data@^2.3.4, normalize-package-data@^2.5.0: +nopt@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-6.0.0.tgz#245801d8ebf409c6df22ab9d95b65e1309cdb16d" + integrity sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g== + dependencies: + 
abbrev "^1.0.0" + +normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== @@ -13163,6 +13730,16 @@ normalize-package-data@^3.0.0: semver "^7.3.4" validate-npm-package-license "^3.0.1" +normalize-package-data@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-4.0.1.tgz#b46b24e0616d06cadf9d5718b29b6d445a82a62c" + integrity sha512-EBk5QKKuocMJhB3BILuKhmaPjI8vNRSpIfO9woLC6NyHVkKKdVEdAO1mrT0ZfxNR1lKwCcTkuZfmGIFdizZ8Pg== + dependencies: + hosted-git-info "^5.0.0" + is-core-module "^2.8.1" + semver "^7.3.5" + validate-npm-package-license "^3.0.4" + normalize-path@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" @@ -13190,7 +13767,7 @@ normalize-url@1.9.1: query-string "^4.1.0" sort-keys "^1.0.0" -normalize-url@^3.0.0, normalize-url@^3.3.0: +normalize-url@^3.0.0: version "3.3.0" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559" integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== @@ -13202,6 +13779,13 @@ npm-bundled@^1.1.1: dependencies: npm-normalize-package-bin "^1.0.1" +npm-bundled@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-2.0.1.tgz#94113f7eb342cd7a67de1e789f896b04d2c600f4" + integrity sha512-gZLxXdjEzE/+mOstGDqR6b0EkhJ+kM6fxM6vUuckuctuVPh80Q6pw/rSZj9s4Gex9GxWtIicO1pc8DB9KZWudw== + dependencies: + npm-normalize-package-bin "^2.0.0" + npm-install-checks@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/npm-install-checks/-/npm-install-checks-4.0.0.tgz#a37facc763a2fde0497ef2c6d0ac7c3fbe00d7b4" @@ -13209,26 +13793,33 @@ npm-install-checks@^4.0.0: dependencies: semver "^7.1.1" -npm-lifecycle@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/npm-lifecycle/-/npm-lifecycle-3.1.5.tgz#9882d3642b8c82c815782a12e6a1bfeed0026309" - integrity sha512-lDLVkjfZmvmfvpvBzA4vzee9cn+Me4orq0QF8glbswJVEbIcSNWib7qGOffolysc3teCqbbPZZkzbr3GQZTL1g== +npm-install-checks@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/npm-install-checks/-/npm-install-checks-5.0.0.tgz#5ff27d209a4e3542b8ac6b0c1db6063506248234" + integrity sha512-65lUsMI8ztHCxFz5ckCEC44DRvEGdZX5usQFriauxHEwt7upv1FKaQEmAtU0YnOAdwuNWCmk64xYiQABNrEyLA== dependencies: - byline "^5.0.0" - graceful-fs "^4.1.15" - node-gyp "^5.0.2" - resolve-from "^4.0.0" - slide "^1.1.6" - uid-number "0.0.6" - umask "^1.1.0" - which "^1.3.1" + semver "^7.1.1" -npm-normalize-package-bin@^1.0.0, npm-normalize-package-bin@^1.0.1: +npm-normalize-package-bin@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2" integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA== -npm-package-arg@^8.0.0, npm-package-arg@^8.0.1, npm-package-arg@^8.1.0, npm-package-arg@^8.1.2: +npm-normalize-package-bin@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-2.0.0.tgz#9447a1adaaf89d8ad0abe24c6c84ad614a675fff" + integrity sha512-awzfKUO7v0FscrSpRoogyNm0sajikhBWpU0QMrW09AMi9n1PoKU6WaIqUzuJSQnpciZZmJ/jMZ2Egfmb/9LiWQ== + +npm-package-arg@8.1.1: + version "8.1.1" + resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-8.1.1.tgz#00ebf16ac395c63318e67ce66780a06db6df1b04" + integrity 
sha512-CsP95FhWQDwNqiYS+Q0mZ7FAEDytDZAkNxQqea6IaAFJTAY9Lhhqyl0irU/6PMc7BGfUmnsbHcqxJD7XuVM/rg== + dependencies: + hosted-git-info "^3.0.6" + semver "^7.0.0" + validate-npm-package-name "^3.0.0" + +npm-package-arg@^8.0.1, npm-package-arg@^8.1.2: version "8.1.2" resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-8.1.2.tgz#b868016ae7de5619e729993fbd8d11dc3c52ab62" integrity sha512-6Eem455JsSMJY6Kpd3EyWE+n5hC+g9bSyHr9K9U2zqZb7+02+hObQ2c0+8iDk/mNF+8r1MhY44WypKJAkySIYA== @@ -13246,16 +13837,36 @@ npm-package-arg@^8.1.5: semver "^7.3.4" validate-npm-package-name "^3.0.0" -npm-packlist@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-2.1.4.tgz#40e96b2b43787d0546a574542d01e066640d09da" - integrity sha512-Qzg2pvXC9U4I4fLnUrBmcIT4x0woLtUgxUi9eC+Zrcv1Xx5eamytGAfbDWQ67j7xOcQ2VW1I3su9smVTIdu7Hw== +npm-package-arg@^9.0.0, npm-package-arg@^9.0.1: + version "9.1.2" + resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-9.1.2.tgz#fc8acecb00235f42270dda446f36926ddd9ac2bc" + integrity sha512-pzd9rLEx4TfNJkovvlBSLGhq31gGu2QDexFPWT19yCDh0JgnRhlBLNo5759N0AJmBk+kQ9Y/hXoLnlgFD+ukmg== + dependencies: + hosted-git-info "^5.0.0" + proc-log "^2.0.1" + semver "^7.3.5" + validate-npm-package-name "^4.0.0" + +npm-packlist@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-3.0.0.tgz#0370df5cfc2fcc8f79b8f42b37798dd9ee32c2a9" + integrity sha512-L/cbzmutAwII5glUcf2DBRNY/d0TFd4e/FnaZigJV6JD85RHZXJFGwCndjMWiiViiWSsWt3tiOLpI3ByTnIdFQ== dependencies: glob "^7.1.6" - ignore-walk "^3.0.3" + ignore-walk "^4.0.1" npm-bundled "^1.1.1" npm-normalize-package-bin "^1.0.1" +npm-packlist@^5.1.0, npm-packlist@^5.1.1: + version "5.1.3" + resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-5.1.3.tgz#69d253e6fd664b9058b85005905012e00e69274b" + integrity sha512-263/0NGrn32YFYi4J533qzrQ/krmmrWwhKkzwTuM4f/07ug51odoaNjUexxO4vxlzURHcmYMH1QjvHjsNDKLVg== + dependencies: + glob 
"^8.0.1" + ignore-walk "^5.0.1" + npm-bundled "^2.0.0" + npm-normalize-package-bin "^2.0.0" + npm-pick-manifest@^6.0.0, npm-pick-manifest@^6.1.0, npm-pick-manifest@^6.1.1: version "6.1.1" resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-6.1.1.tgz#7b5484ca2c908565f43b7f27644f36bb816f5148" @@ -13266,31 +13877,40 @@ npm-pick-manifest@^6.0.0, npm-pick-manifest@^6.1.0, npm-pick-manifest@^6.1.1: npm-package-arg "^8.1.2" semver "^7.3.4" -npm-registry-fetch@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/npm-registry-fetch/-/npm-registry-fetch-11.0.0.tgz#68c1bb810c46542760d62a6a965f85a702d43a76" - integrity "sha1-aMG7gQxGVCdg1ipqll+FpwLUOnY= sha512-jmlgSxoDNuhAtxUIG6pVwwtz840i994dL14FoNVZisrmZW5kWd63IUTNv1m/hyRSGSqWjCUp/YZlS1BJyNp9XA==" +npm-pick-manifest@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-7.0.2.tgz#1d372b4e7ea7c6712316c0e99388a73ed3496e84" + integrity sha512-gk37SyRmlIjvTfcYl6RzDbSmS9Y4TOBXfsPnoYqTHARNgWbyDiCSMLUpmALDj4jjcTZpURiEfsSHJj9k7EV4Rw== dependencies: - make-fetch-happen "^9.0.1" - minipass "^3.1.3" - minipass-fetch "^1.3.0" + npm-install-checks "^5.0.0" + npm-normalize-package-bin "^2.0.0" + npm-package-arg "^9.0.0" + semver "^7.3.5" + +npm-registry-fetch@^12.0.0, npm-registry-fetch@^12.0.1: + version "12.0.2" + resolved "https://registry.yarnpkg.com/npm-registry-fetch/-/npm-registry-fetch-12.0.2.tgz#ae583bb3c902a60dae43675b5e33b5b1f6159f1e" + integrity sha512-Df5QT3RaJnXYuOwtXBXS9BWs+tHH2olvkCLh6jcR/b/u3DvPMlp3J0TvvYwplPKxHMOwfg287PYih9QqaVFoKA== + dependencies: + make-fetch-happen "^10.0.1" + minipass "^3.1.6" + minipass-fetch "^1.4.1" minipass-json-stream "^1.0.1" - minizlib "^2.0.0" - npm-package-arg "^8.0.0" + minizlib "^2.1.2" + npm-package-arg "^8.1.5" -npm-registry-fetch@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/npm-registry-fetch/-/npm-registry-fetch-9.0.0.tgz#86f3feb4ce00313bc0b8f1f8f69daae6face1661" - integrity 
sha512-PuFYYtnQ8IyVl6ib9d3PepeehcUeHN9IO5N/iCRhyg9tStQcqGQBRVHmfmMWPDERU3KwZoHFvbJ4FPXPspvzbA== +npm-registry-fetch@^13.0.0, npm-registry-fetch@^13.0.1, npm-registry-fetch@^13.3.0: + version "13.3.1" + resolved "https://registry.yarnpkg.com/npm-registry-fetch/-/npm-registry-fetch-13.3.1.tgz#bb078b5fa6c52774116ae501ba1af2a33166af7e" + integrity sha512-eukJPi++DKRTjSBRcDZSDDsGqRK3ehbxfFUcgaRd0Yp6kRwOwh2WVn0r+8rMB4nnuzvAk6rQVzl6K5CkYOmnvw== dependencies: - "@npmcli/ci-detect" "^1.0.0" - lru-cache "^6.0.0" - make-fetch-happen "^8.0.9" - minipass "^3.1.3" - minipass-fetch "^1.3.0" + make-fetch-happen "^10.0.6" + minipass "^3.1.6" + minipass-fetch "^2.0.3" minipass-json-stream "^1.0.1" - minizlib "^2.0.0" - npm-package-arg "^8.0.0" + minizlib "^2.1.2" + npm-package-arg "^9.0.1" + proc-log "^2.0.0" npm-run-path@^2.0.0: version "2.0.2" @@ -13306,15 +13926,25 @@ npm-run-path@^4.0.0, npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -npmlog@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" - integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== +npmlog@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-5.0.1.tgz#f06678e80e29419ad67ab964e0fa69959c1eb8b0" + integrity sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw== + dependencies: + are-we-there-yet "^2.0.0" + console-control-strings "^1.1.0" + gauge "^3.0.0" + set-blocking "^2.0.0" + +npmlog@^6.0.0, npmlog@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830" + integrity sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg== dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" + are-we-there-yet "^3.0.0" + 
console-control-strings "^1.1.0" + gauge "^4.0.3" + set-blocking "^2.0.0" nth-check@^1.0.2: version "1.0.2" @@ -13338,6 +13968,47 @@ nwsapi@^2.2.0: resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== +nx@15.6.3, "nx@>=15.4.2 < 16": + version "15.6.3" + resolved "https://registry.yarnpkg.com/nx/-/nx-15.6.3.tgz#900087bce38c6e5975660c23ebd41ead1bf54f98" + integrity sha512-3t0A0GPLNen1yPAyE+VGZ3nkAzZYb5nfXtAcx8SHBlKq4u42yBY3khBmP1y4Og3jhIwFIj7J7Npeh8ZKrthmYQ== + dependencies: + "@nrwl/cli" "15.6.3" + "@nrwl/tao" "15.6.3" + "@parcel/watcher" "2.0.4" + "@yarnpkg/lockfile" "^1.1.0" + "@yarnpkg/parsers" "^3.0.0-rc.18" + "@zkochan/js-yaml" "0.0.6" + axios "^1.0.0" + chalk "^4.1.0" + cli-cursor "3.1.0" + cli-spinners "2.6.1" + cliui "^7.0.2" + dotenv "~10.0.0" + enquirer "~2.3.6" + fast-glob "3.2.7" + figures "3.2.0" + flat "^5.0.2" + fs-extra "^11.1.0" + glob "7.1.4" + ignore "^5.0.4" + js-yaml "4.1.0" + jsonc-parser "3.2.0" + lines-and-columns "~2.0.3" + minimatch "3.0.5" + npm-run-path "^4.0.1" + open "^8.4.0" + semver "7.3.4" + string-width "^4.2.3" + strong-log-transformer "^2.1.0" + tar-stream "~2.2.0" + tmp "~0.2.1" + tsconfig-paths "^4.1.2" + tslib "^2.3.0" + v8-compile-cache "2.3.0" + yargs "^17.6.2" + yargs-parser "21.1.1" + oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" @@ -13528,6 +14199,15 @@ open@^7.0.2: is-docker "^2.0.0" is-wsl "^2.1.1" +open@^8.4.0: + version "8.4.0" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + opn@^5.5.0: version "5.5.0" resolved 
"https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" @@ -13567,6 +14247,21 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" +ora@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + original@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" @@ -13579,24 +14274,11 @@ os-browserify@^0.3.0, os-browserify@~0.3.0: resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= - -os-tmpdir@^1.0.0, os-tmpdir@~1.0.2: +os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= -osenv@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" - integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g== - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - ospath@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/ospath/-/ospath-1.2.2.tgz#1276639774a3f8ef2572f7fe4280e0ea4550c07b" @@ -13715,6 +14397,14 @@ p-timeout@^3.2.0: dependencies: p-finally "^1.0.0" +p-transform@^1.3.0: + version 
"1.3.0" + resolved "https://registry.yarnpkg.com/p-transform/-/p-transform-1.3.0.tgz#2da960ba92c6a56efbe75cbd1edf3ea7b3191049" + integrity sha512-UJKdSzgd3KOnXXAtqN5+/eeHcvTn1hBkesEmElVgvO/NAYcxAvmjzIGmnNd3Tb/gRAvMBdNRFD4qAWdHxY6QXg== + dependencies: + debug "^4.3.2" + p-queue "^6.6.2" + p-try@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" @@ -13732,15 +14422,15 @@ p-waterfall@^2.1.1: dependencies: p-reduce "^2.0.0" -pacote@^11.1.11, pacote@^11.2.6: - version "11.3.1" - resolved "https://registry.yarnpkg.com/pacote/-/pacote-11.3.1.tgz#6ce95dd230db475cbd8789fd1f986bec51b4bf7c" - integrity sha512-TymtwoAG12cczsJIrwI/euOQKtjrQHlD0k0oyt9QSmZGpqa+KdlxKdWR/YUjYizkixaVyztxt/Wsfo8bL3A6Fg== +pacote@^12.0.0, pacote@^12.0.2: + version "12.0.3" + resolved "https://registry.yarnpkg.com/pacote/-/pacote-12.0.3.tgz#b6f25868deb810e7e0ddf001be88da2bcaca57c7" + integrity sha512-CdYEl03JDrRO3x18uHjBYA9TyoW8gy+ThVcypcDkxPtKlw76e4ejhYB6i9lJ+/cebbjpqPW/CijjqxwDTts8Ow== dependencies: - "@npmcli/git" "^2.0.1" + "@npmcli/git" "^2.1.0" "@npmcli/installed-package-contents" "^1.0.6" "@npmcli/promise-spawn" "^1.2.0" - "@npmcli/run-script" "^1.8.2" + "@npmcli/run-script" "^2.0.0" cacache "^15.0.5" chownr "^2.0.0" fs-minipass "^2.1.0" @@ -13748,39 +14438,41 @@ pacote@^11.1.11, pacote@^11.2.6: minipass "^3.1.3" mkdirp "^1.0.3" npm-package-arg "^8.0.1" - npm-packlist "^2.1.4" + npm-packlist "^3.0.0" npm-pick-manifest "^6.0.0" - npm-registry-fetch "^9.0.0" + npm-registry-fetch "^12.0.0" promise-retry "^2.0.1" read-package-json-fast "^2.0.1" rimraf "^3.0.2" ssri "^8.0.1" tar "^6.1.0" -pacote@^11.3.5: - version "11.3.5" - resolved "https://registry.yarnpkg.com/pacote/-/pacote-11.3.5.tgz#73cf1fc3772b533f575e39efa96c50be8c3dc9d2" - integrity "sha1-c88fw3crUz9XXjnvqWxQvow9ydI= sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg==" +pacote@^13.0.3, pacote@^13.6.1: + version "13.6.2" 
+ resolved "https://registry.yarnpkg.com/pacote/-/pacote-13.6.2.tgz#0d444ba3618ab3e5cd330b451c22967bbd0ca48a" + integrity sha512-Gu8fU3GsvOPkak2CkbojR7vjs3k3P9cA6uazKTHdsdV0gpCEQq2opelnEv30KRQWgVzP5Vd/5umjcedma3MKtg== dependencies: - "@npmcli/git" "^2.1.0" - "@npmcli/installed-package-contents" "^1.0.6" - "@npmcli/promise-spawn" "^1.2.0" - "@npmcli/run-script" "^1.8.2" - cacache "^15.0.5" + "@npmcli/git" "^3.0.0" + "@npmcli/installed-package-contents" "^1.0.7" + "@npmcli/promise-spawn" "^3.0.0" + "@npmcli/run-script" "^4.1.0" + cacache "^16.0.0" chownr "^2.0.0" fs-minipass "^2.1.0" infer-owner "^1.0.4" - minipass "^3.1.3" - mkdirp "^1.0.3" - npm-package-arg "^8.0.1" - npm-packlist "^2.1.4" - npm-pick-manifest "^6.0.0" - npm-registry-fetch "^11.0.0" + minipass "^3.1.6" + mkdirp "^1.0.4" + npm-package-arg "^9.0.0" + npm-packlist "^5.1.0" + npm-pick-manifest "^7.0.0" + npm-registry-fetch "^13.0.1" + proc-log "^2.0.0" promise-retry "^2.0.1" - read-package-json-fast "^2.0.1" + read-package-json "^5.0.0" + read-package-json-fast "^2.0.3" rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.1.0" + ssri "^9.0.0" + tar "^6.1.11" pako@~1.0.5: version "1.0.11" @@ -13830,19 +14522,14 @@ parse-asn1@^5.0.0: pbkdf2 "^3.0.3" safe-buffer "^5.1.1" -parse-conflict-json@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/parse-conflict-json/-/parse-conflict-json-1.1.1.tgz#54ec175bde0f2d70abf6be79e0e042290b86701b" - integrity sha512-4gySviBiW5TRl7XHvp1agcS7SOe0KZOjC//71dzZVWJrY9hCrgtvl5v3SyIxCZ4fZF47TxD9nfzmxcx76xmbUw== +parse-conflict-json@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/parse-conflict-json/-/parse-conflict-json-2.0.2.tgz#3d05bc8ffe07d39600dc6436c6aefe382033d323" + integrity sha512-jDbRGb00TAPFsKWCpZZOT93SxVP9nONOSgES3AevqRq/CHvavEBvKAjxX9p5Y5F0RZLxH9Ufd9+RwtCsa+lFDA== dependencies: - json-parse-even-better-errors "^2.3.0" - just-diff "^3.0.1" - just-diff-apply "^3.0.0" - -parse-github-repo-url@^1.3.0: - version "1.4.1" - resolved 
"https://registry.yarnpkg.com/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz#9e7d8bb252a6cb6ba42595060b7bf6df3dbc1f50" - integrity sha1-nn2LslKmy2ukJZUGC3v23z28H1A= + json-parse-even-better-errors "^2.3.1" + just-diff "^5.0.1" + just-diff-apply "^5.2.0" parse-json@^2.2.0: version "2.2.0" @@ -13869,25 +14556,19 @@ parse-json@^5.0.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" -parse-path@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/parse-path/-/parse-path-4.0.3.tgz#82d81ec3e071dcc4ab49aa9f2c9c0b8966bb22bf" - integrity sha512-9Cepbp2asKnWTJ9x2kpw6Fe8y9JDbqwahGCTvklzd/cEq5C5JC59x2Xb0Kx+x0QZ8bvNquGO8/BWP0cwBHzSAA== +parse-path@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/parse-path/-/parse-path-7.0.0.tgz#605a2d58d0a749c8594405d8cc3a2bf76d16099b" + integrity sha512-Euf9GG8WT9CdqwuWJGdf3RkUcTBArppHABkO7Lm8IzRQp0e2r/kkFnmhu4TSK30Wcu5rVAZLmfPKSBBi9tWFog== dependencies: - is-ssh "^1.3.0" - protocols "^1.4.0" - qs "^6.9.4" - query-string "^6.13.8" + protocols "^2.0.0" -parse-url@^5.0.0: - version "5.0.2" - resolved "https://registry.yarnpkg.com/parse-url/-/parse-url-5.0.2.tgz#856a3be1fcdf78dc93fc8b3791f169072d898b59" - integrity sha512-Czj+GIit4cdWtxo3ISZCvLiUjErSo0iI3wJ+q9Oi3QuMYTI6OZu+7cewMWZ+C1YAnKhYTk6/TLuhIgCypLthPA== +parse-url@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/parse-url/-/parse-url-8.1.0.tgz#972e0827ed4b57fc85f0ea6b0d839f0d8a57a57d" + integrity sha512-xDvOoLU5XRrcOZvnI6b8zA6n9O9ejNk/GExuz1yBuWUGn9KA97GI6HTs6u02wKara1CeVmZhH+0TZFdWScR89w== dependencies: - is-ssh "^1.3.0" - normalize-url "^3.3.0" - parse-path "^4.0.0" - protocols "^1.4.0" + parse-path "^7.0.0" parse5@5.1.1: version "5.1.1" @@ -13930,13 +14611,6 @@ path-dirname@^1.0.0: resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= -path-exists@^2.0.0: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" - integrity sha1-D+tsZPD8UY2adU3V77YscCJ2H0s= - dependencies: - pinkie-promise "^2.0.0" - path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -13989,15 +14663,6 @@ path-to-regexp@^1.7.0: dependencies: isarray "0.0.1" -path-type@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" - integrity sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE= - dependencies: - graceful-fs "^4.1.2" - pify "^2.0.0" - pinkie-promise "^2.0.0" - path-type@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" @@ -14939,6 +15604,11 @@ proc-log@^1.0.0: resolved "https://registry.yarnpkg.com/proc-log/-/proc-log-1.0.0.tgz#0d927307401f69ed79341e83a0b2c9a13395eb77" integrity "sha1-DZJzB0Afae15NB6DoLLJoTOV63c= sha512-aCk8AO51s+4JyuYGg3Q/a6gnrlDO09NpVWePtjp7xwphcoQ04x5WAfCyugcsbLooWcMJ87CLkD4+604IckEdhg==" +proc-log@^2.0.0, proc-log@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/proc-log/-/proc-log-2.0.1.tgz#8f3f69a1f608de27878f91f5c688b225391cb685" + integrity sha512-Kcmo2FhfDTXdcbfDH76N7uBYHINxc/8GW7UAVuVP9I+Va3uHSerrnKV6dLooga/gh7GlgzuCCr/eoldnL1muGw== + process-nextick-args@^2.0.0, process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" @@ -15048,10 +15718,10 @@ protobufjs@6.9.0: "@types/node" "^13.7.0" long "^4.0.0" -protocols@^1.1.0, protocols@^1.4.0: - version "1.4.8" - resolved "https://registry.yarnpkg.com/protocols/-/protocols-1.4.8.tgz#48eea2d8f58d9644a4a32caae5d5db290a075ce8" - integrity sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg== 
+protocols@^2.0.0, protocols@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/protocols/-/protocols-2.0.1.tgz#8f155da3fc0f32644e83c5782c8e8212ccf70a86" + integrity sha512-/XJ368cyBJ7fzLMwLKv1e4vLxOju2MNAIokcr7meSaNcVbWz/CPcW22cP04mwxOErdA5mwjA8Q6w/cdAQxVn7Q== proxy-addr@~2.0.5: version "2.0.6" @@ -15069,6 +15739,11 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" +proxy-from-env@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" + integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -15177,13 +15852,6 @@ qs@6.9.7: resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.7.tgz#4610846871485e1e048f44ae3b94033f0e675afe" integrity sha512-IhMFgUmuNpyRfxA90umL7ByLlgRXu6tIfKPpF5TmcfRLlLCckfP/g3IQmju6jjpu+Hh8rA+2p6A27ZSPOOHdKw== -qs@^6.9.4: - version "6.10.1" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.1.tgz#4931482fa8d647a5aab799c5271d2133b981fb6a" - integrity sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg== - dependencies: - side-channel "^1.0.4" - qs@~6.5.2: version "6.5.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" @@ -15197,16 +15865,6 @@ query-string@^4.1.0: object-assign "^4.1.0" strict-uri-encode "^1.0.0" -query-string@^6.13.8: - version "6.14.1" - resolved "https://registry.yarnpkg.com/query-string/-/query-string-6.14.1.tgz#7ac2dca46da7f309449ba0f86b1fd28255b0c86a" - integrity sha512-XDxAeVmpfu1/6IjyT/gXHOl+S0vQ9owggJ30hhWKdHAsNPOcasn5o9BW0eejZqL2e4vMjhAxoW3jVHcD6mbcYw== - dependencies: - decode-uri-component "^0.2.0" - filter-obj "^1.1.0" - split-on-first "^1.0.0" - strict-uri-encode "^2.0.0" - querystring-es3@^0.2.0, querystring-es3@~0.2.0: version 
"0.2.1" resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" @@ -15448,10 +16106,10 @@ react@^17.0.1: loose-envify "^1.1.0" object-assign "^4.1.1" -read-cmd-shim@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/read-cmd-shim/-/read-cmd-shim-2.0.0.tgz#4a50a71d6f0965364938e9038476f7eede3928d9" - integrity sha512-HJpV9bQpkl6KwjxlJcBoqu9Ba0PQg8TqSNIOrulGt54a0uup0HtevreFHzYzkm0lpnleRdNBzXznKrgxglEHQw== +read-cmd-shim@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/read-cmd-shim/-/read-cmd-shim-3.0.1.tgz#868c235ec59d1de2db69e11aec885bc095aea087" + integrity sha512-kEmDUoYf/CDy8yZbLTmhB1X9kkjf9Q80PCNsDMb7ufrGd6zZSQA1+UyjrO+pZm5K/S4OXCWJeiIt1JA8kAsa6g== read-only-stream@^2.0.0: version "2.0.0" @@ -15468,42 +16126,23 @@ read-package-json-fast@^2.0.1, read-package-json-fast@^2.0.2: json-parse-even-better-errors "^2.3.0" npm-normalize-package-bin "^1.0.1" -read-package-json@^2.0.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/read-package-json/-/read-package-json-2.1.2.tgz#6992b2b66c7177259feb8eaac73c3acd28b9222a" - integrity sha512-D1KmuLQr6ZSJS0tW8hf3WGpRlwszJOXZ3E8Yd/DNRaM5d+1wVRZdHlpGBLAuovjr28LbWvjpWkBHMxpRGGjzNA== - dependencies: - glob "^7.1.1" - json-parse-even-better-errors "^2.3.0" - normalize-package-data "^2.0.0" - npm-normalize-package-bin "^1.0.0" - -read-package-json@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/read-package-json/-/read-package-json-3.0.1.tgz#c7108f0b9390257b08c21e3004d2404c806744b9" - integrity sha512-aLcPqxovhJTVJcsnROuuzQvv6oziQx4zd3JvG0vGCL5MjTONUc4uJ90zCBC6R7W7oUKBNoR/F8pkyfVwlbxqng== +read-package-json-fast@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/read-package-json-fast/-/read-package-json-fast-2.0.3.tgz#323ca529630da82cb34b36cc0b996693c98c2b83" + integrity sha512-W/BKtbL+dUjTuRL2vziuYhp76s5HZ9qQhd/dKfWIZveD0O40453QNyZhC0e63lqZrAQ4jiOapVoeJ7JrszenQQ== dependencies: - glob 
"^7.1.1" json-parse-even-better-errors "^2.3.0" - normalize-package-data "^3.0.0" - npm-normalize-package-bin "^1.0.0" - -read-package-tree@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/read-package-tree/-/read-package-tree-5.3.1.tgz#a32cb64c7f31eb8a6f31ef06f9cedf74068fe636" - integrity sha512-mLUDsD5JVtlZxjSlPPx1RETkNjjvQYuweKwNVt1Sn8kP5Jh44pvYuUHCp6xSVDZWbNxVxG5lyZJ921aJH61sTw== - dependencies: - read-package-json "^2.0.0" - readdir-scoped-modules "^1.0.0" - util-promisify "^2.1.0" + npm-normalize-package-bin "^1.0.1" -read-pkg-up@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" - integrity sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI= +read-package-json@^5.0.0, read-package-json@^5.0.1: + version "5.0.2" + resolved "https://registry.yarnpkg.com/read-package-json/-/read-package-json-5.0.2.tgz#b8779ccfd169f523b67208a89cc912e3f663f3fa" + integrity sha512-BSzugrt4kQ/Z0krro8zhTwV1Kd79ue25IhNN/VtHFy1mG/6Tluyi+msc0UpwaoQzxSHa28mntAjIZY6kEgfR9Q== dependencies: - find-up "^1.0.0" - read-pkg "^1.0.0" + glob "^8.0.1" + json-parse-even-better-errors "^2.3.1" + normalize-package-data "^4.0.0" + npm-normalize-package-bin "^2.0.0" read-pkg-up@^2.0.0: version "2.0.0" @@ -15530,15 +16169,6 @@ read-pkg-up@^7.0.1: read-pkg "^5.2.0" type-fest "^0.8.1" -read-pkg@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" - integrity sha1-9f+qXs0pyzHAR0vKfXVra7KePyg= - dependencies: - load-json-file "^1.0.0" - normalize-package-data "^2.3.2" - path-type "^1.0.0" - read-pkg@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" @@ -15567,14 +16197,14 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -read@1, read@~1.0.1: +read@1, read@^1.0.7: version "1.0.7" resolved 
"https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" integrity sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ= dependencies: mute-stream "~0.0.4" -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6: +"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -15587,7 +16217,7 @@ read@1, read@~1.0.1: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.0.2, readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0: +readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.0.2, readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -15606,7 +16236,7 @@ readable-stream@~1.0.31: isarray "0.0.1" string_decoder "~0.10.x" -readdir-scoped-modules@^1.0.0, readdir-scoped-modules@^1.1.0: +readdir-scoped-modules@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz#8d45407b4f870a0dcaebc0e28670d18e74514309" integrity 
sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw== @@ -15653,14 +16283,6 @@ recursive-readdir@2.2.2: dependencies: minimatch "3.0.4" -redent@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" - integrity sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94= - dependencies: - indent-string "^2.1.0" - strip-indent "^1.0.1" - redent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" @@ -15791,13 +16413,6 @@ repeat-string@^1.6.1: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= -repeating@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" - integrity sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo= - dependencies: - is-finite "^1.0.0" - replace-ext@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" @@ -15826,7 +16441,7 @@ request-promise-native@^1.0.8: stealthy-require "^1.1.1" tough-cookie "^2.3.3" -request@^2.88.0, request@^2.88.2: +request@^2.88.2: version "2.88.2" resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== @@ -16132,7 +16747,7 @@ rxjs-compat@6.5.4: resolved "https://registry.yarnpkg.com/rxjs-compat/-/rxjs-compat-6.5.4.tgz#03825692af3fe363e04c43f41ff4113d76bbd305" integrity sha512-rkn+lbOHUQOurdd74J/hjmDsG9nFx0z66fvnbs8M95nrtKvNqCKdk7iZqdY51CGmDemTQk+kUPy4s8HVOHtkfA== -rxjs@>=6.4.0, rxjs@^6.6.0: +rxjs@>=6.4.0: version "6.6.3" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.3.tgz#8ca84635c4daa900c0d3967a6ee7ac60271ee552" 
integrity sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ== @@ -16160,12 +16775,19 @@ rxjs@^6.5.5: dependencies: tslib "^1.9.0" +rxjs@^7.5.5: + version "7.8.0" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.0.tgz#90a938862a82888ff4c7359811a595e14e1e09a4" + integrity sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg== + dependencies: + tslib "^2.1.0" + safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.1, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -16336,7 +16958,7 @@ semver-compare@^1.0.0: resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" integrity sha1-De4hahyUGrN+nvsXiPavxf9VN/w= -"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.1: +"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -16346,6 +16968,13 @@ semver@7.0.0: resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== +semver@7.3.4, semver@7.x, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2: + version "7.3.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" + integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== + dependencies: + lru-cache "^6.0.0" + semver@7.3.5, semver@^7.1.1, semver@^7.3.4, semver@^7.3.5: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" @@ -16353,18 +16982,18 @@ semver@7.3.5, semver@^7.1.1, semver@^7.3.4, semver@^7.3.5: dependencies: lru-cache "^6.0.0" -semver@7.x, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2: - version "7.3.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" - integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== - dependencies: - lru-cache "^6.0.0" - semver@^6.0.0, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^7.0.0, semver@^7.3.7: + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + lru-cache "^6.0.0" + send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" @@ -16450,7 +17079,7 @@ serve-static@1.14.2: parseurl "~1.3.3" send "0.17.2" -set-blocking@^2.0.0, set-blocking@~2.0.0: 
+set-blocking@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= @@ -16544,7 +17173,7 @@ shell-quote@1.7.2, shell-quote@^1.6.1: resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== -shelljs@^0.8.3, shelljs@^0.8.4: +shelljs@^0.8.4: version "0.8.5" resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" integrity "sha1-3gVUCNg2G+1mxmnS8ABTjO2O4gw= sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==" @@ -16572,6 +17201,11 @@ signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== +signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + simple-concat@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.0.tgz#7344cbb8b6e26fb27d66b2fc86f9f6d5997521c6" @@ -16627,16 +17261,16 @@ slice-ansi@^4.0.0: astral-regex "^2.0.0" is-fullwidth-code-point "^3.0.0" -slide@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" - integrity sha1-VusCfWW00tzmyy4tMsTUr8nh1wc= - smart-buffer@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.1.0.tgz#91605c25d91652f4661ea69ccf45f1b331ca21ba" integrity 
sha512-iVICrxOzCynf/SNaBQCw34eM9jROU/s5rzIhpOvzhzuYHfJR/DhZfDkXiZSgKXfgv26HT3Yni3AV/DGw0cGnnw== +smart-buffer@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" + integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== + snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" @@ -16720,15 +17354,6 @@ sockjs@0.3.20: uuid "^3.4.0" websocket-driver "0.6.5" -socks-proxy-agent@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-5.0.0.tgz#7c0f364e7b1cf4a7a437e71253bed72e9004be60" - integrity sha512-lEpa1zsWCChxiynk+lCycKuC502RxDWLKJZoIhnxrWNjLSDGYRFflHA1/228VkRcnv9TIb8w98derGbpKxJRgA== - dependencies: - agent-base "6" - debug "4" - socks "^2.3.3" - socks-proxy-agent@^6.0.0: version "6.1.1" resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.1.1.tgz#e664e8f1aaf4e1fb3df945f09e3d94f911137f87" @@ -16738,13 +17363,14 @@ socks-proxy-agent@^6.0.0: debug "^4.3.1" socks "^2.6.1" -socks@^2.3.3: - version "2.6.0" - resolved "https://registry.yarnpkg.com/socks/-/socks-2.6.0.tgz#6b984928461d39871b3666754b9000ecf39dfac2" - integrity sha512-mNmr9owlinMplev0Wd7UHFlqI4ofnBnNzFuzrm63PPaHgbkqCFe4T5LzwKmtQ/f2tX0NTpcdVLyD/FHxFBstYw== +socks-proxy-agent@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz#dc069ecf34436621acb41e3efa66ca1b5fed15b6" + integrity sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww== dependencies: - ip "^1.1.5" - smart-buffer "^4.1.0" + agent-base "^6.0.2" + debug "^4.3.3" + socks "^2.6.2" socks@^2.6.1: version "2.6.1" @@ -16754,6 +17380,14 @@ socks@^2.6.1: ip "^1.1.5" smart-buffer "^4.1.0" +socks@^2.6.2: + version "2.7.1" + resolved 
"https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55" + integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ== + dependencies: + ip "^2.0.0" + smart-buffer "^4.2.0" + sodium-native@3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/sodium-native/-/sodium-native-3.2.0.tgz#68a9469b96edadffef320cbce51294ad5f72a37f" @@ -16897,11 +17531,6 @@ spdy@^4.0.2: select-hose "^2.0.0" spdy-transport "^3.0.0" -split-on-first@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f" - integrity sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw== - split-string@^3.0.1, split-string@^3.0.2: version "3.1.0" resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" @@ -16964,6 +17593,13 @@ ssri@^8.0.0, ssri@^8.0.1: dependencies: minipass "^3.1.1" +ssri@^9.0.0, ssri@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057" + integrity sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q== + dependencies: + minipass "^3.1.1" + stable@^0.1.8: version "0.1.8" resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" @@ -17100,11 +17736,6 @@ strict-uri-encode@^1.0.0: resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= -strict-uri-encode@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz#b9c7330c7042862f6b142dc274bbcc5866ce3546" - integrity sha1-ucczDHBChi9rFC3CdLvMWGbONUY= - string-argv@0.3.1: version "0.3.1" resolved 
"https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.1.tgz#95e2fbec0427ae19184935f816d74aaa4c5c19da" @@ -17132,7 +17763,16 @@ string-width@^1.0.1: is-fullwidth-code-point "^1.0.0" strip-ansi "^3.0.0" -"string-width@^1.0.2 || 2", string-width@^2.1.1: +"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== @@ -17158,15 +17798,6 @@ string-width@^4.0.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" -string-width@^4.1.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5" @@ -17368,13 +17999,6 @@ strip-final-newline@^2.0.0: resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== -strip-indent@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" - integrity sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI= - dependencies: - get-stdin "^4.0.1" - strip-indent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" @@ -17563,6 +18187,17 @@ tar-stream@^2.0.0: inherits "^2.0.3" readable-stream "^3.1.1" +tar-stream@~2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" + integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== + dependencies: + bl "^4.0.3" + end-of-stream "^1.4.1" + fs-constants "^1.0.0" + inherits "^2.0.3" + readable-stream "^3.1.1" + tar@6.1.11, tar@^6.0.2, tar@^6.1.0: version "6.1.11" resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" @@ -17575,35 +18210,23 @@ tar@6.1.11, tar@^6.0.2, tar@^6.1.0: mkdirp "^1.0.3" yallist "^4.0.0" -tar@^4.4.12: - version "4.4.19" - resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" - integrity "sha1-Lk1yY98m8rkU3uEMglqxMhI3QvM= sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA==" +tar@^6.1.11, tar@^6.1.2: + version "6.1.13" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b" + integrity sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw== dependencies: - chownr "^1.1.4" - fs-minipass "^1.2.7" - minipass "^2.9.0" - minizlib "^1.3.3" - mkdirp "^0.5.5" - safe-buffer "^5.2.1" - yallist "^3.1.1" + chownr "^2.0.0" + fs-minipass "^2.0.0" + minipass "^4.0.0" + minizlib "^2.1.1" + mkdirp "^1.0.3" + yallist "^4.0.0" temp-dir@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0= -temp-write@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/temp-write/-/temp-write-4.0.0.tgz#cd2e0825fc826ae72d201dc26eef3bf7e6fc9320" - integrity sha512-HIeWmj77uOOHb0QX7siN3OtwV3CTntquin6TNVg6SHOqCP3hYKmox90eeFOGaY1MqJ9WYDDjkyZrW6qS5AWpbw== - dependencies: - graceful-fs "^4.1.15" - is-stream "^2.0.0" - make-dir "^3.0.0" - temp-dir "^1.0.0" - uuid "^3.3.2" - tempy@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.3.0.tgz#6f6c5b295695a16130996ad5ab01a8bd726e8bf8" @@ -17878,10 +18501,10 @@ treeverse@^1.0.4: resolved "https://registry.yarnpkg.com/treeverse/-/treeverse-1.0.4.tgz#a6b0ebf98a1bca6846ddc7ecbc900df08cb9cd5f" integrity sha512-whw60l7r+8ZU8Tu/Uc2yxtc4ZTZbR/PF3u1IPNKGQ6p8EICLb3Z2lAgoqw9bqYd8IkgnsaOcLzYHFckjqNsf0g== -trim-newlines@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" - integrity sha1-WIeWa7WCpFA6QetST301ARgVphM= +treeverse@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/treeverse/-/treeverse-2.0.0.tgz#036dcef04bc3fd79a9b79a68d4da03e882d8a9ca" + integrity sha512-N5gJCkLu1aXccpOTtqV6ddSEi6ZmGkh3hjmbu1IjcavJK4qyOVQmi0myQKM7z5jVGmD68SJoliaVrMmVObhj6A== trim-newlines@^3.0.0: version "3.0.1" @@ -17961,6 +18584,15 @@ tsconfig-paths@3.9.0, tsconfig-paths@^3.9.0: minimist "^1.2.0" strip-bom "^3.0.0" +tsconfig-paths@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.1.2.tgz#4819f861eef82e6da52fb4af1e8c930a39ed979a" + integrity sha512-uhxiMgnXQp1IR622dUXI+9Ehnws7i/y6xvpZB9IbUVOPy0muvdvgXeZOn88UcGPiT98Vp3rJPTa8bFoalZ3Qhw== + dependencies: + json5 "^2.2.2" + minimist "^1.2.6" + strip-bom "^3.0.0" + tslib@1.14.1, tslib@^1.9.0: version "1.14.1" resolved 
"https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" @@ -17986,6 +18618,11 @@ tslib@^2.0.3: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== +tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.4.1: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.0.tgz#42bfed86f5787aeb41d031866c8f402429e0fddf" + integrity sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg== + tslib@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" @@ -18124,6 +18761,11 @@ typescript@4.2.3: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.2.3.tgz#39062d8019912d43726298f09493d598048c1ce3" integrity sha512-qOcYwxaByStAWrBf4x0fibwZvMRG+r4cQoTjbPtUlrWjBHbmCAww1i448U0GJ+3cNNEtebDteo/cHOR3xJ4wEw== +"typescript@^3 || ^4": + version "4.9.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" + integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== + typescript@~4.1.2: version "4.1.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.1.3.tgz#519d582bd94cba0cf8934c7d8e8467e473f53bb7" @@ -18134,16 +18776,6 @@ uglify-js@^3.1.4: resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.2.tgz#fe10319861bccc8682bfe2e8151fbdd8aa921c44" integrity sha512-SbMu4D2Vo95LMC/MetNaso1194M1htEA+JrqE9Hk+G2DhI+itfS9TRu9ZKeCahLDNa/J3n4MqUJ/fOHMzQpRWw== -uid-number@0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" - integrity sha1-DqEOgDXo61uOREnwbaHHMGY7qoE= - -umask@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/umask/-/umask-1.1.0.tgz#f29cebf01df517912bb58ff9c4e50fde8e33320d" - integrity sha1-8pzr8B31F5ErtY/5xOUP3o4zMg0= - umd@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/umd/-/umd-3.0.3.tgz#aa9fe653c42b9097678489c01000acb69f0b26cf" @@ -18225,6 +18857,13 @@ unique-filename@^1.1.1: dependencies: unique-slug "^2.0.0" +unique-filename@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-2.0.1.tgz#e785f8675a9a7589e0ac77e0b5c34d2eaeac6da2" + integrity sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A== + dependencies: + unique-slug "^3.0.0" + unique-slug@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" @@ -18232,6 +18871,13 @@ unique-slug@^2.0.0: dependencies: imurmurhash "^0.1.4" +unique-slug@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-3.0.0.tgz#6d347cf57c8a7a7a6044aabd0e2d74e4d76dc7c9" + integrity sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w== + dependencies: + imurmurhash "^0.1.4" + unique-string@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a" @@ -18339,13 +18985,6 @@ util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= -util-promisify@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/util-promisify/-/util-promisify-2.1.0.tgz#3c2236476c4d32c5ff3c47002add7c13b9a82a53" - integrity sha1-PCI2R2xNMsX/PEcAKt18E7moKlM= - dependencies: - object.getownpropertydescriptors "^2.0.3" - util.promisify@1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" @@ -18405,11 +19044,16 @@ uuid@^3.3.2, uuid@^3.4.0: resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -uuid@^8.3.0: +uuid@^8.3.0, uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== +v8-compile-cache@2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + v8-compile-cache@^2.0.3: version "2.1.1" resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz#54bc3cdd43317bca91e35dcaf305b1a7237de745" @@ -18439,6 +19083,13 @@ validate-npm-package-name@^3.0.0: dependencies: builtins "^1.0.3" +validate-npm-package-name@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-4.0.0.tgz#fe8f1c50ac20afdb86f177da85b3600f0ac0d747" + integrity sha512-mzR0L8ZDktZjpX4OB46KT+56MAhl4EIazWP/+G/HPGuvfdaqg4YsCdtOm6U9+LOFyYDoh4dpnpxZRB9MQQns5Q== + dependencies: + builtins "^5.0.0" + validator@13.7.0: version "13.7.0" resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857" @@ -18565,10 +19216,10 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" -wcwidth@^1.0.0: +wcwidth@^1.0.0, wcwidth@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" - integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + 
integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== dependencies: defaults "^1.0.3" @@ -18752,15 +19403,6 @@ whatwg-url@^8.0.0: tr46 "^2.0.2" webidl-conversions "^5.0.0" -whatwg-url@^8.4.0: - version "8.5.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.5.0.tgz#7752b8464fc0903fec89aa9846fc9efe07351fd3" - integrity sha512-fy+R77xWv0AiqfLl4nuGUlQ3/6b5uNfQ4WAbGQVMYshCTCCPK9psC1nWh3XHuxGVCtlcDDQPQW1csmmIQo+fwg== - dependencies: - lodash "^4.7.0" - tr46 "^2.0.2" - webidl-conversions "^6.1.0" - which-boxed-primitive@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" @@ -18799,12 +19441,12 @@ which@^2.0.1, which@^2.0.2: dependencies: isexe "^2.0.0" -wide-align@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" - integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== +wide-align@^1.1.2, wide-align@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3" + integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg== dependencies: - string-width "^1.0.2 || 2" + string-width "^1.0.2 || 2 || 3 || 4" widest-line@^3.1.0: version "3.1.0" @@ -19039,7 +19681,7 @@ write-file-atomic@^2.4.2: imurmurhash "^0.1.4" signal-exit "^3.0.2" -write-file-atomic@^3.0.0, write-file-atomic@^3.0.3: +write-file-atomic@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== @@ -19049,6 +19691,14 @@ write-file-atomic@^3.0.0, write-file-atomic@^3.0.3: 
signal-exit "^3.0.2" typedarray-to-buffer "^3.1.5" +write-file-atomic@^4.0.0, write-file-atomic@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== + dependencies: + imurmurhash "^0.1.4" + signal-exit "^3.0.7" + write-json-file@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/write-json-file/-/write-json-file-3.2.0.tgz#65bbdc9ecd8a1458e15952770ccbadfcff5fe62a" @@ -19131,7 +19781,7 @@ y18n@^5.0.5: resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.5.tgz#8769ec08d03b1ea2df2500acef561743bbb9ab18" integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== -yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: +yallist@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== @@ -19161,6 +19811,11 @@ yargs-parser@20.x, yargs-parser@^20.2.3: resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== +yargs-parser@21.1.1, yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + yargs-parser@^13.1.2: version "13.1.2" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" @@ -19223,6 +19878,19 @@ yargs@^16.1.0, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@^17.6.2: + version 
"17.6.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.6.2.tgz#2e23f2944e976339a1ee00f18c77fedee8332541" + integrity sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yarn@1.22.17: version "1.22.17" resolved "https://registry.yarnpkg.com/yarn/-/yarn-1.22.17.tgz#bf910747d22497b573131f7341c0e1d15c74036c" @@ -19236,13 +19904,13 @@ yauzl@^2.10.0: buffer-crc32 "~0.2.3" fd-slicer "~1.1.0" -yeoman-environment@3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/yeoman-environment/-/yeoman-environment-3.3.0.tgz#bfb1f5bc1338e09e77e621f49762c61b1934ac65" - integrity sha512-2OV2hgRoLjkQrtNIfaTejinMHR5yjJ4DF/aG1Le/qnzHRAsE6gfFm9YL2Sq5FW5l16XSmt7BCMQlcDVyPTxpSg== +yeoman-environment@3.13.0: + version "3.13.0" + resolved "https://registry.yarnpkg.com/yeoman-environment/-/yeoman-environment-3.13.0.tgz#9db29f47352cb4a38eb0ef830a86091be3fd7240" + integrity sha512-eBPpBZCvFzx6yk17x+ZrOHp8ADDv6qHradV+SgdugaQKIy9NjEX5AkbwdTHLOgccSTkQ9rN791xvYOu6OmqjBg== dependencies: - "@npmcli/arborist" "^2.2.2" - are-we-there-yet "^1.1.5" + "@npmcli/arborist" "^4.0.4" + are-we-there-yet "^2.0.0" arrify "^2.0.1" binaryextensions "^4.15.0" chalk "^4.1.0" @@ -19259,14 +19927,16 @@ yeoman-environment@3.3.0: grouped-queue "^2.0.0" inquirer "^8.0.0" is-scoped "^2.1.0" + isbinaryfile "^4.0.10" lodash "^4.17.10" log-symbols "^4.0.0" mem-fs "^1.2.0 || ^2.0.0" mem-fs-editor "^8.1.2 || ^9.0.0" minimatch "^3.0.4" - npmlog "^4.1.2" + npmlog "^5.0.1" p-queue "^6.6.2" - pacote "^11.2.6" + p-transform "^1.3.0" + pacote "^12.0.2" preferred-pm "^3.0.3" pretty-bytes "^5.3.0" semver "^7.1.3" From 494095d99aef68b751ae85f2c01991bde10c10ea Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Tue, 2 May 2023 19:50:39 +0200 Subject: [PATCH 
024/170] Missing unit tests for TokenMethod (#8406) * :white_check_mark: Adds unit test for isTokenIDAvailable * :white_check_mark: :recycle: Rephrases unit test case for supply exceeding maximum range allowed for burn method * :white_check_mark: Tests event emission for initializeToken method * :white_check_mark: Adds introspective matchers for calls SupportedTokensStore * :recycle: Removes redundant event checks in case of error --- .../test/unit/modules/token/method.spec.ts | 80 ++++++++++++++----- 1 file changed, 62 insertions(+), 18 deletions(-) diff --git a/framework/test/unit/modules/token/method.spec.ts b/framework/test/unit/modules/token/method.spec.ts index b2dd823f555..b964f5bda5c 100644 --- a/framework/test/unit/modules/token/method.spec.ts +++ b/framework/test/unit/modules/token/method.spec.ts @@ -237,22 +237,36 @@ describe('token module', () => { const tokenID = Buffer.concat([ownChainID, Buffer.alloc(4, 255)]); it('should reject if token is not native', async () => { - await expect( - method.initializeToken(methodContext, Buffer.from([2, 0, 0, 0, 0, 0, 0, 0])), - ).rejects.toThrow('Only native token can be initialized'); + try { + await method.initializeToken(methodContext, Buffer.from([2, 0, 0, 0, 0, 0, 0, 0])); + } catch (e: any) { + expect(e.message).toBe('Only native token can be initialized.'); + checkEventResult( + methodContext.eventQueue, + InitializeTokenEvent, + TokenEventResult.TOKEN_ID_NOT_NATIVE, + ); + } }); it('should reject if there is no available local ID', async () => { - const supplyStore = tokenModule.stores.get(SupplyStore); - await supplyStore.set(methodContext, tokenID, { - totalSupply: defaultTotalSupply, - }); - await expect(method.initializeToken(methodContext, tokenID)).rejects.toThrow( - 'The specified token ID is not available', - ); - }); - - it('log initialize token event', async () => { + try { + const supplyStore = tokenModule.stores.get(SupplyStore); + await supplyStore.set(methodContext, tokenID, { + totalSupply: 
defaultTotalSupply, + }); + await method.initializeToken(methodContext, tokenID); + } catch (e: any) { + expect(e.message).toBe('The specified token ID is not available.'); + checkEventResult( + methodContext.eventQueue, + InitializeTokenEvent, + TokenEventResult.TOKEN_ID_NOT_AVAILABLE, + ); + } + }); + + it('logs initialize token event', async () => { await method.initializeToken(methodContext, tokenID); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); checkEventResult(methodContext.eventQueue, InitializeTokenEvent, TokenEventResult.SUCCESSFUL); @@ -291,7 +305,7 @@ describe('token module', () => { ); }); - it('should reject if supply exceed max balance', async () => { + it('should reject if supply exceeds maximum range allowed', async () => { await expect( method.mint( methodContext, @@ -1132,8 +1146,10 @@ describe('token module', () => { describe('supportAllTokens', () => { it('should call support all token', async () => { + const supportAllSpy = jest.spyOn(tokenModule.stores.get(SupportedTokensStore), 'supportAll'); await expect(method.supportAllTokens(methodContext)).resolves.toBeUndefined(); + expect(supportAllSpy).toHaveBeenCalledOnce(); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); expect(methodContext.eventQueue.getEvents()[0].toObject().name).toEqual( new AllTokensSupportedEvent('token').name, @@ -1143,9 +1159,13 @@ describe('token module', () => { describe('removeAllTokensSupport', () => { it('should call remove support all token', async () => { - await tokenModule.stores.get(SupportedTokensStore).supportAll(methodContext); + const supportedTokensStore = tokenModule.stores.get(SupportedTokensStore); + + await supportedTokensStore.supportAll(methodContext); + const removeAllSpy = jest.spyOn(supportedTokensStore, 'removeAll'); await expect(method.removeAllTokensSupport(methodContext)).resolves.toBeUndefined(); + expect(removeAllSpy).toHaveBeenCalledOnce(); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); 
expect(methodContext.eventQueue.getEvents()[0].toObject().name).toEqual( new AllTokensSupportRemovedEvent('token').name, @@ -1155,10 +1175,15 @@ describe('token module', () => { describe('supportAllTokensFromChainID', () => { it('should call support chain', async () => { + const supportChainSpy = jest.spyOn( + tokenModule.stores.get(SupportedTokensStore), + 'supportChain', + ); await expect( method.supportAllTokensFromChainID(methodContext, Buffer.from([1, 2, 3, 4])), ).resolves.toBeUndefined(); + expect(supportChainSpy).toHaveBeenCalledOnce(); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); expect(methodContext.eventQueue.getEvents()[0].toObject().name).toEqual( new AllTokensFromChainSupportedEvent('token').name, @@ -1168,6 +1193,8 @@ describe('token module', () => { describe('removeAllTokensSupportFromChainID', () => { it('should call remove support from chain', async () => { + const supportedTokenStore = tokenModule.stores.get(SupportedTokensStore); + const removeSupportForChainSpy = jest.spyOn(supportedTokenStore, 'removeSupportForChain'); await tokenModule.stores .get(SupportedTokensStore) .supportChain(methodContext, Buffer.from([1, 2, 3, 4])); @@ -1175,6 +1202,7 @@ describe('token module', () => { method.removeAllTokensSupportFromChainID(methodContext, Buffer.from([1, 2, 3, 4])), ).resolves.toBeUndefined(); + expect(removeSupportForChainSpy).toHaveBeenCalledOnce(); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); expect(methodContext.eventQueue.getEvents()[0].toObject().name).toEqual( new AllTokensFromChainSupportRemovedEvent('token').name, @@ -1184,10 +1212,15 @@ describe('token module', () => { describe('supportTokenID', () => { it('should call support token', async () => { + const supportTokenSpy = jest.spyOn( + tokenModule.stores.get(SupportedTokensStore), + 'supportToken', + ); await expect( method.supportTokenID(methodContext, Buffer.from([1, 2, 3, 4, 0, 0, 0, 0])), ).resolves.toBeUndefined(); + 
expect(supportTokenSpy).toHaveBeenCalledOnce(); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); expect(methodContext.eventQueue.getEvents()[0].toObject().name).toEqual( new TokenIDSupportedEvent('token').name, @@ -1197,13 +1230,14 @@ describe('token module', () => { describe('removeSupportTokenID', () => { it('should call remove support for token', async () => { - await tokenModule.stores - .get(SupportedTokensStore) - .supportToken(methodContext, Buffer.from([1, 2, 3, 4, 0, 0, 0, 0])); + const supportedTokensStore = tokenModule.stores.get(SupportedTokensStore); + await supportedTokensStore.supportToken(methodContext, Buffer.from([1, 2, 3, 4, 0, 0, 0, 0])); + const removeSupportForTokenSpy = jest.spyOn(supportedTokensStore, 'removeSupportForToken'); await expect( method.removeSupportTokenID(methodContext, Buffer.from([1, 2, 3, 4, 0, 0, 0, 0])), ).resolves.toBeUndefined(); + expect(removeSupportForTokenSpy).toHaveBeenCalledOnce(); expect(methodContext.eventQueue.getEvents()).toHaveLength(1); expect(methodContext.eventQueue.getEvents()[0].toObject().name).toEqual( new TokenIDSupportRemovedEvent('token').name, @@ -1241,4 +1275,14 @@ describe('token module', () => { ).resolves.toBeTrue(); }); }); + + describe('isTokenIDAvailable', () => { + it('should return true if provided tokenID exists in SupplyStore', async () => { + await expect(method.isTokenSupported(methodContext, defaultTokenID)).resolves.toBeTrue(); + }); + + it('should return false if provided tokenID does not exist in SupplyStore', async () => { + await expect(method.isTokenSupported(methodContext, Buffer.alloc(8, 1))).resolves.toBeFalse(); + }); + }); }); From 950f6ad632b3ae0fc617d4428f99163b979a0755 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Thu, 4 May 2023 09:48:37 +0200 Subject: [PATCH 025/170] Add missing unit tests for the Validator Registration Command (#8402) * :white_check_mark: Add missing unit tests * Clean. 
up the tests --- ...ion.spec.ts => register_validator.spec.ts} | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) rename framework/test/unit/modules/pos/commands/{validator_registration.spec.ts => register_validator.spec.ts} (94%) diff --git a/framework/test/unit/modules/pos/commands/validator_registration.spec.ts b/framework/test/unit/modules/pos/commands/register_validator.spec.ts similarity index 94% rename from framework/test/unit/modules/pos/commands/validator_registration.spec.ts rename to framework/test/unit/modules/pos/commands/register_validator.spec.ts index 843b91fe36c..7d2986f62f9 100644 --- a/framework/test/unit/modules/pos/commands/validator_registration.spec.ts +++ b/framework/test/unit/modules/pos/commands/register_validator.spec.ts @@ -324,6 +324,37 @@ describe('Validator registration command', () => { expect(result.status).toBe(VerifyStatus.FAIL); expect(result.error?.message).toInclude('Insufficient transaction fee.'); }); + + it('should throw error if name is empty', async () => { + const invalidParams = codec.encode(validatorRegistrationCommandParamsSchema, { + ...transactionParams, + name: '', + }); + + const invalidTransaction = new Transaction({ + module: 'pos', + command: 'registerValidator', + senderPublicKey: publicKey, + nonce: BigInt(0), + fee: BigInt(100000000), + params: invalidParams, + signatures: [publicKey], + }); + + const context = testing + .createTransactionContext({ + transaction: invalidTransaction, + chainID, + }) + .createCommandVerifyContext( + validatorRegistrationCommandParamsSchema, + ); + + const result = await validatorRegistrationCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude("'name' is in an unsupported format: "); + }); }); describe('execute', () => { From 50c3a4b38de427565864963a4e575a218cc0a7d5 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Fri, 5 May 2023 15:41:58 +0200 Subject: [PATCH 026/170] Add missing unit tests for the pos 
methods (#8421) :white_check_mark: Add unit tests Add missing `getStaker ` and `getValidator` unit tests. Refactor the invalid address in the unit test --- framework/test/unit/modules/pos/method.spec.ts | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/framework/test/unit/modules/pos/method.spec.ts b/framework/test/unit/modules/pos/method.spec.ts index 1df961858dd..6cb92ec9124 100644 --- a/framework/test/unit/modules/pos/method.spec.ts +++ b/framework/test/unit/modules/pos/method.spec.ts @@ -41,6 +41,7 @@ describe('PoSMethod', () => { let validatorSubStore: ValidatorStore; let nameSubStore: NameStore; const address = utils.getRandomBytes(20); + const invalidAddress = Buffer.alloc(0); const stakerData = { stakes: [ { @@ -139,6 +140,14 @@ describe('PoSMethod', () => { expect(stakerDataReturned).toStrictEqual(stakerData); }); }); + + describe('when input address is invalid', () => { + it('should throw error', async () => { + await expect(posMethod.getStaker(methodContext, invalidAddress)).rejects.toThrow( + invalidAddress.toString('hex'), + ); + }); + }); }); describe('getValidator', () => { @@ -150,6 +159,14 @@ describe('PoSMethod', () => { expect(validatorDataReturned).toStrictEqual(validatorData); }); }); + + describe('when input address is invalid', () => { + it('should throw error', async () => { + await expect(posMethod.getValidator(methodContext, invalidAddress)).rejects.toThrow( + invalidAddress.toString('hex'), + ); + }); + }); }); describe('updateSharedRewards', () => { From 047498c29c0b137511c17011d7d4ddf4b8b6d722 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 8 May 2023 13:23:59 +0200 Subject: [PATCH 027/170] Add template files for NFT module (#8423) * :seedling: Add template files * :bug: Fix test template --- .../src/modules/nft/cc_commands/.gitkeep | 0 framework/src/modules/nft/cc_method.ts | 25 ++++++++ framework/src/modules/nft/commands/.gitkeep | 0 framework/src/modules/nft/constants.ts | 13 ++++ 
framework/src/modules/nft/endpoint.ts | 25 ++++++++ framework/src/modules/nft/events/.gitkeep | 0 framework/src/modules/nft/index.ts | 16 +++++ framework/src/modules/nft/internal_method.ts | 25 ++++++++ framework/src/modules/nft/method.ts | 37 ++++++++++++ framework/src/modules/nft/module.ts | 60 +++++++++++++++++++ framework/src/modules/nft/schemas.ts | 13 ++++ framework/src/modules/nft/stores/.gitkeep | 0 framework/src/modules/nft/types.ts | 39 ++++++++++++ .../test/unit/modules/nft/module.spec.ts | 18 ++++++ 14 files changed, 271 insertions(+) create mode 100644 framework/src/modules/nft/cc_commands/.gitkeep create mode 100644 framework/src/modules/nft/cc_method.ts create mode 100644 framework/src/modules/nft/commands/.gitkeep create mode 100644 framework/src/modules/nft/constants.ts create mode 100644 framework/src/modules/nft/endpoint.ts create mode 100644 framework/src/modules/nft/events/.gitkeep create mode 100644 framework/src/modules/nft/index.ts create mode 100644 framework/src/modules/nft/internal_method.ts create mode 100644 framework/src/modules/nft/method.ts create mode 100644 framework/src/modules/nft/module.ts create mode 100644 framework/src/modules/nft/schemas.ts create mode 100644 framework/src/modules/nft/stores/.gitkeep create mode 100644 framework/src/modules/nft/types.ts create mode 100644 framework/test/unit/modules/nft/module.spec.ts diff --git a/framework/src/modules/nft/cc_commands/.gitkeep b/framework/src/modules/nft/cc_commands/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/framework/src/modules/nft/cc_method.ts b/framework/src/modules/nft/cc_method.ts new file mode 100644 index 00000000000..91c38ea11ce --- /dev/null +++ b/framework/src/modules/nft/cc_method.ts @@ -0,0 +1,25 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseCCMethod } from '../interoperability/base_cc_method'; +import { InteroperabilityMethod } from './types'; + +export class NFTInteroperableMethod extends BaseCCMethod { + // @ts-expect-error TODO: unused error. Remove when implementing. + private _interopMethod!: InteroperabilityMethod; + + public addDependencies(interoperabilityMethod: InteroperabilityMethod) { + this._interopMethod = interoperabilityMethod; + } +} diff --git a/framework/src/modules/nft/commands/.gitkeep b/framework/src/modules/nft/commands/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts new file mode 100644 index 00000000000..206ba71de27 --- /dev/null +++ b/framework/src/modules/nft/constants.ts @@ -0,0 +1,13 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ diff --git a/framework/src/modules/nft/endpoint.ts b/framework/src/modules/nft/endpoint.ts new file mode 100644 index 00000000000..aa2637fa295 --- /dev/null +++ b/framework/src/modules/nft/endpoint.ts @@ -0,0 +1,25 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { ModuleConfig } from './types'; +import { BaseEndpoint } from '../base_endpoint'; + +export class NFTEndpoint extends BaseEndpoint { + // @ts-expect-error TODO: unused error. Remove when implementing. + private _moduleConfig!: ModuleConfig; + + public init(moduleConfig: ModuleConfig) { + this._moduleConfig = moduleConfig; + } +} diff --git a/framework/src/modules/nft/events/.gitkeep b/framework/src/modules/nft/events/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/framework/src/modules/nft/index.ts b/framework/src/modules/nft/index.ts new file mode 100644 index 00000000000..14063d827fe --- /dev/null +++ b/framework/src/modules/nft/index.ts @@ -0,0 +1,16 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export { NFTModule } from './module'; +export { NFTMethod } from './method'; diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts new file mode 100644 index 00000000000..28c3ed9e09c --- /dev/null +++ b/framework/src/modules/nft/internal_method.ts @@ -0,0 +1,25 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseMethod } from '../base_method'; +import { ModuleConfig } from './types'; + +export class InternalMethod extends BaseMethod { + // @ts-expect-error TODO: unused error. Remove when implementing. + private _config!: ModuleConfig; + + public init(config: ModuleConfig): void { + this._config = config; + } +} diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts new file mode 100644 index 00000000000..16b611a5766 --- /dev/null +++ b/framework/src/modules/nft/method.ts @@ -0,0 +1,37 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { BaseMethod } from '../base_method'; +import { InteroperabilityMethod, ModuleConfig } from './types'; +import { InternalMethod } from './internal_method'; + +export class NFTMethod extends BaseMethod { + // @ts-expect-error TODO: unused error. Remove when implementing. + private _config!: ModuleConfig; + // @ts-expect-error TODO: unused error. Remove when implementing. + private _interoperabilityMethod!: InteroperabilityMethod; + // @ts-expect-error TODO: unused error. Remove when implementing. + private _internalMethod!: InternalMethod; + + public init(config: ModuleConfig): void { + this._config = config; + } + + public addDependencies( + interoperabilityMethod: InteroperabilityMethod, + internalMethod: InternalMethod, + ) { + this._interoperabilityMethod = interoperabilityMethod; + this._internalMethod = internalMethod; + } +} diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts new file mode 100644 index 00000000000..55e24c14fc8 --- /dev/null +++ b/framework/src/modules/nft/module.ts @@ -0,0 +1,60 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { GenesisBlockExecuteContext } from '../../state_machine'; +import { ModuleInitArgs, ModuleMetadata } from '../base_module'; +import { BaseInteroperableModule } from '../interoperability'; +import { InteroperabilityMethod } from '../token/types'; +import { NFTInteroperableMethod } from './cc_method'; +import { NFTEndpoint } from './endpoint'; +import { InternalMethod } from './internal_method'; +import { NFTMethod } from './method'; +import { FeeMethod } from './types'; + +export class NFTModule extends BaseInteroperableModule { + public method = new NFTMethod(this.stores, this.events); + public endpoint = new NFTEndpoint(this.stores, this.offchainStores); + public crossChainMethod = new NFTInteroperableMethod(this.stores, this.events); + + private readonly _internalMethod = new InternalMethod(this.stores, this.events); + // @ts-expect-error TODO: unused error. Remove when implementing. + private _interoperabilityMethod!: InteroperabilityMethod; + + public commands = []; + + // eslint-disable-next-line no-useless-constructor + public constructor() { + super(); + } + + public addDependencies(interoperabilityMethod: InteroperabilityMethod, _feeMethod: FeeMethod) { + this._interoperabilityMethod = interoperabilityMethod; + this.method.addDependencies(interoperabilityMethod, this._internalMethod); + this.crossChainMethod.addDependencies(interoperabilityMethod); + } + + public metadata(): ModuleMetadata { + return { + ...this.baseMetadata(), + endpoints: [], + assets: [], + }; + } + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async init(_args: ModuleInitArgs) {} + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async initGenesisState(_context: GenesisBlockExecuteContext): Promise {} +} diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts new file mode 100644 index 00000000000..206ba71de27 --- /dev/null +++ b/framework/src/modules/nft/schemas.ts @@ -0,0 +1,13 @@ 
+/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ diff --git a/framework/src/modules/nft/stores/.gitkeep b/framework/src/modules/nft/stores/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts new file mode 100644 index 00000000000..40fa051c2f8 --- /dev/null +++ b/framework/src/modules/nft/types.ts @@ -0,0 +1,39 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { MethodContext } from '../../state_machine'; +import { CCMsg } from '../interoperability'; + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface ModuleConfig {} + +export interface InteroperabilityMethod { + send( + methodContext: MethodContext, + feeAddress: Buffer, + module: string, + crossChainCommand: string, + receivingChainID: Buffer, + fee: bigint, + status: number, + parameters: Buffer, + timestamp?: number, + ): Promise; + error(methodContext: MethodContext, ccm: CCMsg, code: number): Promise; + terminateChain(methodContext: MethodContext, chainID: Buffer): Promise; +} + +export interface FeeMethod { + payFee(methodContext: MethodContext, amount: bigint): void; +} diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts new file mode 100644 index 00000000000..ccbc53346ba --- /dev/null +++ b/framework/test/unit/modules/nft/module.spec.ts @@ -0,0 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +describe('nft module', () => { + it('should be implemented', () => { + expect(true).toBeTrue(); + }); +}); From d702f7d279e201478485d1bc80ac091e5a1b15ab Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 8 May 2023 13:54:30 +0200 Subject: [PATCH 028/170] Add template files for NFT module (#8423) * :seedling: Add template files * :bug: Fix test template From 672790dc5254e5066a2be564d44ed6a4f4c6c357 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 12 May 2023 18:06:24 +0200 Subject: [PATCH 029/170] Improve BFT tests (#8422) :white_check_mark: Improve BFT tests --- .../test/unit/engine/bft/bft_votes.spec.ts | 74 ++++++++++++++++--- framework/test/unit/engine/bft/method.spec.ts | 60 ++++++++++++++- framework/test/unit/engine/bft/utils.spec.ts | 9 +++ .../pos/commands/register_validator.spec.ts | 2 +- 4 files changed, 133 insertions(+), 12 deletions(-) diff --git a/framework/test/unit/engine/bft/bft_votes.spec.ts b/framework/test/unit/engine/bft/bft_votes.spec.ts index 2d3f905d831..7d2b14d0ef2 100644 --- a/framework/test/unit/engine/bft/bft_votes.spec.ts +++ b/framework/test/unit/engine/bft/bft_votes.spec.ts @@ -39,7 +39,7 @@ describe('BFT votes', () => { beforeEach(() => { accounts = [utils.getRandomBytes(20), utils.getRandomBytes(20), utils.getRandomBytes(20)]; bftVotes = { - maxHeightPrevoted: 103, + maxHeightPrevoted: 149, maxHeightPrecommitted: 56, maxHeightCertified: 5, blockBFTInfos: [ @@ -237,7 +237,60 @@ describe('BFT votes', () => { expect(paramsCache.getParameters).not.toHaveBeenCalled(); }); - it('should not stake on blocks if generator is not in the validators', async () => { + it('should vote on blocks with more than 1 BFT weight when validator holds more BFT weight', async () => { + const stateStore = new StateStore(new InMemoryDatabase()); + const paramsStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_PARAMETERS); + await paramsStore.setWithSchema( + utils.intToBuffer(101, 4), + { + prevoteThreshold: BigInt(68), + 
precommitThreshold: BigInt(68), + certificateThreshold: BigInt(68), + validators: [ + { + address: accounts[0], + bftWeight: BigInt(40), + blsKey: utils.getRandomBytes(48), + generatorKey: utils.getRandomBytes(32), + }, + { + address: accounts[1], + bftWeight: BigInt(0), + blsKey: utils.getRandomBytes(48), + generatorKey: utils.getRandomBytes(32), + }, + { + address: accounts[2], + bftWeight: BigInt(20), + blsKey: utils.getRandomBytes(48), + generatorKey: utils.getRandomBytes(32), + }, + ], + validatorsHash: utils.getRandomBytes(32), + }, + bftParametersSchema, + ); + paramsCache = new BFTParametersCache(paramsStore); + insertBlockBFTInfo( + bftVotes, + createFakeBlockHeader({ + height: 152, + maxHeightGenerated: 151, + generatorAddress: accounts[0], + }), + 5, + ); + await updatePrevotesPrecommits(bftVotes, paramsCache); + + expect(bftVotes.blockBFTInfos[0].prevoteWeight).toEqual(BigInt(40)); + expect(bftVotes.blockBFTInfos[3].precommitWeight).toEqual(BigInt(104)); + expect(bftVotes.blockBFTInfos[4].precommitWeight).toEqual(BigInt(107)); + // accounts[0] already voted on blockBFTInfos[1], so after this it should not get affected + expect(bftVotes.blockBFTInfos[1].prevoteWeight).toEqual(BigInt(65)); + expect(bftVotes.blockBFTInfos[2].prevoteWeight).toEqual(BigInt(65)); + }); + + it('should not vote on blocks if generator is not in the validators', async () => { jest.spyOn(paramsCache, 'getParameters'); insertBlockBFTInfo( bftVotes, @@ -323,7 +376,7 @@ describe('BFT votes', () => { bftVotes, createFakeBlockHeader({ height: 152, - maxHeightGenerated: 0, + maxHeightGenerated: 152, generatorAddress: accounts[2], }), 5, @@ -337,13 +390,13 @@ describe('BFT votes', () => { describe('updateMaxHeightPrevoted', () => { let paramsCache: BFTParametersCache; - it('should store maximum height where prevote exceeds threshold', async () => { + it('should store maxHeightPrevoted where prevote exceeds threshold', async () => { const stateStore = new StateStore(new 
InMemoryDatabase()); const paramsStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_PARAMETERS); await paramsStore.setWithSchema( utils.intToBuffer(101, 4), { - prevoteThreshold: BigInt(68), + prevoteThreshold: BigInt(65), precommitThreshold: BigInt(68), certificateThreshold: BigInt(68), validators: [ @@ -372,7 +425,7 @@ describe('BFT votes', () => { ); paramsCache = new BFTParametersCache(paramsStore); await expect(updateMaxHeightPrevoted(bftVotes, paramsCache)).toResolve(); - expect(bftVotes.maxHeightPrevoted).toBe(149); + expect(bftVotes.maxHeightPrevoted).toBe(151); }); it('should not update maxHeightPrevoted if no block info exceeds threshold', async () => { @@ -410,14 +463,14 @@ describe('BFT votes', () => { ); paramsCache = new BFTParametersCache(paramsStore); await expect(updateMaxHeightPrevoted(bftVotes, paramsCache)).toResolve(); - expect(bftVotes.maxHeightPrevoted).toBe(103); + expect(bftVotes.maxHeightPrevoted).toBe(149); }); }); describe('updateMaxHeightPrecommitted', () => { let paramsCache: BFTParametersCache; - it('should store maximum height where prevote exceeds threshold', async () => { + it('should store maxHeightPrecommitted where prevote exceeds threshold', async () => { const stateStore = new StateStore(new InMemoryDatabase()); const paramsStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_PARAMETERS); await paramsStore.setWithSchema( @@ -455,14 +508,14 @@ describe('BFT votes', () => { expect(bftVotes.maxHeightPrecommitted).toBe(148); }); - it('should not update maxHeightPrevoted if no block info exceeds threshold', async () => { + it('should not update maxHeightPrecommitted if no block info exceeds threshold', async () => { const stateStore = new StateStore(new InMemoryDatabase()); const paramsStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_PARAMETERS); await paramsStore.setWithSchema( utils.intToBuffer(101, 4), { prevoteThreshold: BigInt(68), - precommitThreshold: BigInt(103), + 
precommitThreshold: BigInt(69), certificateThreshold: BigInt(68), validators: [ { @@ -519,6 +572,7 @@ describe('BFT votes', () => { aggregateCommit: { aggregationBits: Buffer.alloc(0), certificateSignature: Buffer.alloc(0), + // this should never happen, because in the validation this height is required to be the same as bftVotes.maxheightCertified. height: 10, }, }), diff --git a/framework/test/unit/engine/bft/method.spec.ts b/framework/test/unit/engine/bft/method.spec.ts index e1314a269d6..cc6b42ee0e5 100644 --- a/framework/test/unit/engine/bft/method.spec.ts +++ b/framework/test/unit/engine/bft/method.spec.ts @@ -93,7 +93,7 @@ describe('BFT Method', () => { await votesStore.setWithSchema( EMPTY_KEY, { - maxHeightPrevoted: 10, + maxHeightPrevoted: 0, maxHeightPrecommitted: 0, maxHeightCertified: 0, blockBFTInfos: [ @@ -230,10 +230,12 @@ describe('BFT Method', () => { it('should return BFT parameters if it exists for the lower height', async () => { await expect(bftMethod.getBFTParameters(stateStore, 25)).resolves.toEqual(params20); + await expect(bftMethod.getBFTParameters(stateStore, 29)).resolves.toEqual(params20); }); it('should return BFT parameters if it exists for the height', async () => { await expect(bftMethod.getBFTParameters(stateStore, 20)).resolves.toEqual(params20); + await expect(bftMethod.getBFTParameters(stateStore, 30)).resolves.toEqual(params30); }); it('should throw if the BFT parameter does not exist for the height or lower', async () => { @@ -603,6 +605,7 @@ describe('BFT Method', () => { }); it('should return the next height strictly higher than the input where BFT parameter exists', async () => { + await expect(bftMethod.getNextHeightBFTParameters(stateStore, 19)).resolves.toBe(20); await expect(bftMethod.getNextHeightBFTParameters(stateStore, 20)).resolves.toBe(30); }); @@ -1057,4 +1060,59 @@ describe('BFT Method', () => { }); }); }); + + describe('getGeneratorAtTimestamp', () => { + const validators = new Array(103).fill(0).map(() => ({ 
+ address: utils.getRandomBytes(20), + bftWeight: BigInt(1), + generatorKey: utils.getRandomBytes(32), + blsKey: utils.getRandomBytes(48), + })); + + beforeEach(async () => { + const bftParamsStore = stateStore.getStore( + MODULE_STORE_PREFIX_BFT, + STORE_PREFIX_BFT_PARAMETERS, + ); + await bftParamsStore.setWithSchema( + utils.intToBuffer(20, 4), + { + prevoteThreshold: BigInt(68), + precommitThreshold: BigInt(68), + certificateThreshold: BigInt(68), + validators, + validatorsHash: utils.getRandomBytes(32), + }, + bftParametersSchema, + ); + }); + + it('should return a validator in round robin', async () => { + for (let i = 0; i < 103; i += 1) { + // timestamp is computed to cover all possible modulo of 103 + await expect( + bftMethod.getGeneratorAtTimestamp(stateStore, 20, (103 * 1000000 + i) * 10), + ).resolves.toEqual(validators[i]); + } + }); + }); + + describe('getSlotNumber', () => { + it.each([ + { + input: 1683057470, + expected: 168305747, + }, + { + input: 1683057475, + expected: 168305747, + }, + { + input: 1683057479, + expected: 168305747, + }, + ])('should return expected value', ({ input, expected }) => { + expect(bftMethod.getSlotNumber(input)).toBe(expected); + }); + }); }); diff --git a/framework/test/unit/engine/bft/utils.spec.ts b/framework/test/unit/engine/bft/utils.spec.ts index 124f0f3cdb6..474fcdd866b 100644 --- a/framework/test/unit/engine/bft/utils.spec.ts +++ b/framework/test/unit/engine/bft/utils.spec.ts @@ -38,11 +38,13 @@ describe('bft utils', () => { const header1 = createFakeBlockHeader({ height: 10999, maxHeightPrevoted: 1099, + maxHeightGenerated: 0, generatorAddress, }); const header2 = createFakeBlockHeader({ height: 10999, maxHeightPrevoted: 1099, + maxHeightGenerated: 0, generatorAddress, }); @@ -53,11 +55,13 @@ describe('bft utils', () => { const header1 = createFakeBlockHeader({ height: 10999, maxHeightPrevoted: 1099, + maxHeightGenerated: 0, generatorAddress, }); const header2 = createFakeBlockHeader({ height: 11999, 
maxHeightPrevoted: 1099, + maxHeightGenerated: 0, generatorAddress, }); @@ -68,10 +72,13 @@ describe('bft utils', () => { const header1 = createFakeBlockHeader({ generatorAddress, height: 120, + maxHeightPrevoted: 0, + maxHeightGenerated: 0, }); const header2 = createFakeBlockHeader({ generatorAddress, height: 123, + maxHeightPrevoted: 0, maxHeightGenerated: 98, }); @@ -83,11 +90,13 @@ describe('bft utils', () => { generatorAddress, height: 133, maxHeightPrevoted: 101, + maxHeightGenerated: 0, }); const header2 = createFakeBlockHeader({ generatorAddress, height: 123, maxHeightPrevoted: 98, + maxHeightGenerated: 0, }); expect(areDistinctHeadersContradicting(header1, header2)).toBeTrue(); diff --git a/framework/test/unit/modules/pos/commands/register_validator.spec.ts b/framework/test/unit/modules/pos/commands/register_validator.spec.ts index 50774382608..ecbe9a76f15 100644 --- a/framework/test/unit/modules/pos/commands/register_validator.spec.ts +++ b/framework/test/unit/modules/pos/commands/register_validator.spec.ts @@ -413,7 +413,7 @@ describe('Validator registration command', () => { const result = await validatorRegistrationCommand.verify(context); expect(result.status).toBe(VerifyStatus.FAIL); - expect(result.error?.message).toInclude("'name' is in an unsupported format: "); + expect(result.error?.message).toInclude("'.name' must NOT have fewer than 1 characters"); }); }); From 72b4bb4492f7a2bfef5f17a906e7d584aec365c3 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 15 May 2023 12:23:43 +0200 Subject: [PATCH 030/170] Improve block processing tests (#8444) * :white_check_mark: Improve block processing tests * :white_check_mark: Add test for block header property and command execute * Update elements/lisk-chain/test/unit/transactions.spec.ts Co-authored-by: AndreasKendziorra <40799768+AndreasKendziorra@users.noreply.github.com> * :white_check_mark: Add abi commit error check * :recycle: Improve transaction size check --------- Co-authored-by: AndreasKendziorra 
<40799768+AndreasKendziorra@users.noreply.github.com> --- elements/lisk-chain/src/block_assets.ts | 8 ++- .../lisk-chain/test/unit/block_assets.spec.ts | 16 +++--- .../lisk-chain/test/unit/block_header.spec.ts | 3 ++ elements/lisk-chain/test/unit/chain.spec.ts | 50 ++++++++++++++++++- .../lisk-chain/test/unit/transactions.spec.ts | 39 +++++++++++---- .../unit/engine/consensus/consensus.spec.ts | 35 +++++++++++-- .../unit/state_machine/state_machine.spec.ts | 1 + 7 files changed, 127 insertions(+), 25 deletions(-) diff --git a/elements/lisk-chain/src/block_assets.ts b/elements/lisk-chain/src/block_assets.ts index 9d06176dd90..176157f7bbf 100644 --- a/elements/lisk-chain/src/block_assets.ts +++ b/elements/lisk-chain/src/block_assets.ts @@ -100,7 +100,9 @@ export class BlockAssets { ); } if (last.module > asset.module) { - throw new Error('Assets are not sorted in the increasing values of moduleID.'); + throw new Error( + 'Assets are not sorted by the module property value in lexicographical order.', + ); } // Check for duplicates if (i > 0 && asset.module === last.module) { @@ -118,7 +120,9 @@ export class BlockAssets { validator.validate(blockAssetSchema, asset); if (last.module > asset.module) { - throw new Error('Assets are not sorted in the increasing values of moduleID.'); + throw new Error( + 'Assets are not sorted by the module property value in lexicographical order.', + ); } if (i > 0 && asset.module === last.module) { throw new Error(`Module with ID ${this._assets[i].module} has duplicate entries.`); diff --git a/elements/lisk-chain/test/unit/block_assets.spec.ts b/elements/lisk-chain/test/unit/block_assets.spec.ts index a8a98d01053..ad4b8b3b377 100644 --- a/elements/lisk-chain/test/unit/block_assets.spec.ts +++ b/elements/lisk-chain/test/unit/block_assets.spec.ts @@ -129,11 +129,11 @@ describe('block assets', () => { assetList = [ { module: 'auth', - data: utils.getRandomBytes(64), + data: utils.getRandomBytes(MAX_ASSET_DATA_SIZE_BYTES), }, { module: 
'random', - data: utils.getRandomBytes(128), + data: utils.getRandomBytes(MAX_ASSET_DATA_SIZE_BYTES + 1), }, ]; assets = new BlockAssets(assetList); @@ -146,11 +146,11 @@ describe('block assets', () => { assetList = [ { module: 'auth', - data: utils.getRandomBytes(64), + data: utils.getRandomBytes(MAX_ASSET_DATA_SIZE_BYTES / 2), }, { module: 'random', - data: utils.getRandomBytes(64), + data: utils.getRandomBytes(MAX_ASSET_DATA_SIZE_BYTES / 2), }, ]; assets = new BlockAssets(assetList); @@ -158,8 +158,8 @@ describe('block assets', () => { }); }); - describe('when the assets are not sorted by moduleID', () => { - it('should throw error when assets are not sorted by moduleID', () => { + describe('when the assets are not sorted by module', () => { + it('should throw error when assets are not sorted by module', () => { assetList = [ { module: 'random', @@ -172,7 +172,7 @@ describe('block assets', () => { ]; assets = new BlockAssets(assetList); expect(() => assets.validate()).toThrow( - 'Assets are not sorted in the increasing values of moduleID.', + 'Assets are not sorted by the module property value in lexicographical order.', ); }); @@ -300,7 +300,7 @@ describe('block assets', () => { ]; assets = new BlockAssets(assetList); expect(() => assets.validateGenesis()).toThrow( - 'Assets are not sorted in the increasing values of moduleID.', + 'Assets are not sorted by the module property value in lexicographical order.', ); }); diff --git a/elements/lisk-chain/test/unit/block_header.spec.ts b/elements/lisk-chain/test/unit/block_header.spec.ts index a60ac0fcce8..b9aed276897 100644 --- a/elements/lisk-chain/test/unit/block_header.spec.ts +++ b/elements/lisk-chain/test/unit/block_header.spec.ts @@ -81,8 +81,10 @@ const blockHeaderProps = [ 'previousBlockID', 'generatorAddress', 'transactionRoot', + 'eventRoot', 'assetRoot', 'stateRoot', + 'impliesMaxPrevotes', 'maxHeightPrevoted', 'maxHeightGenerated', 'validatorsHash', @@ -143,6 +145,7 @@ describe('block_header', () => { 
expect(blockHeader.validatorsHash).toEqual(data.validatorsHash); expect(blockHeader.aggregateCommit).toEqual(data.aggregateCommit); expect(blockHeader.maxHeightPrevoted).toEqual(data.maxHeightPrevoted); + expect(blockHeader.impliesMaxPrevotes).toEqual(data.impliesMaxPrevotes); expect(blockHeader.maxHeightGenerated).toEqual(data.maxHeightGenerated); expect(blockHeader.assetRoot).toEqual(data.assetRoot); expect(blockHeader.transactionRoot).toEqual(data.transactionRoot); diff --git a/elements/lisk-chain/test/unit/chain.spec.ts b/elements/lisk-chain/test/unit/chain.spec.ts index d86980c6ff5..70c6a2754b1 100644 --- a/elements/lisk-chain/test/unit/chain.spec.ts +++ b/elements/lisk-chain/test/unit/chain.spec.ts @@ -33,6 +33,7 @@ import { DEFAULT_MAX_BLOCK_HEADER_CACHE, DEFAULT_MIN_BLOCK_HEADER_CACHE, } from '../../src/constants'; +import { BlockAssets, BlockHeader, Transaction } from '../../src'; describe('chain', () => { const constants = { @@ -296,11 +297,19 @@ describe('chain', () => { it('should not throw error with a valid block', async () => { const txs = new Array(20).fill(0).map(() => getTransaction()); + const totalSize = txs.reduce((prev, curr) => prev + curr.getBytes().length, 0); + (chainInstance as any).constants.maxTransactionsSize = totalSize; block = await createValidDefaultBlock({ transactions: txs, }); + jest.spyOn(BlockHeader.prototype, 'validate'); + jest.spyOn(BlockAssets.prototype, 'validate'); + jest.spyOn(Transaction.prototype, 'validate'); // Act & assert expect(() => chainInstance.validateBlock(block, { version: 2 })).not.toThrow(); + expect(BlockHeader.prototype.validate).toHaveBeenCalledTimes(1); + expect(BlockAssets.prototype.validate).toHaveBeenCalledTimes(1); + expect(Transaction.prototype.validate).toHaveBeenCalledTimes(txs.length); }); it('should throw error if transaction root does not match', async () => { @@ -317,12 +326,13 @@ describe('chain', () => { it('should throw error if transactions exceeds max transactions length', async () => 
{ // Arrange - (chainInstance as any).constants.maxTransactionsSize = 100; const txs = new Array(200).fill(0).map(() => getTransaction()); + const totalSize = txs.reduce((prev, curr) => prev + curr.getBytes().length, 0); + (chainInstance as any).constants.maxTransactionsSize = totalSize - 1; block = await createValidDefaultBlock({ transactions: txs }); // Act & assert expect(() => chainInstance.validateBlock(block, { version: 2 })).toThrow( - 'Transactions length is longer than configured length: 100.', + `Transactions length is longer than configured length: ${totalSize - 1}.`, ); }); @@ -337,5 +347,41 @@ describe('chain', () => { 'Block version must be 2.', ); }); + + it('should throw error if block header validation fails', async () => { + const txs = new Array(20).fill(0).map(() => getTransaction()); + block = await createValidDefaultBlock({ + transactions: txs, + }); + jest.spyOn(BlockHeader.prototype, 'validate').mockImplementation(() => { + throw new Error('invalid header'); + }); + // Act & assert + expect(() => chainInstance.validateBlock(block, { version: 2 })).toThrow('invalid header'); + }); + + it('should throw error if block asset validation fails', async () => { + const txs = new Array(20).fill(0).map(() => getTransaction()); + block = await createValidDefaultBlock({ + transactions: txs, + }); + jest.spyOn(BlockAssets.prototype, 'validate').mockImplementation(() => { + throw new Error('invalid assets'); + }); + // Act & assert + expect(() => chainInstance.validateBlock(block, { version: 2 })).toThrow('invalid assets'); + }); + + it('should throw error if transaction validation fails', async () => { + const txs = new Array(20).fill(0).map(() => getTransaction()); + block = await createValidDefaultBlock({ + transactions: txs, + }); + jest.spyOn(Transaction.prototype, 'validate').mockImplementation(() => { + throw new Error('invalid tx'); + }); + // Act & assert + expect(() => chainInstance.validateBlock(block, { version: 2 })).toThrow('invalid tx'); + 
}); }); }); diff --git a/elements/lisk-chain/test/unit/transactions.spec.ts b/elements/lisk-chain/test/unit/transactions.spec.ts index 5f098a8d85d..4c1bc833a4d 100644 --- a/elements/lisk-chain/test/unit/transactions.spec.ts +++ b/elements/lisk-chain/test/unit/transactions.spec.ts @@ -13,21 +13,26 @@ */ import { utils } from '@liskhq/lisk-cryptography'; import { Transaction } from '../../src/transaction'; +import { TRANSACTION_MAX_PARAMS_SIZE } from '../../src/constants'; describe('blocks/transactions', () => { - describe('transaction', () => { - it.todo('should have id'); - it.todo('should have senderAddress'); - it.todo('should throw when module is invalid'); - it.todo('should throw when command is invalid'); - it.todo('should throw when sender public key is invalid'); - it.todo('should throw when nonce is invalid'); - it.todo('should throw when fee is invalid'); - it.todo('should throw when params is invalid'); - }); describe('#validateTransaction', () => { let transaction: Transaction; + it('should not throw when transaction is valid', () => { + transaction = new Transaction({ + module: 'token', + command: 'transfer', + fee: BigInt(613000), + // 126 is the size of other properties + params: utils.getRandomBytes(TRANSACTION_MAX_PARAMS_SIZE), + nonce: BigInt(2), + senderPublicKey: utils.getRandomBytes(32), + signatures: [utils.getRandomBytes(64)], + }); + expect(() => transaction.validate()).not.toThrow(); + }); + it('should throw when module name is invalid', () => { transaction = new Transaction({ module: 'token_mod', @@ -54,6 +59,20 @@ describe('blocks/transactions', () => { expect(() => transaction.validate()).toThrow('Invalid command name'); }); + it('should throw when transaction is too big', () => { + transaction = new Transaction({ + module: 'token', + command: 'transfer', + fee: BigInt(613000), + // 126 is the size of other properties + params: utils.getRandomBytes(TRANSACTION_MAX_PARAMS_SIZE + 1), + nonce: BigInt(2), + senderPublicKey: 
utils.getRandomBytes(32), + signatures: [utils.getRandomBytes(64)], + }); + expect(() => transaction.validate()).toThrow('Params exceeds max size allowed'); + }); + it('should throw when sender public key is not 32 bytes', () => { transaction = new Transaction({ module: 'token', diff --git a/framework/test/unit/engine/consensus/consensus.spec.ts b/framework/test/unit/engine/consensus/consensus.spec.ts index f776314f0af..31dd10b1288 100644 --- a/framework/test/unit/engine/consensus/consensus.spec.ts +++ b/framework/test/unit/engine/consensus/consensus.spec.ts @@ -708,6 +708,14 @@ describe('consensus', () => { expect(savingEvents).toHaveLength(3); savingEvents.forEach((e: Event, i: number) => expect(e.toObject().index).toEqual(i)); }); + + it('should reject when ABI.commit fails and it should not store the block', async () => { + jest.spyOn(chain, 'saveBlock'); + jest.spyOn(consensus['_abi'], 'commit').mockRejectedValue(new Error('fail to commit')); + + await expect(consensus['_executeValidated'](block)).rejects.toThrow('fail to commit'); + expect(chain.saveBlock).not.toHaveBeenCalled(); + }); }); describe('block verification', () => { @@ -738,9 +746,12 @@ describe('consensus', () => { it('should throw error when block timestamp is from future', () => { const invalidBlock = { ...block }; - jest.spyOn(bft.method, 'getSlotNumber').mockReturnValue(Math.floor(Date.now() / 10)); - - (invalidBlock.header as any).timestamp = Math.floor((Date.now() + 10000) / 1000); + jest + .spyOn(bft.method, 'getSlotNumber') + // return blockSlotNumber in the future + .mockReturnValueOnce(Math.floor(Date.now() / 1000 / 10) + 10000) + // return blockSlotNumber for the currrent value + .mockReturnValueOnce(Math.floor(Date.now() / 1000 / 10)); expect(() => consensus['_verifyTimestamp'](invalidBlock as any)).toThrow( `Invalid timestamp ${ @@ -876,6 +887,20 @@ describe('consensus', () => { ); }); + it('should throw error if the header impliesMaxPrevotes is not the same as the computed value', 
async () => { + when(consensus['_bft'].method.getBFTHeights as never) + .calledWith(stateStore) + .mockResolvedValue({ maxHeightPrevoted: block.header.maxHeightPrevoted } as never); + + when(consensus['_bft'].method.impliesMaximalPrevotes as never) + .calledWith(stateStore, block.header) + .mockResolvedValue(false as never); + + await expect(consensus['_verifyBFTProperties'](stateStore, block as any)).rejects.toThrow( + 'Invalid imply max prevote', + ); + }); + it('should be success if maxHeightPrevoted is valid and header is not contradicting', async () => { when(consensus['_bft'].method.getBFTHeights as never) .calledWith(stateStore) @@ -885,6 +910,10 @@ describe('consensus', () => { .calledWith(stateStore, block.header) .mockResolvedValue(false as never); + when(consensus['_bft'].method.impliesMaximalPrevotes as never) + .calledWith(stateStore, block.header) + .mockResolvedValue(true as never); + await expect( consensus['_verifyBFTProperties'](stateStore, block as any), ).resolves.toBeUndefined(); diff --git a/framework/test/unit/state_machine/state_machine.spec.ts b/framework/test/unit/state_machine/state_machine.spec.ts index 63de9cd9cd2..e1825e04549 100644 --- a/framework/test/unit/state_machine/state_machine.spec.ts +++ b/framework/test/unit/state_machine/state_machine.spec.ts @@ -184,6 +184,7 @@ describe('state_machine', () => { getMethodContext: expect.any(Function), getStore: expect.any(Function), }); + expect(mod.commands[0].execute).toHaveBeenCalledTimes(1); expect(mod.afterCommandExecute).toHaveBeenCalledTimes(1); }); From d78eb5e69433ebea384421a2ed868ec99444cea6 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 16 May 2023 18:04:24 +0200 Subject: [PATCH 031/170] Update genesis block tests (#8440) * :white_check_mark: Update genesis block tests * :white_check_mark: Add test for valid genesis block * :white_check_mark: Add abi commit error check * :white_check_mark: Fix merge error --- .../lisk-chain/test/unit/block_header.spec.ts | 29 
+++++++++++++++++++ .../unit/engine/consensus/consensus.spec.ts | 7 +++++ .../unit/state_machine/state_machine.spec.ts | 17 +++++++++++ 3 files changed, 53 insertions(+) diff --git a/elements/lisk-chain/test/unit/block_header.spec.ts b/elements/lisk-chain/test/unit/block_header.spec.ts index b9aed276897..f713ccd9fca 100644 --- a/elements/lisk-chain/test/unit/block_header.spec.ts +++ b/elements/lisk-chain/test/unit/block_header.spec.ts @@ -215,6 +215,14 @@ describe('block_header', () => { }); describe('validateGenesis', () => { + it('should not throw when genesis block is valid', () => { + const block = getGenesisBlockAttrs(); + const blockHeader = new BlockHeader({ + ...block, + }); + + expect(() => blockHeader.validateGenesis()).not.toThrow(); + }); it('should throw error if previousBlockID is not 32 bytes', () => { const block = getGenesisBlockAttrs(); const blockHeader = new BlockHeader({ @@ -251,6 +259,15 @@ describe('block_header', () => { ); }); + it('should throw error if maxHeightGenerated is not zero', () => { + const block = getGenesisBlockAttrs(); + const blockHeader = new BlockHeader({ ...block, maxHeightGenerated: 10 }); + + expect(() => blockHeader.validateGenesis()).toThrow( + 'Genesis block header maxHeightGenerated must equal 0', + ); + }); + it('should throw error if maxHeightPrevoted is not equal to header.height', () => { const block = getGenesisBlockAttrs(); const blockHeader = new BlockHeader({ ...block, maxHeightPrevoted: 10 }); @@ -299,6 +316,18 @@ describe('block_header', () => { ); }); + it('should throw error if impliesMaxPrevotes is false', () => { + const block = getGenesisBlockAttrs(); + const blockHeader = new BlockHeader({ + ...block, + impliesMaxPrevotes: false, + }); + + expect(() => blockHeader.validateGenesis()).toThrow( + 'Genesis block header impliesMaxPrevotes must be true', + ); + }); + it('should throw error if signature is not empty buffer', () => { const block = getGenesisBlockAttrs(); const blockHeader = new BlockHeader({ 
...block, signature: utils.getRandomBytes(32) }); diff --git a/framework/test/unit/engine/consensus/consensus.spec.ts b/framework/test/unit/engine/consensus/consensus.spec.ts index 31dd10b1288..a830e3a062e 100644 --- a/framework/test/unit/engine/consensus/consensus.spec.ts +++ b/framework/test/unit/engine/consensus/consensus.spec.ts @@ -220,6 +220,13 @@ describe('consensus', () => { } as never); await expect(initConsensus()).rejects.toThrow('Genesis block validators hash is invalid'); }); + + it('should fail initialization if ABI.commit fails', async () => { + // Arrange + (chain.genesisBlockExist as jest.Mock).mockResolvedValue(false); + jest.spyOn(consensus['_abi'], 'commit').mockRejectedValue(new Error('fail to commit')); + await expect(initConsensus()).rejects.toThrow('fail to commit'); + }); }); describe('certifySingleCommit', () => { diff --git a/framework/test/unit/state_machine/state_machine.spec.ts b/framework/test/unit/state_machine/state_machine.spec.ts index e1825e04549..4cf695453ec 100644 --- a/framework/test/unit/state_machine/state_machine.spec.ts +++ b/framework/test/unit/state_machine/state_machine.spec.ts @@ -314,6 +314,23 @@ describe('state_machine', () => { // expect(systemMod.verifyAssets).toHaveBeenCalledTimes(1); expect(mod.verifyAssets).toHaveBeenCalledTimes(1); }); + + it('should fail if module is not registered', async () => { + await expect( + stateMachine.verifyAssets( + new BlockContext({ + eventQueue, + logger, + stateStore, + contextStore, + header, + assets: new BlockAssets([{ module: 'unknown', data: Buffer.alloc(30) }]), + chainID, + transactions: [transaction], + }), + ), + ).rejects.toThrow('Module unknown is not registered'); + }); }); describe('beforeExecuteBlock', () => { From a04e4ffbddb5e0a74bf1162602cb6009c1a593f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Wed, 17 May 2023 23:13:44 +0800 Subject: [PATCH 032/170] Clean up Auth module tests (#8447) * Fix and simplify multisig transaction 
execution tests * Clean up register multisig tests * Transaction and params are now specified as a decoded human-readable object * Auth tests now use a shared fixture with human-readable test data * Auth tests now sign a copy of imported multisig registration transaction * Store mocks are now re-initialized between each test case run * Revert renaming of auth account store variable --- elements/lisk-chain/src/index.ts | 1 + framework/src/modules/fee/module.ts | 1 - .../test/unit/modules/auth/fixtures.json | 1 - .../test/unit/modules/auth/module.spec.ts | 799 ++++-------------- .../unit/modules/auth/multisig_fixture.ts | 91 ++ .../auth/register_multisignature.spec.ts | 191 ++--- 6 files changed, 356 insertions(+), 728 deletions(-) delete mode 120000 framework/test/unit/modules/auth/fixtures.json create mode 100644 framework/test/unit/modules/auth/multisig_fixture.ts diff --git a/elements/lisk-chain/src/index.ts b/elements/lisk-chain/src/index.ts index 063909b55fb..4a63b79e9bd 100644 --- a/elements/lisk-chain/src/index.ts +++ b/elements/lisk-chain/src/index.ts @@ -34,6 +34,7 @@ export { MAX_MODULE_NAME_LENGTH, MIN_CROSS_CHAIN_COMMAND_NAME_LENGTH, MIN_MODULE_NAME_LENGTH, + EMPTY_BUFFER, } from './constants'; export * from './db_keys'; export type { RawBlock } from './types'; diff --git a/framework/src/modules/fee/module.ts b/framework/src/modules/fee/module.ts index c4cf27478e0..932b740856e 100644 --- a/framework/src/modules/fee/module.ts +++ b/framework/src/modules/fee/module.ts @@ -103,7 +103,6 @@ export class FeeModule extends BaseInteroperableModule { this._feePoolAddress = moduleConfig.feePoolAddress; } - // eslint-disable-next-line @typescript-eslint/require-await public async verifyTransaction(context: TransactionVerifyContext): Promise { const { getMethodContext, transaction, header } = context; diff --git a/framework/test/unit/modules/auth/fixtures.json b/framework/test/unit/modules/auth/fixtures.json deleted file mode 120000 index b7aa1791298..00000000000 
--- a/framework/test/unit/modules/auth/fixtures.json +++ /dev/null @@ -1 +0,0 @@ -../../../../../protocol-specs/generator_outputs/multisignature_registration_transaction/multisignature_registration_transaction.json \ No newline at end of file diff --git a/framework/test/unit/modules/auth/module.spec.ts b/framework/test/unit/modules/auth/module.spec.ts index 197c552bbde..29ce88b55a7 100644 --- a/framework/test/unit/modules/auth/module.spec.ts +++ b/framework/test/unit/modules/auth/module.spec.ts @@ -11,14 +11,11 @@ * * Removal or modification of this copyright notice is prohibited. */ -import { Mnemonic } from '@liskhq/lisk-passphrase'; import { codec } from '@liskhq/lisk-codec'; -import { utils, ed, address as cryptoAddress, legacy } from '@liskhq/lisk-cryptography'; -import { Transaction, transactionSchema, TAG_TRANSACTION, BlockAssets } from '@liskhq/lisk-chain'; -import { objects as ObjectUtils } from '@liskhq/lisk-utils'; +import { utils, address as cryptoAddress } from '@liskhq/lisk-cryptography'; +import { Transaction, BlockAssets, EMPTY_BUFFER } from '@liskhq/lisk-chain'; import { when } from 'jest-when'; import { AuthModule } from '../../../../src/modules/auth'; -import * as fixtures from './fixtures.json'; import * as testing from '../../../../src/testing'; import { genesisAuthStoreSchema } from '../../../../src/modules/auth/schemas'; import { TransactionExecuteContext, VerifyStatus } from '../../../../src/state_machine'; @@ -30,75 +27,36 @@ import { authAccountSchema, AuthAccountStore, } from '../../../../src/modules/auth/stores/auth_account'; +import { + chainID, + unsignedRegisterMultisigTx, + multisigAddress, + keyPairs, + multisigParams, +} from './multisig_fixture'; describe('AuthModule', () => { - let decodedMultiSignature: any; - let validTestTransaction: any; + let authAccountStoreMock: jest.Mock; + let storeMock: jest.Mock; let stateStore: any; let authModule: AuthModule; - let decodedBaseTransaction: any; - let passphrase: any; - let 
passphraseDerivedKeys: any; - let senderAccount: any; - - const { cloneDeep } = ObjectUtils; - const subStoreMock = jest.fn(); - const storeMock = jest.fn().mockReturnValue({ getWithSchema: subStoreMock }); - const defaultTestCase = fixtures.testCases[0]; - const chainID = Buffer.from(defaultTestCase.input.chainID, 'hex'); + const registerMultisigTx = new Transaction(unsignedRegisterMultisigTx); beforeEach(() => { - authModule = new AuthModule(); - const buffer = Buffer.from(defaultTestCase.output.transaction, 'hex'); - const id = utils.hash(buffer); - decodedBaseTransaction = codec.decode(transactionSchema, buffer); - - decodedMultiSignature = { - ...decodedBaseTransaction, - id, - }; - - validTestTransaction = new Transaction(decodedMultiSignature); - - stateStore = { - getStore: storeMock, - }; + authAccountStoreMock = jest.fn(); + storeMock = jest.fn().mockReturnValue({ getWithSchema: authAccountStoreMock }); - senderAccount = { - address: Buffer.from(defaultTestCase.input.account.address, 'hex'), - }; - - when(subStoreMock) - .calledWith(senderAccount.address, authAccountSchema) - .mockReturnValue({ - mandatoryKeys: [], - optionalKeys: [], - nonce: BigInt(1), - numberOfSignatures: 0, - }); - - passphrase = Mnemonic.generateMnemonic(); - passphraseDerivedKeys = legacy.getPrivateAndPublicKeyFromPassphrase(passphrase); - const address = cryptoAddress.getAddressFromPublicKey(passphraseDerivedKeys.publicKey); - - when(subStoreMock) - .calledWith(address, authAccountSchema) - .mockReturnValue({ - mandatoryKeys: [], - optionalKeys: [], - nonce: BigInt(0), - numberOfSignatures: 0, - }); + authModule = new AuthModule(); + stateStore = { getStore: storeMock }; }); describe('initGenesisState', () => { - const address = utils.getRandomBytes(20); const publicKey = utils.getRandomBytes(32); const validAsset = { authDataSubstore: [ { - storeKey: address, + storeKey: utils.getRandomBytes(20), storeValue: { numberOfSignatures: 0, mandatoryKeys: [], @@ -293,7 +251,6 @@ 
describe('AuthModule', () => { describe.each(invalidTestData)('%p', (_, data) => { it('should throw error when asset is invalid', async () => { - // eslint-disable-next-line @typescript-eslint/ban-types const assetBytes = codec.encode(genesisAuthStoreSchema, data as object); const context = createGenesisBlockContext({ stateStore, @@ -307,60 +264,49 @@ describe('AuthModule', () => { describe('verifyTransaction', () => { describe('Invalid nonce errors', () => { - it('should return FAIL status with error when trx nonce is lower than account nonce', async () => { - // Arrange - const accountNonce = BigInt(2); + const accountNonce = BigInt(2); - when(subStoreMock).calledWith(senderAccount.address, authAccountSchema).mockReturnValue({ + beforeEach(() => { + when(authAccountStoreMock).calledWith(multisigAddress, authAccountSchema).mockReturnValue({ mandatoryKeys: [], optionalKeys: [], nonce: accountNonce, numberOfSignatures: 0, }); + }); + it('should return FAIL status with error when trx nonce is lower than account nonce', async () => { const context = testing .createTransactionContext({ stateStore, - transaction: validTestTransaction, + transaction: registerMultisigTx, chainID, }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( + await expect(authModule.verifyTransaction(context)).rejects.toThrow( new InvalidNonceError( - // eslint-disable-next-line @typescript-eslint/restrict-template-expressions - `Transaction with id:${validTestTransaction.id.toString( + `Transaction with id:${registerMultisigTx.id.toString( 'hex', )} nonce is lower than account nonce.`, - validTestTransaction.nonce, + registerMultisigTx.nonce, accountNonce, ), ); }); it('should return PENDING status with no error when trx nonce is higher than account nonce', async () => { - // Arrange const transaction = new Transaction({ module: 'token', command: 'transfer', - nonce: BigInt('2'), - fee: BigInt('100000000'), - 
senderPublicKey: passphraseDerivedKeys.publicKey, + nonce: BigInt(4), + fee: BigInt(100_000_000), + senderPublicKey: keyPairs[0].publicKey, params: utils.getRandomBytes(100), signatures: [], }); - validTestTransaction = new Transaction(decodedMultiSignature); - - const signature = ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getBytes(), - passphraseDerivedKeys.privateKey, - ); - - transaction.signatures.push(signature); + transaction.sign(chainID, keyPairs[0].privateKey); const context = testing .createTransactionContext({ @@ -370,53 +316,68 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ + await expect(authModule.verifyTransaction(context)).resolves.toEqual({ status: VerifyStatus.PENDING, }); }); }); describe('Multi-signature registration transaction', () => { - it('should not throw for valid transaction', async () => { - // Arrange + registerMultisigTx.sign(chainID, keyPairs[0].privateKey); + + it('should not throw for a valid transaction', async () => { + when(authAccountStoreMock) + .calledWith(multisigAddress, authAccountSchema) + .mockReturnValue({ + mandatoryKeys: [], + optionalKeys: [], + nonce: BigInt(0), + numberOfSignatures: 0, + }); + const context = testing .createTransactionContext({ stateStore, - transaction: validTestTransaction, + transaction: registerMultisigTx, chainID, }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ + await expect(authModule.verifyTransaction(context)).resolves.toEqual({ status: VerifyStatus.OK, }); }); }); - describe('Transaction from single signatures account', () => { - it('should not throw for valid transaction', async () => { - // Arrange - const transaction = new Transaction({ + describe('Transaction from a single signature account', () => { + let transaction: Transaction; + + const singleSigAddress = 
cryptoAddress.getAddressFromPublicKey(keyPairs[1].publicKey); + + beforeEach(() => { + transaction = new Transaction({ module: 'token', command: 'transfer', - nonce: BigInt('0'), - fee: BigInt('100000000'), - senderPublicKey: passphraseDerivedKeys.publicKey, + nonce: BigInt(0), + fee: BigInt(100_000_000), + senderPublicKey: keyPairs[1].publicKey, params: utils.getRandomBytes(100), signatures: [], }); - const signature = ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getBytes(), - passphraseDerivedKeys.privateKey, - ); + transaction.sign(chainID, keyPairs[1].privateKey); - transaction.signatures.push(signature); + when(authAccountStoreMock) + .calledWith(singleSigAddress, authAccountSchema) + .mockReturnValue({ + mandatoryKeys: [], + optionalKeys: [], + nonce: BigInt(0), + numberOfSignatures: 0, + }); + }); + it('should not throw for a valid transaction', async () => { const context = testing .createTransactionContext({ stateStore, @@ -425,23 +386,13 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ + await expect(authModule.verifyTransaction(context)).resolves.toEqual({ status: VerifyStatus.OK, }); }); it('should throw if signature is missing', async () => { - // Arrange - const transaction = new Transaction({ - module: 'token', - command: 'transfer', - nonce: BigInt('0'), - fee: BigInt('100000000'), - senderPublicKey: passphraseDerivedKeys.publicKey, - params: utils.getRandomBytes(100), - signatures: [], - }); + transaction.signatures.pop(); const context = testing .createTransactionContext({ @@ -451,35 +402,13 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - 'Transactions from a single signature account should have exactly one signature. 
Found 0 signatures.', - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + 'Transactions from a single signature account should have exactly one signature. Found 0 signatures.', ); }); it('should throw error if account is not multi signature and more than one signature present', async () => { - // Arrange - const transaction = new Transaction({ - module: 'token', - command: 'transfer', - nonce: BigInt('0'), - fee: BigInt('100000000'), - senderPublicKey: passphraseDerivedKeys.publicKey, - params: utils.getRandomBytes(100), - signatures: [], - }); - - const signature = ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getBytes(), - passphraseDerivedKeys.privateKey, - ); - - transaction.signatures.push(signature); - transaction.signatures.push(signature); + transaction.signatures.push(transaction.signatures[0]); const context = testing .createTransactionContext({ @@ -489,157 +418,53 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - 'Transactions from a single signature account should have exactly one signature. Found 2 signatures.', - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + 'Transactions from a single signature account should have exactly one signature. 
Found 2 signatures.', ); }); }); - describe('Transaction from multi-signatures account', () => { - interface memberFixture { - passphrase: string; - keys?: { - privateKey: Buffer; - publicKey: Buffer; - }; - address?: Buffer; - } - - interface membersFixture { - [key: string]: memberFixture; - } - - const members: membersFixture = { - mainAccount: { - passphrase: 'order trip this crop race amused climb rather taxi morning holiday team', - }, - mandatoryA: { - passphrase: - 'clock cradle permit opinion hobby excite athlete weird soap mesh valley belt', - }, - mandatoryB: { - passphrase: - 'team dignity frost rookie gesture gaze piano daring fruit patrol chalk hidden', - }, - optionalA: { - passphrase: - 'welcome hello ostrich country drive car river jaguar warfare color tell risk', - }, - optionalB: { - passphrase: 'beef volcano emotion say lab reject small repeat reveal napkin bunker make', - }, - }; - - for (const aMember of Object.values(members)) { - aMember.keys = { - ...legacy.getPrivateAndPublicKeyFromPassphrase(aMember.passphrase), - }; - aMember.address = cryptoAddress.getAddressFromPublicKey(aMember.keys.publicKey); - } + describe('Transaction from a multi-signature account', () => { + let transaction: Transaction; - const multisigAccount = { - address: members.mainAccount.address, - numberOfSignatures: 3, - mandatoryKeys: [members.mandatoryA.keys?.publicKey, members.mandatoryB.keys?.publicKey], - optionalKeys: [members.optionalA.keys?.publicKey, members.optionalB.keys?.publicKey], + const privateKeys = { + mandatory: [keyPairs[0].privateKey, keyPairs[1].privateKey], + optional: [keyPairs[2].privateKey, keyPairs[3].privateKey], }; - let transaction: Transaction; - beforeEach(() => { - when(subStoreMock) - .calledWith(multisigAccount.address, authAccountSchema) + when(authAccountStoreMock) + .calledWith(multisigAddress, authAccountSchema) .mockResolvedValue({ numberOfSignatures: 3, - mandatoryKeys: [members.mandatoryA.keys?.publicKey, 
members.mandatoryB.keys?.publicKey], - optionalKeys: [members.optionalA.keys?.publicKey, members.optionalB.keys?.publicKey], + mandatoryKeys: multisigParams.mandatoryKeys, + optionalKeys: multisigParams.optionalKeys, nonce: BigInt(0), }); transaction = new Transaction({ module: 'token', command: 'transfer', - nonce: BigInt('0'), - fee: BigInt('100000000'), - senderPublicKey: (members as any).mainAccount.keys.publicKey, + nonce: BigInt(0), + fee: BigInt(100_000_000), + senderPublicKey: multisigParams.mandatoryKeys[0], params: utils.getRandomBytes(100), signatures: [], }); }); - it('should not throw for valid transaction', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); - - const context = testing - .createTransactionContext({ - stateStore, - transaction, - chainID, - }) - .createTransactionVerifyContext(); - - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ - status: VerifyStatus.OK, - }); - }); - - it('should not throw for multisignature account with only optional', async () => { - // Arrange - const optionalOnlyMultisigAccount = cloneDeep(multisigAccount); - optionalOnlyMultisigAccount.mandatoryKeys = []; - optionalOnlyMultisigAccount.numberOfSignatures = 1; - - when(subStoreMock) - .calledWith(optionalOnlyMultisigAccount.address, authAccountSchema) + it('should verify a valid transaction from a 1-of-2 multisig account with 0 mandatory signers', async () => { + 
when(authAccountStoreMock) + .calledWith(multisigAddress, authAccountSchema) .mockResolvedValue({ numberOfSignatures: 1, mandatoryKeys: [], - optionalKeys: [members.optionalA.keys?.publicKey, members.optionalB.keys?.publicKey], + optionalKeys: multisigParams.optionalKeys, nonce: BigInt(0), }); - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); + transaction.sign(chainID, privateKeys.optional[0]); + transaction.signatures.push(EMPTY_BUFFER); const context = testing .createTransactionContext({ @@ -649,42 +474,17 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ + await expect(authModule.verifyTransaction(context)).resolves.toEqual({ status: VerifyStatus.OK, }); }); - it('should not throw for valid transaction when first optional is present', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); + it('should verify a valid transaction from 3-of-4 multisig account with 2 mandatory signers, when the first optional signature is present', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.sign(chainID, privateKeys.optional[0]); + transaction.signatures.push(EMPTY_BUFFER); - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - 
transaction.signatures.push(Buffer.from('')); const context = testing .createTransactionContext({ stateStore, @@ -693,42 +493,16 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ + await expect(authModule.verifyTransaction(context)).resolves.toEqual({ status: VerifyStatus.OK, }); }); - it('should not throw for valid transaction when second optional is present', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalB.keys.privateKey, - ), - ); + it('should verify a valid transaction from 3-of-4 multisig account with 2 mandatory signers, when the second optional signature is present', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[1]); const context = testing .createTransactionContext({ @@ -738,40 +512,16 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ + await expect(authModule.verifyTransaction(context)).resolves.toEqual({ status: VerifyStatus.OK, }); }); - it('should throw for transaction where non optional absent signature is not empty buffer', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - 
TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); + it('should throw when an optional absent signature is not replaced by an empty buffer', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.sign(chainID, privateKeys.optional[1]); - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalB.keys.privateKey, - ), - ); const context = testing .createTransactionContext({ stateStore, @@ -780,53 +530,18 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( - 'hex', - )}'`, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( + 'hex', + )}'`, ); }); - it('should throw error if number of provided signatures is bigger than numberOfSignatures in account asset', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - 
transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalB.keys.privateKey, - ), - ); + it('should throw when a transaction from 3-of-4 multisig account has 4 signatures', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.sign(chainID, privateKeys.optional[0]); + transaction.sign(chainID, privateKeys.optional[1]); const context = testing .createTransactionContext({ @@ -836,37 +551,18 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( - 'hex', - )}'`, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( + 'hex', + )}'`, ); }); - it('should throw error if number of provided signatures is smaller than numberOfSignatures in account asset', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); + it('should throw when a transaction from 3-of-4 multisig account with 2 mandatory signers has only 2 mandatory signatures', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, 
privateKeys.mandatory[1]); + transaction.signatures.push(EMPTY_BUFFER); + transaction.signatures.push(EMPTY_BUFFER); const context = testing .createTransactionContext({ @@ -876,46 +572,18 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( - 'hex', - )}'`, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( + 'hex', + )}'`, ); }); - it('should throw for transaction with valid numberOfSignatures but missing mandatory key signature', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalB.keys.privateKey, - ), - ); + it('should throw if a mandatory signature is absent', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[0]); + transaction.sign(chainID, privateKeys.optional[1]); const context = testing .createTransactionContext({ @@ -925,42 +593,16 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error('Missing signature 
for a mandatory key.'), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + 'Missing signature for a mandatory key.', ); }); - it('should throw error if any of the mandatory signatures is not valid', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); + it('should throw if a mandatory signature is invalid', async () => { + transaction.signatures.push(utils.getRandomBytes(64)); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[1]); const context = testing .createTransactionContext({ @@ -970,45 +612,16 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).resolves.toEqual({ - status: VerifyStatus.OK, - }); - }); - - it('should throw error if any of the optional signatures is not valid', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - 
chainID, - transaction.getSigningBytes(), - (members as any).optionalB.keys.privateKey, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + 'Failed to validate signature', ); + }); - // We change the first byte of the 2nd optional signature - transaction.signatures[3][0] = 10; + it('should throw if an optional signature is invalid', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.signatures.push(utils.getRandomBytes(64)); + transaction.signatures.push(EMPTY_BUFFER); const context = testing .createTransactionContext({ @@ -1018,46 +631,16 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - `Failed to validate signature '${transaction.signatures[3].toString( - 'hex', - )}' for transaction with id '${transaction.id.toString('hex')}'`, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + 'Failed to validate signature', ); }); - it('should throw error if mandatory signatures are not in order', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); + it('should throw if mandatory signatures are not in order', async () => { + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.sign(chainID, privateKeys.mandatory[0]); + 
transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[1]); const context = testing .createTransactionContext({ @@ -1067,46 +650,18 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - `Failed to validate signature '${transaction.signatures[0].toString( - 'hex', - )}' for transaction with id '${transaction.id.toString('hex')}'`, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + `Failed to validate signature '${transaction.signatures[0].toString( + 'hex', + )}' for transaction with id '${transaction.id.toString('hex')}'`, ); }); - it('should throw error if optional signatures are not in order', async () => { - // Arrange - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryA.keys.privateKey, - ), - ); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).mandatoryB.keys.privateKey, - ), - ); - - transaction.signatures.push(Buffer.from('')); - - transaction.signatures.push( - ed.signDataWithPrivateKey( - TAG_TRANSACTION, - chainID, - transaction.getSigningBytes(), - (members as any).optionalA.keys.privateKey, - ), - ); + it('should throw if optional signatures are not in order', async () => { + transaction.sign(chainID, privateKeys.mandatory[0]); + transaction.sign(chainID, privateKeys.mandatory[1]); + transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[0]); const context = testing .createTransactionContext({ @@ -1116,13 +671,10 @@ describe('AuthModule', () => { }) .createTransactionVerifyContext(); - // Act & Assert - return expect(authModule.verifyTransaction(context)).rejects.toThrow( - new Error( - `Failed to validate signature 
'${transaction.signatures[3].toString( - 'hex', - )}' for transaction with id '${transaction.id.toString('hex')}'`, - ), + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + `Failed to validate signature '${transaction.signatures[3].toString( + 'hex', + )}' for transaction with id '${transaction.id.toString('hex')}'`, ); }); }); @@ -1139,21 +691,20 @@ describe('AuthModule', () => { context = testing .createTransactionContext({ stateStore, - transaction: validTestTransaction, + transaction: registerMultisigTx, chainID, }) .createTransactionExecuteContext(); }); it('should increment account nonce after a transaction', async () => { - const address = cryptoAddress.getAddressFromPublicKey(validTestTransaction.senderPublicKey); const authAccountBeforeTransaction = { - nonce: BigInt(validTestTransaction.nonce), + nonce: BigInt(registerMultisigTx.nonce), numberOfSignatures: 4, - mandatoryKeys: [utils.getRandomBytes(64), utils.getRandomBytes(64)], - optionalKeys: [utils.getRandomBytes(64), utils.getRandomBytes(64)], + mandatoryKeys: multisigParams.mandatoryKeys, + optionalKeys: multisigParams.optionalKeys, }; - await authAccountStore.set(context, address, authAccountBeforeTransaction); + await authAccountStore.set(context, multisigAddress, authAccountBeforeTransaction); await authModule.beforeCommandExecute(context); diff --git a/framework/test/unit/modules/auth/multisig_fixture.ts b/framework/test/unit/modules/auth/multisig_fixture.ts new file mode 100644 index 00000000000..e1fd2fc7d4c --- /dev/null +++ b/framework/test/unit/modules/auth/multisig_fixture.ts @@ -0,0 +1,91 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { ed, address } from '@liskhq/lisk-cryptography'; +import { codec } from '@liskhq/lisk-codec'; +import { Transaction } from '@liskhq/lisk-chain'; +import { + registerMultisignatureParamsSchema, + multisigRegMsgSchema, +} from '../../../../src/modules/auth/schemas'; +import { RegisterMultisignatureParams } from '../../../../src/modules/auth/types'; +import { MESSAGE_TAG_MULTISIG_REG } from '../../../../src/modules/auth/constants'; + +const keyPairsString = [ + { + privateKey: + '2475a8233503caade9542f2dd6c8c725f10bc03e3f809210b768f0a2320f06d50904c986211330582ef5e41ed9a2e7d6730bb7bdc59459a0caaaba55be4ec128', + publicKey: '0904c986211330582ef5e41ed9a2e7d6730bb7bdc59459a0caaaba55be4ec128', + }, + { + privateKey: + '985bc97b4b2aa91d590dde455c19c70818d97c56c7cfff790a1e0b71e3d15962557f1b9647fd2aefa357fed8bead72d1b02e5151b57d3c32d4d3f808c0705026', + publicKey: '557f1b9647fd2aefa357fed8bead72d1b02e5151b57d3c32d4d3f808c0705026', + }, + { + privateKey: + 'd0b159fe5a7cc3d5f4b39a97621b514bc55b0a0f1aca8adeed2dd1899d93f103a3f96c50d0446220ef2f98240898515cbba8155730679ca35326d98dcfb680f0', + publicKey: 'a3f96c50d0446220ef2f98240898515cbba8155730679ca35326d98dcfb680f0', + }, + { + privateKey: + '03e7852c6f1c6fe5cd0c5f7e3a36e499a1e0207e867f74f5b5bc42bfcc888bc8b8d2422aa7ebf1f85031f0bac2403be1fb24e0196d3bbed33987d4769eb37411', + publicKey: 'b8d2422aa7ebf1f85031f0bac2403be1fb24e0196d3bbed33987d4769eb37411', + }, +]; + +export const keyPairs = keyPairsString.map(keyPair => ({ + privateKey: Buffer.from(keyPair.privateKey, 'hex'), + publicKey: Buffer.from(keyPair.publicKey, 'hex'), +})); + +export const chainID = Buffer.from('04000000', 'hex'); + 
+export const multisigParams = { + numberOfSignatures: 4, + mandatoryKeys: [keyPairs[0].publicKey, keyPairs[1].publicKey], + optionalKeys: [keyPairs[2].publicKey, keyPairs[3].publicKey], +}; + +export const multisigAddress = address.getAddressFromPublicKey(multisigParams.mandatoryKeys[0]); +const decodedMessage = { + address: multisigAddress, + nonce: BigInt(0), + ...multisigParams, +}; +const encodedMessage = codec.encode(multisigRegMsgSchema, decodedMessage); +const signatures: Buffer[] = []; +for (const keyPair of keyPairs) { + signatures.push( + ed.signData(MESSAGE_TAG_MULTISIG_REG, chainID, encodedMessage, keyPair.privateKey), + ); +} + +export const decodedParams: RegisterMultisignatureParams = { + numberOfSignatures: multisigParams.numberOfSignatures, + mandatoryKeys: multisigParams.mandatoryKeys, + optionalKeys: multisigParams.optionalKeys, + signatures, +}; +const encodedParams = codec.encode(registerMultisignatureParamsSchema, decodedParams); + +export const unsignedRegisterMultisigTx = new Transaction({ + module: 'auth', + command: 'registerMultisignature', + fee: BigInt('100000000'), + params: encodedParams, + nonce: BigInt(0), + senderPublicKey: keyPairs[0].publicKey, + signatures: [], +}); diff --git a/framework/test/unit/modules/auth/register_multisignature.spec.ts b/framework/test/unit/modules/auth/register_multisignature.spec.ts index db51e9226d4..4d1edc8814c 100644 --- a/framework/test/unit/modules/auth/register_multisignature.spec.ts +++ b/framework/test/unit/modules/auth/register_multisignature.spec.ts @@ -15,7 +15,6 @@ import { Transaction } from '@liskhq/lisk-chain'; import { codec } from '@liskhq/lisk-codec'; import { utils } from '@liskhq/lisk-cryptography'; -import * as fixtures from './fixtures.json'; import * as testing from '../../../../src/testing'; import { RegisterMultisignatureCommand } from '../../../../src/modules/auth/commands/register_multisignature'; import { registerMultisignatureParamsSchema } from 
'../../../../src/modules/auth/schemas'; @@ -24,32 +23,33 @@ import { VerifyStatus } from '../../../../src/state_machine'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { AuthModule } from '../../../../src/modules/auth'; -import { AuthAccountStore } from '../../../../src/modules/auth/stores/auth_account'; +import { AuthAccount, AuthAccountStore } from '../../../../src/modules/auth/stores/auth_account'; import { InvalidSignatureEvent } from '../../../../src/modules/auth/events/invalid_signature'; import { MultisignatureRegistrationEvent } from '../../../../src/modules/auth/events/multisignature_registration'; +import { chainID, decodedParams, keyPairs, unsignedRegisterMultisigTx } from './multisig_fixture'; describe('Register Multisignature command', () => { let registerMultisignatureCommand: RegisterMultisignatureCommand; let stateStore: PrefixedStateReadWriter; - let authStore: AuthAccountStore; - let transaction: Transaction; - let decodedParams: RegisterMultisignatureParams; + let authAccountStore: AuthAccountStore; const authModule = new AuthModule(); - const defaultTestCase = fixtures.testCases[0]; - const chainID = Buffer.from(defaultTestCase.input.chainID, 'hex'); + + const defaultAuthAccount: AuthAccount = { + numberOfSignatures: 0, + mandatoryKeys: [], + optionalKeys: [], + nonce: BigInt(0), + }; + + const transaction = new Transaction(unsignedRegisterMultisigTx); + transaction.sign(chainID, keyPairs[0].privateKey); beforeEach(() => { registerMultisignatureCommand = new RegisterMultisignatureCommand( authModule.stores, authModule.events, ); - const buffer = Buffer.from(defaultTestCase.output.transaction, 'hex'); - transaction = Transaction.fromBytes(buffer); - decodedParams = codec.decode( - registerMultisignatureParamsSchema, - transaction.params, - ); }); describe('verify', () => { @@ -62,6 +62,7 @@ 
describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); expect(result.status).toBe(VerifyStatus.OK); @@ -69,11 +70,10 @@ describe('Register Multisignature command', () => { it('should return error if params has numberOfSignatures > 64', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, numberOfSignatures: 100, - mandatoryKeys: [utils.getRandomBytes(32)], - optionalKeys: [utils.getRandomBytes(32)], - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -82,6 +82,7 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); expect(result.error?.message).toInclude('must be <= 64'); @@ -89,11 +90,10 @@ describe('Register Multisignature command', () => { it('should return error if params has numberOfSignatures < 1', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, numberOfSignatures: 0, - mandatoryKeys: [utils.getRandomBytes(32)], - optionalKeys: [utils.getRandomBytes(32)], - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -102,6 +102,7 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); expect(result.error?.message).toInclude('must be >= 1'); @@ -109,11 +110,10 @@ describe('Register Multisignature command', () => { it('should return error if params has more than 64 mandatory keys', async () => { const params = 
codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 2, + ...decodedParams, mandatoryKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), - optionalKeys: [], - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -122,6 +122,7 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); expect(result.error?.message).toInclude('must NOT have more than 64 items'); @@ -129,11 +130,10 @@ describe('Register Multisignature command', () => { it('should return error if params mandatory keys contains items with length bigger than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 2, + ...decodedParams, mandatoryKeys: [utils.getRandomBytes(32), utils.getRandomBytes(64)], - optionalKeys: [], - signatures: [utils.getRandomBytes(64), utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -142,6 +142,7 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); expect(result.error?.message).toInclude("Property '.mandatoryKeys.1' maxLength exceeded"); @@ -149,11 +150,10 @@ describe('Register Multisignature command', () => { it('should return error if params mandatory keys contains items with length smaller than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 2, + ...decodedParams, mandatoryKeys: [utils.getRandomBytes(10), utils.getRandomBytes(32)], - optionalKeys: [], - signatures: [utils.getRandomBytes(64), utils.getRandomBytes(64)], }); + const 
context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -162,17 +162,18 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toInclude('minLength not satisfied'); }); it('should return error if params optional keys contains items with length bigger than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 1, - mandatoryKeys: [], + ...decodedParams, optionalKeys: [utils.getRandomBytes(64)], - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -181,17 +182,18 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toInclude('maxLength exceeded'); }); it('should return error if params optional keys contains items with length smaller than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 1, - mandatoryKeys: [], + ...decodedParams, optionalKeys: [utils.getRandomBytes(31)], - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -200,17 +202,18 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toInclude('minLength not satisfied'); }); it('should return error if params has more than 64 optional keys', async () => { const params = 
codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 2, - mandatoryKeys: [], + ...decodedParams, optionalKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -219,16 +222,19 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toInclude('must NOT have more than 64 items'); }); it('should return error when there are duplicated mandatory keys', async () => { + const publicKey = utils.getRandomBytes(32); const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - mandatoryKeys: [decodedParams.mandatoryKeys[0], decodedParams.mandatoryKeys[0]], - signatures: [utils.getRandomBytes(64)], + mandatoryKeys: [publicKey, publicKey], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -239,15 +245,17 @@ describe('Register Multisignature command', () => { ); const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe('MandatoryKeys contains duplicate public keys.'); }); it('should return error when there are duplicated optional keys', async () => { + const publicKey = utils.getRandomBytes(32); const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - optionalKeys: [decodedParams.optionalKeys[0], decodedParams.optionalKeys[0]], - signatures: [utils.getRandomBytes(64)], + optionalKeys: [publicKey, publicKey], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -258,6 +266,7 @@ describe('Register Multisignature command', () => { ); const result = await 
registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe('OptionalKeys contains duplicate public keys.'); }); @@ -265,8 +274,8 @@ describe('Register Multisignature command', () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, numberOfSignatures: 5, - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -275,7 +284,9 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe( 'The numberOfSignatures is bigger than the count of Mandatory and Optional keys.', ); @@ -287,6 +298,7 @@ describe('Register Multisignature command', () => { numberOfSignatures: 1, signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -295,7 +307,9 @@ describe('Register Multisignature command', () => { .createCommandVerifyContext( registerMultisignatureParamsSchema, ); + const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe( 'The numberOfSignatures needs to be equal or bigger than the number of Mandatory keys.', ); @@ -303,16 +317,10 @@ describe('Register Multisignature command', () => { it('should return error when mandatory and optional key sets are not disjointed', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 2, - mandatoryKeys: [ - Buffer.from('48e041ae61a32777c899c1f1b0a9588bdfe939030613277a39556518cc66d371', 'hex'), - Buffer.from('483077a8b23208f2fd85dacec0fbb0b590befea0a1fcd76a5b43f33063aaa180', 'hex'), - ], - optionalKeys: [ - Buffer.from('483077a8b23208f2fd85dacec0fbb0b590befea0a1fcd76a5b43f33063aaa180', 'hex'), - ], 
- signatures: [utils.getRandomBytes(64)], + ...decodedParams, + optionalKeys: [keyPairs[0].publicKey, keyPairs[2].publicKey], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -323,6 +331,7 @@ describe('Register Multisignature command', () => { ); const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe( 'Invalid combination of Mandatory and Optional keys. Repeated keys across Mandatory and Optional were found.', ); @@ -331,9 +340,9 @@ describe('Register Multisignature command', () => { it('should return error when mandatory keys set is not sorted', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - numberOfSignatures: 2, - mandatoryKeys: [decodedParams.mandatoryKeys[1], decodedParams.mandatoryKeys[0]], + mandatoryKeys: [keyPairs[1].publicKey, keyPairs[0].publicKey], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -344,15 +353,16 @@ describe('Register Multisignature command', () => { ); const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe('Mandatory keys should be sorted lexicographically.'); }); it('should return error when optional keys set is not sorted', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - numberOfSignatures: 2, - optionalKeys: [decodedParams.optionalKeys[1], decodedParams.optionalKeys[0]], + optionalKeys: [keyPairs[3].publicKey, keyPairs[2].publicKey], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -363,16 +373,17 @@ describe('Register Multisignature command', () => { ); const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toBe('Optional keys should be sorted 
lexicographically.'); }); it('should return error when the number of optional and mandatory keys is more than 64', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - numberOfSignatures: 2, + ...decodedParams, optionalKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), mandatoryKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), - signatures: [utils.getRandomBytes(64)], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -383,16 +394,18 @@ describe('Register Multisignature command', () => { ); const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toInclude('must NOT have more than 64 item'); }); - it('should return error when the number of optional and mandatory keys is less than 1', async () => { + it('should return error when no keys are provided', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { optionalKeys: [], mandatoryKeys: [], numberOfSignatures: 0, - signatures: [utils.getRandomBytes(64)], + signatures: [], }); + const context = testing .createTransactionContext({ transaction: new Transaction({ ...transaction.toObject(), params }), @@ -403,6 +416,7 @@ describe('Register Multisignature command', () => { ); const result = await registerMultisignatureCommand.verify(context); + expect(result.error?.message).toInclude('must be >= 1'); }); }); @@ -412,23 +426,10 @@ describe('Register Multisignature command', () => { beforeEach(() => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - authStore = authModule.stores.get(AuthAccountStore); + authAccountStore = authModule.stores.get(AuthAccountStore); }); - it('should not throw when registering for first time', async () => { - await authStore.set( - { - getStore: (storePrefix: Buffer, substorePrefix: Buffer) => - stateStore.getStore(storePrefix, substorePrefix), - }, - 
transaction.senderAddress, - { - optionalKeys: [], - mandatoryKeys: [], - numberOfSignatures: 0, - nonce: BigInt(0), - }, - ); + it('should not throw when registering for the first time and signatures are valid', async () => { const context = testing .createTransactionContext({ stateStore, @@ -439,16 +440,18 @@ describe('Register Multisignature command', () => { registerMultisignatureParamsSchema, ); - context.eventQueue = eventQueueMock; + await authAccountStore.set(context, transaction.senderAddress, defaultAuthAccount); + context.eventQueue = eventQueueMock; jest.spyOn(authModule.events.get(MultisignatureRegistrationEvent), 'log'); await expect(registerMultisignatureCommand.execute(context)).resolves.toBeUndefined(); - const updatedStore = authModule.stores.get(AuthAccountStore); - const updatedData = await updatedStore.get(context, transaction.senderAddress); - expect(updatedData.numberOfSignatures).toBe(decodedParams.numberOfSignatures); - expect(updatedData.mandatoryKeys).toEqual(decodedParams.mandatoryKeys); - expect(updatedData.optionalKeys).toEqual(decodedParams.optionalKeys); + + const authAccount = await authAccountStore.get(context, transaction.senderAddress); + + expect(authAccount.numberOfSignatures).toBe(decodedParams.numberOfSignatures); + expect(authAccount.mandatoryKeys).toEqual(decodedParams.mandatoryKeys); + expect(authAccount.optionalKeys).toEqual(decodedParams.optionalKeys); expect(authModule.events.get(MultisignatureRegistrationEvent).log).toHaveBeenCalledWith( expect.anything(), transaction.senderAddress, @@ -460,23 +463,13 @@ describe('Register Multisignature command', () => { ); }); - it('should throw when incorrect signature', async () => { - const buffer = Buffer.from(defaultTestCase.output.transaction, 'hex'); - const multiSignatureTx = Transaction.fromBytes(buffer); - const multiSignatureTxDecodedParams = codec.decode( - registerMultisignatureParamsSchema, - multiSignatureTx.params, - ); + it('should throw when the signature is 
incorrect', async () => { const invalidSignature = utils.getRandomBytes(64); - multiSignatureTxDecodedParams.signatures[0] = invalidSignature; + decodedParams.signatures[0] = invalidSignature; - const paramsBytes = codec.encode( - registerMultisignatureParamsSchema, - multiSignatureTxDecodedParams, - ); const invalidTransaction = new Transaction({ - ...multiSignatureTx.toObject(), - params: paramsBytes, + ...transaction.toObject(), + params: codec.encode(registerMultisignatureParamsSchema, decodedParams), }); const context = testing @@ -489,16 +482,10 @@ describe('Register Multisignature command', () => { registerMultisignatureParamsSchema, ); - await authStore.set(context, transaction.senderAddress, { - optionalKeys: [], - mandatoryKeys: [], - numberOfSignatures: 0, - nonce: BigInt(0), - }); - + await authAccountStore.set(context, transaction.senderAddress, defaultAuthAccount); context.eventQueue = eventQueueMock; - jest.spyOn(authModule.events.get(InvalidSignatureEvent), 'error'); + await expect(registerMultisignatureCommand.execute(context)).rejects.toThrow( `Invalid signature for public key ${context.params.mandatoryKeys[0].toString('hex')}.`, ); From bdc29bd74ab6362be82d8e06c5279bca2497aab2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Thu, 18 May 2023 04:19:45 +0800 Subject: [PATCH 033/170] Add missing Auth module unit tests (#8454) * Add missing unit tests for auth `verifyTransaction()`, multisig registration and util `verifySignature()` * Update framework/test/unit/modules/auth/register_multisignature.spec.ts --- .../test/unit/modules/auth/module.spec.ts | 65 ++++++- .../auth/register_multisignature.spec.ts | 182 ++++++++++++++++-- .../test/unit/modules/auth/utils.spec.ts | 52 +++++ 3 files changed, 272 insertions(+), 27 deletions(-) create mode 100644 framework/test/unit/modules/auth/utils.spec.ts diff --git a/framework/test/unit/modules/auth/module.spec.ts b/framework/test/unit/modules/auth/module.spec.ts index 
29ce88b55a7..a46554454f4 100644 --- a/framework/test/unit/modules/auth/module.spec.ts +++ b/framework/test/unit/modules/auth/module.spec.ts @@ -365,8 +365,6 @@ describe('AuthModule', () => { signatures: [], }); - transaction.sign(chainID, keyPairs[1].privateKey); - when(authAccountStoreMock) .calledWith(singleSigAddress, authAccountSchema) .mockReturnValue({ @@ -378,6 +376,8 @@ describe('AuthModule', () => { }); it('should not throw for a valid transaction', async () => { + transaction.sign(chainID, keyPairs[1].privateKey); + const context = testing .createTransactionContext({ stateStore, @@ -391,9 +391,23 @@ describe('AuthModule', () => { }); }); - it('should throw if signature is missing', async () => { - transaction.signatures.pop(); + it('should throw if signature is invalid', async () => { + transaction.signatures.push(utils.getRandomBytes(64)); + const context = testing + .createTransactionContext({ + stateStore, + transaction, + chainID, + }) + .createTransactionVerifyContext(); + + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + 'Failed to validate signature', + ); + }); + + it('should throw if signature is missing', async () => { const context = testing .createTransactionContext({ stateStore, @@ -408,7 +422,8 @@ describe('AuthModule', () => { }); it('should throw error if account is not multi signature and more than one signature present', async () => { - transaction.signatures.push(transaction.signatures[0]); + transaction.sign(chainID, keyPairs[1].privateKey); + transaction.signatures.push(utils.getRandomBytes(64)); const context = testing .createTransactionContext({ @@ -466,7 +481,24 @@ describe('AuthModule', () => { transaction.sign(chainID, privateKeys.optional[0]); transaction.signatures.push(EMPTY_BUFFER); - const context = testing + let context = testing + .createTransactionContext({ + stateStore, + transaction, + chainID, + }) + .createTransactionVerifyContext(); + + await 
expect(authModule.verifyTransaction(context)).resolves.toEqual({ + status: VerifyStatus.OK, + }); + + // now do the same, but with the other optional signature present + transaction.signatures.splice(0, 2); + transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[1]); + + context = testing .createTransactionContext({ stateStore, transaction, @@ -579,6 +611,27 @@ describe('AuthModule', () => { ); }); + it('should throw when a transaction from 3-of-4 multisig account with 2 mandatory signers has only 2 optional signatures', async () => { + transaction.signatures.push(EMPTY_BUFFER); + transaction.signatures.push(EMPTY_BUFFER); + transaction.sign(chainID, privateKeys.optional[0]); + transaction.sign(chainID, privateKeys.optional[1]); + + const context = testing + .createTransactionContext({ + stateStore, + transaction, + chainID, + }) + .createTransactionVerifyContext(); + + await expect(authModule.verifyTransaction(context)).rejects.toThrow( + `Transaction signatures does not match required number of signatures: '3' for transaction with id '${transaction.id.toString( + 'hex', + )}'`, + ); + }); + it('should throw if a mandatory signature is absent', async () => { transaction.sign(chainID, privateKeys.mandatory[0]); transaction.signatures.push(EMPTY_BUFFER); diff --git a/framework/test/unit/modules/auth/register_multisignature.spec.ts b/framework/test/unit/modules/auth/register_multisignature.spec.ts index 4d1edc8814c..244c2e23532 100644 --- a/framework/test/unit/modules/auth/register_multisignature.spec.ts +++ b/framework/test/unit/modules/auth/register_multisignature.spec.ts @@ -53,7 +53,7 @@ describe('Register Multisignature command', () => { }); describe('verify', () => { - it('should return status OK for valid params', async () => { + it('should return status OK for valid params: 2 mandatory, 2 optional and all 4 required signatures present', async () => { const context = testing .createTransactionContext({ transaction, @@ 
-68,6 +68,78 @@ describe('Register Multisignature command', () => { expect(result.status).toBe(VerifyStatus.OK); }); + it('should return status OK for valid params: 2 mandatory keys, 0 optional keys and both 2 required signatures present', async () => { + const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, + optionalKeys: [], + numberOfSignatures: 2, + signatures: [decodedParams.signatures[0], decodedParams.signatures[1]], + }); + + const context = testing + .createTransactionContext({ + transaction: new Transaction({ ...transaction.toObject(), params }), + chainID, + }) + .createCommandVerifyContext( + registerMultisignatureParamsSchema, + ); + + const result = await registerMultisignatureCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.OK); + }); + + it('should return status OK for valid params: 0 mandatory keys, 2 optional keys and both 2 required signatures present', async () => { + const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, + mandatoryKeys: [], + numberOfSignatures: 2, + signatures: [decodedParams.signatures[2], decodedParams.signatures[3]], + }); + + const context = testing + .createTransactionContext({ + transaction: new Transaction({ ...transaction.toObject(), params }), + chainID, + }) + .createCommandVerifyContext( + registerMultisignatureParamsSchema, + ); + + const result = await registerMultisignatureCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.OK); + }); + + it('should return status OK when the total number of mandatory and optional keys is 64', async () => { + const mandatoryKeys = [...Array(20).keys()].map(() => utils.getRandomBytes(32)); + const optionalKeys = [...Array(44).keys()].map(() => utils.getRandomBytes(32)); + + mandatoryKeys.sort((a, b) => a.compare(b)); + optionalKeys.sort((a, b) => a.compare(b)); + + const params = codec.encode(registerMultisignatureParamsSchema, { + mandatoryKeys, + optionalKeys, + 
numberOfSignatures: 64, + signatures: [...Array(64).keys()].map(() => utils.getRandomBytes(64)), + }); + + const context = testing + .createTransactionContext({ + transaction: new Transaction({ ...transaction.toObject(), params }), + chainID, + }) + .createCommandVerifyContext( + registerMultisignatureParamsSchema, + ); + + const result = await registerMultisignatureCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.OK); + }); + it('should return error if params has numberOfSignatures > 64', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, @@ -128,10 +200,10 @@ describe('Register Multisignature command', () => { expect(result.error?.message).toInclude('must NOT have more than 64 items'); }); - it('should return error if params mandatory keys contains items with length bigger than 32 bytes', async () => { + it('should return error if params has more than 64 optional keys', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - mandatoryKeys: [utils.getRandomBytes(32), utils.getRandomBytes(64)], + optionalKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), }); const context = testing @@ -145,13 +217,13 @@ describe('Register Multisignature command', () => { const result = await registerMultisignatureCommand.verify(context); - expect(result.error?.message).toInclude("Property '.mandatoryKeys.1' maxLength exceeded"); + expect(result.error?.message).toInclude('must NOT have more than 64 items'); }); - it('should return error if params mandatory keys contains items with length smaller than 32 bytes', async () => { + it('should return error if params mandatory keys contains items with length bigger than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - mandatoryKeys: [utils.getRandomBytes(10), utils.getRandomBytes(32)], + mandatoryKeys: [utils.getRandomBytes(32), utils.getRandomBytes(64)], 
}); const context = testing @@ -165,13 +237,13 @@ describe('Register Multisignature command', () => { const result = await registerMultisignatureCommand.verify(context); - expect(result.error?.message).toInclude('minLength not satisfied'); + expect(result.error?.message).toInclude("Property '.mandatoryKeys.1' maxLength exceeded"); }); - it('should return error if params optional keys contains items with length bigger than 32 bytes', async () => { + it('should return error if params mandatory keys contains items with length smaller than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - optionalKeys: [utils.getRandomBytes(64)], + mandatoryKeys: [utils.getRandomBytes(10), utils.getRandomBytes(32)], }); const context = testing @@ -185,13 +257,13 @@ describe('Register Multisignature command', () => { const result = await registerMultisignatureCommand.verify(context); - expect(result.error?.message).toInclude('maxLength exceeded'); + expect(result.error?.message).toInclude('minLength not satisfied'); }); - it('should return error if params optional keys contains items with length smaller than 32 bytes', async () => { + it('should return error if params optional keys contains items with length bigger than 32 bytes', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - optionalKeys: [utils.getRandomBytes(31)], + optionalKeys: [utils.getRandomBytes(64)], }); const context = testing @@ -205,13 +277,13 @@ describe('Register Multisignature command', () => { const result = await registerMultisignatureCommand.verify(context); - expect(result.error?.message).toInclude('minLength not satisfied'); + expect(result.error?.message).toInclude('maxLength exceeded'); }); - it('should return error if params has more than 64 optional keys', async () => { + it('should return error if params optional keys contains items with length smaller than 32 bytes', async () => { const params = 
codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - optionalKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), + optionalKeys: [utils.getRandomBytes(31)], }); const context = testing @@ -225,7 +297,7 @@ describe('Register Multisignature command', () => { const result = await registerMultisignatureCommand.verify(context); - expect(result.error?.message).toInclude('must NOT have more than 64 items'); + expect(result.error?.message).toInclude('minLength not satisfied'); }); it('should return error when there are duplicated mandatory keys', async () => { @@ -377,11 +449,32 @@ describe('Register Multisignature command', () => { expect(result.error?.message).toBe('Optional keys should be sorted lexicographically.'); }); - it('should return error when the number of optional and mandatory keys is more than 64', async () => { + it('should return error when both mandatory and optional keys sets are not sorted', async () => { + const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, + mandatoryKeys: [keyPairs[1].publicKey, keyPairs[0].publicKey], + optionalKeys: [keyPairs[3].publicKey, keyPairs[2].publicKey], + }); + + const context = testing + .createTransactionContext({ + transaction: new Transaction({ ...transaction.toObject(), params }), + chainID, + }) + .createCommandVerifyContext( + registerMultisignatureParamsSchema, + ); + + const result = await registerMultisignatureCommand.verify(context); + + expect(result.error?.message).toBe('Mandatory keys should be sorted lexicographically.'); + }); + + it('should return error when the number of both mandatory and optional keys is more than 64 each', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { ...decodedParams, - optionalKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), mandatoryKeys: [...Array(65).keys()].map(() => utils.getRandomBytes(32)), + optionalKeys: [...Array(65).keys()].map(() => 
utils.getRandomBytes(32)), }); const context = testing @@ -400,9 +493,30 @@ describe('Register Multisignature command', () => { it('should return error when no keys are provided', async () => { const params = codec.encode(registerMultisignatureParamsSchema, { - optionalKeys: [], + ...decodedParams, mandatoryKeys: [], - numberOfSignatures: 0, + optionalKeys: [], + }); + + const context = testing + .createTransactionContext({ + transaction: new Transaction({ ...transaction.toObject(), params }), + chainID, + }) + .createCommandVerifyContext( + registerMultisignatureParamsSchema, + ); + + const result = await registerMultisignatureCommand.verify(context); + + expect(result.error?.message).toBe( + 'The numberOfSignatures is bigger than the count of Mandatory and Optional keys.', + ); + }); + + it('should return error when no signatures are provided', async () => { + const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, signatures: [], }); @@ -417,7 +531,33 @@ describe('Register Multisignature command', () => { const result = await registerMultisignatureCommand.verify(context); - expect(result.error?.message).toInclude('must be >= 1'); + expect(result.error?.message).toBe( + 'The number of mandatory and optional keys should match the number of signatures', + ); + }); + + it('should return error when registering an account with 2 mandatory keys, but only 1 signature is present', async () => { + const params = codec.encode(registerMultisignatureParamsSchema, { + ...decodedParams, + optionalKeys: [], + numberOfSignatures: 2, + signatures: [utils.getRandomBytes(64)], + }); + + const context = testing + .createTransactionContext({ + transaction: new Transaction({ ...transaction.toObject(), params }), + chainID, + }) + .createCommandVerifyContext( + registerMultisignatureParamsSchema, + ); + + const result = await registerMultisignatureCommand.verify(context); + + expect(result.error?.message).toBe( + 'The number of mandatory and optional keys 
should match the number of signatures', + ); }); }); diff --git a/framework/test/unit/modules/auth/utils.spec.ts b/framework/test/unit/modules/auth/utils.spec.ts new file mode 100644 index 00000000000..3f5d5e00ddd --- /dev/null +++ b/framework/test/unit/modules/auth/utils.spec.ts @@ -0,0 +1,52 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { utils, ed } from '@liskhq/lisk-cryptography'; +import { Transaction, TAG_TRANSACTION } from '@liskhq/lisk-chain'; +import { verifySignature } from '../../../../src/modules/auth/utils'; + +describe('utils', () => { + describe('verifySignature', () => { + const chainID = Buffer.from('04000000', 'hex'); + + it('should verify a valid transaction signature', async () => { + const privateKey = await ed.getPrivateKeyFromPhraseAndPath('hello lisk', "m/44'/134'/0'"); + const publicKey = ed.getPublicKeyFromPrivateKey(privateKey); + + const transaction = new Transaction({ + module: 'token', + command: 'transfer', + nonce: BigInt('0'), + fee: BigInt('100000000'), + senderPublicKey: publicKey, + params: utils.getRandomBytes(100), + signatures: [], + }); + + const transactionSigningBytes = transaction.getSigningBytes(); + const signature = ed.signDataWithPrivateKey( + TAG_TRANSACTION, + chainID, + transactionSigningBytes, + privateKey, + ); + + transaction.signatures.push(signature); + + expect(() => + verifySignature(chainID, publicKey, signature, transactionSigningBytes, transaction.id), + ).not.toThrow(); + }); + }); +}); From 1d5d18d2b46129d0f33da1c56220b73248cd251e Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Tue, 23 May 2023 23:01:11 +0800 Subject: [PATCH 034/170] Add missing unit tests for PoS module Stake command (#8462) * Clean up PoS Stake command unit tests * Add a test case of staker increasing an existing stake --- .../unit/modules/pos/commands/stake.spec.ts | 586 ++++++------------ 1 file changed, 176 insertions(+), 410 deletions(-) diff --git a/framework/test/unit/modules/pos/commands/stake.spec.ts b/framework/test/unit/modules/pos/commands/stake.spec.ts index c1103b6c53b..d9e2d4fd095 100644 --- a/framework/test/unit/modules/pos/commands/stake.spec.ts +++ b/framework/test/unit/modules/pos/commands/stake.spec.ts @@ -15,7 +15,7 @@ import { Transaction } from '@liskhq/lisk-chain'; import { codec } from '@liskhq/lisk-codec'; import { address, utils } from '@liskhq/lisk-cryptography'; -import { validator } from '@liskhq/lisk-validator'; +import { validator as schemaValidator } from '@liskhq/lisk-validator'; import { StakeCommand, VerifyStatus, PoSModule } from '../../../../../src'; import { MAX_NUMBER_PENDING_UNLOCKS, @@ -27,7 +27,11 @@ import { InternalMethod } from '../../../../../src/modules/pos/internal_method'; import { ValidatorAccount, ValidatorStore } from '../../../../../src/modules/pos/stores/validator'; import { EligibleValidatorsStore } from '../../../../../src/modules/pos/stores/eligible_validators'; import { StakerStore } from '../../../../../src/modules/pos/stores/staker'; -import { StakeObject, StakeTransactionParams } from '../../../../../src/modules/pos/types'; +import { + StakeObject, + StakerData, + StakeTransactionParams, +} from '../../../../../src/modules/pos/types'; import { EventQueue, MethodContext } from '../../../../../src/state_machine'; import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; @@ -83,18 +87,18 @@ describe('StakeCommand', () => { const validator1StakeAmount = liskToBeddows(90); const validator2StakeAmount = 
liskToBeddows(50); - let validatorInfo1: ValidatorAccount; - let validatorInfo2: ValidatorAccount; - let validatorInfo3: ValidatorAccount; + let defaultValidator: ValidatorAccount; + let validator1: ValidatorAccount; + let validator2: ValidatorAccount; + let validator3: ValidatorAccount; let stakerStore: StakerStore; let validatorStore: ValidatorStore; let context: any; let transaction: any; let command: StakeCommand; - let transactionParams: Buffer; let transactionParamsDecoded: any; let stateStore: PrefixedStateReadWriter; - let lockFn: any; + let tokenLockMock: jest.Mock; let tokenMethod: any; let internalMethod: InternalMethod; let mockAssignStakeRewards: jest.SpyInstance< @@ -108,9 +112,9 @@ describe('StakeCommand', () => { >; beforeEach(async () => { - lockFn = jest.fn(); + tokenLockMock = jest.fn(); tokenMethod = { - lock: lockFn, + lock: tokenLockMock, unlock: jest.fn(), getAvailableBalance: jest.fn(), burn: jest.fn(), @@ -136,11 +140,11 @@ describe('StakeCommand', () => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - validatorInfo1 = { + defaultValidator = { consecutiveMissedBlocks: 0, isBanned: false, lastGeneratedHeight: 5, - name: 'someValidator1', + name: 'defaultValidator', reportMisbehaviorHeights: [], selfStake: BigInt(0), totalStake: BigInt(0), @@ -149,43 +153,37 @@ describe('StakeCommand', () => { sharingCoefficients: [{ tokenID: Buffer.alloc(8), coefficient: Buffer.alloc(24) }], }; - validatorInfo2 = { - consecutiveMissedBlocks: 0, - isBanned: false, - lastGeneratedHeight: 5, + validator1 = { + ...defaultValidator, + name: 'someValidator1', + }; + + validator2 = { + ...defaultValidator, name: 'someValidator2', - reportMisbehaviorHeights: [], - selfStake: BigInt(0), - totalStake: BigInt(0), - commission: 0, - lastCommissionIncreaseHeight: 0, - sharingCoefficients: [{ tokenID: Buffer.alloc(8), coefficient: Buffer.alloc(24) }], }; - validatorInfo3 = { - consecutiveMissedBlocks: 0, - isBanned: false, - 
lastGeneratedHeight: 5, + validator3 = { + ...defaultValidator, name: 'someValidator3', - reportMisbehaviorHeights: [], - selfStake: BigInt(0), - totalStake: BigInt(0), - commission: 0, - lastCommissionIncreaseHeight: 0, - sharingCoefficients: [{ tokenID: Buffer.alloc(8), coefficient: Buffer.alloc(24) }], }; - validatorStore = pos.stores.get(ValidatorStore); - - await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validatorInfo1); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress2, validatorInfo2); - stakerStore = pos.stores.get(StakerStore); validatorStore = pos.stores.get(ValidatorStore); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validatorInfo1); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress2, validatorInfo2); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress3, validatorInfo3); + await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validator1); + await validatorStore.set(createStoreGetter(stateStore), validatorAddress2, validator2); + await validatorStore.set(createStoreGetter(stateStore), validatorAddress3, validator3); + + transaction = new Transaction({ + module: 'pos', + command: 'stake', + fee: BigInt(1500000), + nonce: BigInt(0), + params: Buffer.alloc(0), + senderPublicKey, + signatures: [], + }); }); describe('constructor', () => { @@ -199,29 +197,13 @@ describe('StakeCommand', () => { }); describe('verify', () => { - beforeEach(() => { - transaction = new Transaction({ - module: 'pos', - command: 'stake', - fee: BigInt(1500000), - nonce: BigInt(0), - params: Buffer.alloc(0), - senderPublicKey: utils.getRandomBytes(32), - signatures: [], - }); - }); - describe('schema validation', () => { describe('when transaction.params.stakes does not include any stake', () => { beforeEach(() => { transactionParamsDecoded = { stakes: [], }; - - transactionParams = codec.encode(command.schema, 
transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -246,11 +228,7 @@ describe('StakeCommand', () => { amount: liskToBeddows(0), })), }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -266,7 +244,7 @@ describe('StakeCommand', () => { }); describe('when transaction.params.stakes includes amount which is less than int64 range', () => { - beforeEach(() => { + it('should return errors', () => { transactionParamsDecoded = { stakes: [ { @@ -275,17 +253,15 @@ describe('StakeCommand', () => { }, ], }; - }); - it('should return errors', () => { - expect(() => validator.validate(command.schema, transactionParamsDecoded)).toThrow( + expect(() => schemaValidator.validate(command.schema, transactionParamsDecoded)).toThrow( 'should pass "dataType" keyword validation', ); }); }); describe('when transaction.params.stakes includes amount which is greater than int64 range', () => { - beforeEach(() => { + it('should return errors', () => { transactionParamsDecoded = { stakes: [ { @@ -294,10 +270,8 @@ describe('StakeCommand', () => { }, ], }; - }); - it('should return errors', () => { - expect(() => validator.validate(command.schema, transactionParamsDecoded)).toThrow( + expect(() => schemaValidator.validate(command.schema, transactionParamsDecoded)).toThrow( 'should pass "dataType" keyword validation', ); }); @@ -306,81 +280,57 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes contains valid contents', () => { it('should not throw errors with valid upstake case', async () => { - // Arrange transactionParamsDecoded = { stakes: [{ validatorAddress: utils.getRandomBytes(20), 
amount: liskToBeddows(20) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty('status', VerifyStatus.OK); }); it('should not throw errors with valid downstake cast', async () => { - // Arrange transactionParamsDecoded = { stakes: [{ validatorAddress: utils.getRandomBytes(20), amount: liskToBeddows(-20) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty('status', VerifyStatus.OK); }); it('should not throw errors with valid mixed stakes case', async () => { - // Arrange transactionParamsDecoded = { stakes: [ { validatorAddress: utils.getRandomBytes(20), amount: liskToBeddows(20) }, { validatorAddress: utils.getRandomBytes(20), amount: liskToBeddows(-20) }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty('status', VerifyStatus.OK); }); }); describe('when transaction.params.stakes contains more than 10 positive stakes', () => { it('should throw error', async () => { - // Arrange transactionParamsDecoded = { stakes: Array(11) .fill(0) .map(() => ({ validatorAddress: utils.getRandomBytes(20), 
amount: liskToBeddows(10) })), }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Upstake can only be casted up to 10.', @@ -390,7 +340,6 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes contains more than 10 negative stakes', () => { it('should throw error', async () => { - // Arrange transactionParamsDecoded = { stakes: Array(11) .fill(0) @@ -399,16 +348,11 @@ describe('StakeCommand', () => { amount: liskToBeddows(-10), })), }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Downstake can only be casted up to 10.', @@ -418,23 +362,15 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes includes duplicate validators within positive amount', () => { it('should throw error', async () => { - // Arrange const validatorAddress = utils.getRandomBytes(20); transactionParamsDecoded = { - stakes: Array(2) - .fill(0) - .map(() => ({ validatorAddress, amount: liskToBeddows(10) })), + stakes: Array(2).fill({ validatorAddress, amount: liskToBeddows(10) }), }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, 
}).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Validator address must be unique.', @@ -444,7 +380,6 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes includes duplicate validators within positive and negative amount', () => { it('should throw error', async () => { - // Arrange const validatorAddress = utils.getRandomBytes(20); transactionParamsDecoded = { stakes: [ @@ -452,16 +387,11 @@ describe('StakeCommand', () => { { validatorAddress, amount: liskToBeddows(-10) }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Validator address must be unique.', @@ -471,21 +401,15 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes includes zero amount', () => { it('should throw error', async () => { - // Arrange const validatorAddress = utils.getRandomBytes(20); transactionParamsDecoded = { stakes: [{ validatorAddress, amount: liskToBeddows(0) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Amount cannot be 0.', @@ -495,21 +419,15 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes includes positive amount which is not multiple of 10 * 10^8', () => { it('should throw an error', async () => { - // Arrange const 
validatorAddress = utils.getRandomBytes(20); transactionParamsDecoded = { stakes: [{ validatorAddress, amount: BigInt(20) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Amount should be multiple of 10 * 10^8.', @@ -519,21 +437,15 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes includes negative amount which is not multiple of 10 * 10^8', () => { it('should throw error', async () => { - // Arrange const validatorAddress = utils.getRandomBytes(20); transactionParamsDecoded = { stakes: [{ validatorAddress, amount: BigInt(-20) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, }).createCommandVerifyContext(command.schema); - // Assert await expect(command.verify(context)).resolves.toHaveProperty( 'error.message', 'Amount should be multiple of 10 * 10^8.', @@ -543,34 +455,17 @@ describe('StakeCommand', () => { }); describe('execute', () => { - beforeEach(() => { - transaction = new Transaction({ - module: 'pos', - command: 'stake', - fee: BigInt(1500000), - nonce: BigInt(0), - params: transactionParams, - senderPublicKey, - signatures: [], - }); - }); describe('when transaction.params.stakes contain positive amount', () => { it('should emit ValidatorStakedEvent with STAKE_SUCCESSFUL result', async () => { - // Arrange transactionParamsDecoded = { stakes: [{ validatorAddress: validatorAddress1, amount: liskToBeddows(10) }], }; - - transactionParams = codec.encode(command.schema, 
transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - // Assert await expect(command.execute(context)).resolves.toBeUndefined(); checkEventResult( @@ -588,39 +483,28 @@ describe('StakeCommand', () => { }); it('should throw error if stake amount is more than balance', async () => { - // Arrange transactionParamsDecoded = { stakes: [{ validatorAddress: utils.getRandomBytes(20), amount: liskToBeddows(100) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - lockFn.mockRejectedValue(new Error('Not enough balance to lock')); + tokenLockMock.mockRejectedValue(new Error('Not enough balance to lock')); - // Assert await expect(command.execute(context)).rejects.toThrow(); }); it('should make account to have correct balance', async () => { - // Arrange transactionParamsDecoded = { stakes: [ { validatorAddress: validatorAddress1, amount: validator1StakeAmount }, { validatorAddress: validatorAddress2, amount: validator2StakeAmount }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -628,16 +512,15 @@ describe('StakeCommand', () => { await command.execute(context); - // Assert - expect(lockFn).toHaveBeenCalledTimes(2); - expect(lockFn).toHaveBeenCalledWith( + expect(tokenLockMock).toHaveBeenCalledTimes(2); + expect(tokenLockMock).toHaveBeenCalledWith( expect.anything(), 
senderAddress, MODULE_NAME_POS, posTokenID, validator1StakeAmount, ); - expect(lockFn).toHaveBeenCalledWith( + expect(tokenLockMock).toHaveBeenCalledWith( expect.anything(), senderAddress, MODULE_NAME_POS, @@ -647,20 +530,10 @@ describe('StakeCommand', () => { }); it('should not change pendingUnlocks', async () => { - // Arrange - stakerStore = pos.stores.get(StakerStore); - validatorStore = pos.stores.get(ValidatorStore); - - await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validatorInfo1); - transactionParamsDecoded = { stakes: [{ validatorAddress: validatorAddress1, amount: validator1StakeAmount }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -673,29 +546,17 @@ describe('StakeCommand', () => { senderAddress, ); - // Assert expect(pendingUnlocks).toHaveLength(0); }); - it('should order stakerData.sentStakes', async () => { - // Arrange - stakerStore = pos.stores.get(StakerStore); - validatorStore = pos.stores.get(ValidatorStore); - - await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validatorInfo1); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress2, validatorInfo2); - + it('should order stakerData.stakes', async () => { transactionParamsDecoded = { stakes: [ { validatorAddress: validatorAddress2, amount: validator2StakeAmount }, { validatorAddress: validatorAddress1, amount: validator1StakeAmount }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -705,31 +566,20 @@ describe('StakeCommand', () => { const { stakes } = await 
stakerStore.get(createStoreGetter(stateStore), senderAddress); - const sentStakesCopy = stakes.slice(0); - sentStakesCopy.sort((a: any, b: any) => a.validatorAddress.compare(b.validatorAddress)); + const stakesCopy = stakes.slice(0); + stakesCopy.sort((a: any, b: any) => a.validatorAddress.compare(b.validatorAddress)); - // Assert - expect(stakes).toStrictEqual(sentStakesCopy); + expect(stakes).toStrictEqual(stakesCopy); }); - it('should make upstaked validator account to have correct totalStakeReceived', async () => { - // Arrange - validatorStore = pos.stores.get(ValidatorStore); - - await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validatorInfo1); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress2, validatorInfo2); - + it('should correctly update validator totalStake when a staker is upstaking for the first time', async () => { transactionParamsDecoded = { stakes: [ { validatorAddress: validatorAddress1, amount: validator1StakeAmount }, { validatorAddress: validatorAddress2, amount: validator2StakeAmount }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -737,40 +587,77 @@ describe('StakeCommand', () => { await command.execute(context); - const { totalStake: totalStakeReceived1 } = await validatorStore.get( + const { totalStake: totalStake1 } = await validatorStore.get( createStoreGetter(stateStore), validatorAddress1, ); - const { totalStake: totalStakeReceived2 } = await validatorStore.get( + const { totalStake: totalStake2 } = await validatorStore.get( createStoreGetter(stateStore), validatorAddress2, ); - // Assert - expect(totalStakeReceived1).toBe(validator1StakeAmount); - expect(totalStakeReceived2).toBe(validator2StakeAmount); + expect(totalStake1).toBe(validator1StakeAmount); + 
expect(totalStake2).toBe(validator2StakeAmount); }); - it('should update stake object when it exists before and create if it does not exist', async () => { - // Arrange - stakerStore = pos.stores.get(StakerStore); - validatorStore = pos.stores.get(ValidatorStore); + it("should increase staker's stakes.amount and validator's totalStake when an existing staker further increases their stake", async () => { + const previousStakeAmount = liskToBeddows(120); + const newStakeAmount = liskToBeddows(88); + + const validatorAccount: ValidatorAccount = { + ...validator1, + totalStake: previousStakeAmount, + selfStake: liskToBeddows(50), + }; + const stakerData: StakerData = { + stakes: [ + { + validatorAddress: validatorAddress1, + amount: previousStakeAmount, + sharingCoefficients: validatorAccount.sharingCoefficients, + }, + ], + pendingUnlocks: [], + }; + + await stakerStore.set(createStoreGetter(stateStore), senderAddress, stakerData); + await validatorStore.set( + createStoreGetter(stateStore), + validatorAddress1, + validatorAccount, + ); - await validatorStore.set(createStoreGetter(stateStore), validatorAddress1, validatorInfo1); transactionParamsDecoded = { - stakes: [{ validatorAddress: validatorAddress1, amount: validator1StakeAmount }], + stakes: [{ validatorAddress: validatorAddress1, amount: newStakeAmount }], }; + transaction.params = codec.encode(command.schema, transactionParamsDecoded); + context = createTransactionContext({ + transaction, + stateStore, + }).createCommandExecuteContext(command.schema); + + await command.execute(context); - transactionParams = codec.encode(command.schema, transactionParamsDecoded); + const { totalStake } = await validatorStore.get( + createStoreGetter(stateStore), + validatorAddress1, + ); + const { stakes } = await stakerStore.get(createStoreGetter(stateStore), senderAddress); - transaction.params = transactionParams; + expect(totalStake).toBe(previousStakeAmount + newStakeAmount); + 
expect(stakes[0].amount).toBe(previousStakeAmount + newStakeAmount); + }); + it('should create a new entry in staker store, when a new staker upstakes', async () => { + transactionParamsDecoded = { + stakes: [{ validatorAddress: validatorAddress1, amount: validator1StakeAmount }], + }; + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - // Assert await expect( stakerStore.get(createStoreGetter(stateStore), senderAddress), ).rejects.toThrow(); @@ -785,7 +672,7 @@ describe('StakeCommand', () => { }); }); - describe('when transaction.params.stakes contain negative amount which makes stakerStore.sentStakes to be 0 entries', () => { + describe('when transaction.params.stakes contain negative amount which decreases StakerData.stakes[x].amount to 0', () => { beforeEach(async () => { transactionParamsDecoded = { stakes: [ @@ -793,11 +680,7 @@ describe('StakeCommand', () => { { validatorAddress: validatorAddress2, amount: validator2StakeAmount }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -814,11 +697,7 @@ describe('StakeCommand', () => { { validatorAddress: validatorAddress2, amount: validator2StakeAmount * BigInt(-1) }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -827,7 +706,7 @@ describe('StakeCommand', () => { } as any, }).createCommandExecuteContext(command.schema); - lockFn.mockClear(); + tokenLockMock.mockClear(); }); it('should emit ValidatorStakedEvent with 
STAKE_SUCCESSFUL result', async () => { @@ -850,25 +729,18 @@ describe('StakeCommand', () => { }); it('should not change account balance', async () => { - // Act await command.execute(context); - // Assert - expect(lockFn).toHaveBeenCalledTimes(0); + expect(tokenLockMock).toHaveBeenCalledTimes(0); }); it('should remove stake which has zero amount', async () => { - // Arrange transactionParamsDecoded = { stakes: [ { validatorAddress: validatorAddress1, amount: validator1StakeAmount * BigInt(-1) }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -878,23 +750,17 @@ describe('StakeCommand', () => { const stakerData = await stakerStore.get(createStoreGetter(stateStore), senderAddress); - // Assert expect(stakerData.stakes).toHaveLength(1); expect(stakerData.stakes[0].validatorAddress).not.toEqual(validatorAddress1); }); it('should update stake which has non-zero amount', async () => { - // Arrange const downStakeAmount = liskToBeddows(10); transactionParamsDecoded = { stakes: [{ validatorAddress: validatorAddress1, amount: downStakeAmount * BigInt(-1) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -904,9 +770,8 @@ describe('StakeCommand', () => { const stakerData = await stakerStore.get(createStoreGetter(stateStore), senderAddress); - // Assert expect( - stakerData.stakes.find((v: any) => v.validatorAddress.equals(validatorAddress1)), + stakerData.stakes.find(validator => validator.validatorAddress.equals(validatorAddress1)), ).toEqual({ validatorAddress: validatorAddress1, amount: validator1StakeAmount - 
downStakeAmount, @@ -915,12 +780,10 @@ describe('StakeCommand', () => { }); it('should make account to have correct unlocking', async () => { - // Arrange await command.execute(context); const stakerData = await stakerStore.get(createStoreGetter(stateStore), senderAddress); - // Assert expect(stakerData.pendingUnlocks).toHaveLength(2); expect(stakerData.pendingUnlocks).toEqual( [ @@ -939,20 +802,17 @@ describe('StakeCommand', () => { }); it('should order stakerData.pendingUnlocks', async () => { - // Arrange await command.execute(context); const stakerData = await stakerStore.get(createStoreGetter(stateStore), senderAddress); - // Assert expect(stakerData.pendingUnlocks).toHaveLength(2); expect(stakerData.pendingUnlocks.map((d: any) => d.validatorAddress)).toEqual( [validatorAddress1, validatorAddress2].sort((a, b) => a.compare(b)), ); }); - it('should make downstaked validator account to have correct totalStakeReceived', async () => { - // Arrange + it('should make downstaked validator account to have correct totalStake', async () => { await command.execute(context); const validatorData1 = await validatorStore.get( @@ -964,29 +824,22 @@ describe('StakeCommand', () => { validatorAddress2, ); - // Assert expect(validatorData1.totalStake).toEqual(BigInt(0)); expect(validatorData2.totalStake).toEqual(BigInt(0)); }); it('should throw error and emit ValidatorStakedEvent with STAKE_FAILED_INVALID_UNSTAKE_PARAMETERS result when downstaked validator is not already upstaked', async () => { - // Arrange const downStakeAmount = liskToBeddows(10); transactionParamsDecoded = { stakes: [{ validatorAddress: validatorAddress3, amount: downStakeAmount * BigInt(-1) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - 
// Assert await expect(command.execute(context)).rejects.toThrow( 'Cannot cast downstake to validator who is not upstaked.', ); @@ -1016,11 +869,7 @@ describe('StakeCommand', () => { { validatorAddress: validatorAddress2, amount: validator2StakeAmount }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1037,11 +886,7 @@ describe('StakeCommand', () => { { validatorAddress: validatorAddress2, amount: negativeStakeValidator2 }, ].sort((a, b) => -1 * a.validatorAddress.compare(b.validatorAddress)), }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1050,7 +895,7 @@ describe('StakeCommand', () => { } as any, }).createCommandExecuteContext(command.schema); - lockFn.mockClear(); + tokenLockMock.mockClear(); }); it('should assign reward to staker for downstake and upstake for already staked validator', async () => { @@ -1059,7 +904,7 @@ describe('StakeCommand', () => { expect(mockAssignStakeRewards).toHaveBeenCalledTimes(2); }); - it('should assign sharingCoefficients of the validator to the corresponding sentStake of the staker for that validator', async () => { + it('should assign sharingCoefficients of the validator to the corresponding stake of the staker for that validator', async () => { const sharingCoefficients = [ { tokenID: Buffer.alloc(8), @@ -1071,15 +916,6 @@ describe('StakeCommand', () => { }, ]; - const validator1 = await validatorStore.get( - createStoreGetter(stateStore), - validatorAddress1, - ); - const validator2 = await validatorStore.get( - createStoreGetter(stateStore), - validatorAddress2, - ); - 
validator1.sharingCoefficients = sharingCoefficients; validator2.sharingCoefficients = sharingCoefficients; @@ -1091,11 +927,11 @@ describe('StakeCommand', () => { const { stakes } = await stakerStore.get(createStoreGetter(stateStore), senderAddress); expect( - stakes.find(sentStake => sentStake.validatorAddress.equals(validatorAddress1)) + stakes.find(stake => stake.validatorAddress.equals(validatorAddress1)) ?.sharingCoefficients, ).toEqual(sharingCoefficients); expect( - stakes.find(sentStake => sentStake.validatorAddress.equals(validatorAddress2)) + stakes.find(stake => stake.validatorAddress.equals(validatorAddress2)) ?.sharingCoefficients, ).toEqual(sharingCoefficients); }); @@ -1104,11 +940,7 @@ describe('StakeCommand', () => { transactionParamsDecoded = { stakes: [{ validatorAddress: validatorAddress3, amount: positiveStakeValidator1 }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1124,19 +956,18 @@ describe('StakeCommand', () => { const validatorAddress = utils.getRandomBytes(20); const selfStake = BigInt(2) + BigInt(defaultConfig.minWeightStandby); - const validatorInfo = { - ...validatorInfo1, + const validator = { + ...validator1, selfStake, totalStake: BigInt(1) + BigInt(100) * BigInt(defaultConfig.minWeightStandby), }; const expectedWeight = BigInt(10) * selfStake; - await validatorStore.set(createStoreGetter(stateStore), validatorAddress, validatorInfo); + await validatorStore.set(createStoreGetter(stateStore), validatorAddress, validator); + transactionParamsDecoded = { stakes: [{ validatorAddress, amount: positiveStakeValidator1 }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - transaction.params = transactionParams; + transaction.params = codec.encode(command.schema, 
transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1159,9 +990,7 @@ describe('StakeCommand', () => { transactionParamsDecoded = { stakes: [{ validatorAddress, amount: BigInt(-2) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - transaction.params = transactionParams; + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1181,12 +1010,10 @@ describe('StakeCommand', () => { }); it('should make staker to have correct balance', async () => { - // Arrange await command.execute(context); - // Assert - expect(lockFn).toHaveBeenCalledTimes(1); - expect(lockFn).toHaveBeenCalledWith( + expect(tokenLockMock).toHaveBeenCalledTimes(1); + expect(tokenLockMock).toHaveBeenCalledWith( expect.anything(), senderAddress, MODULE_NAME_POS, @@ -1196,11 +1023,9 @@ describe('StakeCommand', () => { }); it('should make staker to have correct unlocking', async () => { - // Arrange await command.execute(context); const stakerData = await stakerStore.get(createStoreGetter(stateStore), senderAddress); - // Assert expect(stakerData.pendingUnlocks).toHaveLength(1); expect(stakerData.pendingUnlocks).toEqual([ { @@ -1211,21 +1036,20 @@ describe('StakeCommand', () => { ]); }); - it('should make upstaked validator account to have correct totalStakeReceived', async () => { - // Arrange + it('should make upstaked validator account to have correct totalStake', async () => { await command.execute(context); - const validatorData1 = await validatorStore.get( + const updatedValidator1 = await validatorStore.get( createStoreGetter(stateStore), validatorAddress1, ); - // Assert - expect(validatorData1.totalStake).toEqual(validator1StakeAmount + positiveStakeValidator1); + expect(updatedValidator1.totalStake).toEqual( + validator1StakeAmount + positiveStakeValidator1, + ); }); - it('should make downstaked validator account to have correct 
totalStakeReceived', async () => { - // Arrange + it('should make downstaked validator account to have correct totalStake', async () => { await command.execute(context); const validatorData2 = await validatorStore.get( @@ -1233,7 +1057,6 @@ describe('StakeCommand', () => { validatorAddress2, ); - // Assert expect(validatorData2.totalStake).toEqual(validator2StakeAmount + negativeStakeValidator2); }); }); @@ -1246,11 +1069,7 @@ describe('StakeCommand', () => { { validatorAddress: validatorAddress2, amount: validator2StakeAmount }, ].sort((a, b) => -1 * a.validatorAddress.compare(b.validatorAddress)), }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1259,23 +1078,18 @@ describe('StakeCommand', () => { } as any, }).createCommandExecuteContext(command.schema); - lockFn.mockClear(); + tokenLockMock.mockClear(); }); describe('when transaction.params.stakes contain validator address which is not registered', () => { it('should throw error and emit ValidatorStakedEevnt with STAKE_FAILED_NON_REGISTERED_VALIDATOR failure', async () => { - // Arrange const nonExistingValidatorAddress = utils.getRandomBytes(20); transactionParamsDecoded = { ...transactionParamsDecoded, stakes: [{ validatorAddress: nonExistingValidatorAddress, amount: liskToBeddows(76) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1284,7 +1098,6 @@ describe('StakeCommand', () => { } as any, }).createCommandExecuteContext(command.schema); - // Assert await expect(command.execute(context)).rejects.toThrow( 'Invalid stake: no registered validator with the specified 
address', ); @@ -1304,9 +1117,8 @@ describe('StakeCommand', () => { }); }); - describe('when transaction.params.stakes positive amount makes stakerData.sentStakes entries more than 10', () => { + describe('when transaction.params.stakes positive amount makes StakerData.stakes array contain more than 10 elements', () => { it('should throw error and emit ValidatorStakedEvent with STAKE_FAILED_TOO_MANY_SENT_STAKES failure', async () => { - // Arrange const stakes = []; for (let i = 0; i < 12; i += 1) { @@ -1337,17 +1149,12 @@ describe('StakeCommand', () => { } transactionParamsDecoded = { stakes }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - // Assert await expect(command.execute(context)).rejects.toThrow('Sender can only stake upto 10.'); checkEventResult( @@ -1365,9 +1172,8 @@ describe('StakeCommand', () => { }); }); - describe('when transaction.params.stakes negative amount decrease stakerData.sentStakes entries yet positive amount makes account exceeds more than 10', () => { + describe('when transaction.params.stakes negative amount decrease StakerData.stakes array entries, yet positive amount makes account exceeds more than 10', () => { it('should throw error and emit ValidatorStakedEvent with STAKE_FAILED_TOO_MANY_SENT_STAKES failure', async () => { - // Arrange const initialValidatorAmount = 8; const stakerData = await stakerStore.getOrDefault( createStoreGetter(stateStore), @@ -1452,17 +1258,12 @@ describe('StakeCommand', () => { // now we added 2 negative stakes and 3 new positive stakes // which will make total positive stakes to grow over 10 transactionParamsDecoded = { stakes }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = 
transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - // Assert await expect(command.execute(context)).rejects.toThrow('Sender can only stake upto 10.'); checkEventResult( @@ -1482,7 +1283,6 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes has negative amount and makes stakerData.pendingUnlocks more than 20 entries', () => { it('should throw error and emit ValidatorStakedEvent with STAKE_FAILED_TOO_MANY_PENDING_UNLOCKS failure', async () => { - // Arrange const initialValidatorAmountForUnlocks = 19; const stakerData = await stakerStore.getOrDefault( createStoreGetter(stateStore), @@ -1569,17 +1369,12 @@ describe('StakeCommand', () => { // now we added 2 negative stakes // which will make total unlocking to grow over 20 transactionParamsDecoded = { stakes }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - // Assert await expect(command.execute(context)).rejects.toThrow( `Pending unlocks cannot exceed ${MAX_NUMBER_PENDING_UNLOCKS.toString()}.`, ); @@ -1603,7 +1398,6 @@ describe('StakeCommand', () => { describe('when transaction.params.stakes negative amount exceeds the previously staked amount', () => { it('should throw error and emit ValidatorStakedEvent with STAKE_FAILED_INVALID_UNSTAKE_PARAMETERS', async () => { - // Arrange const stakerData = await stakerStore.getOrDefault( createStoreGetter(stateStore), senderAddress, @@ -1623,17 +1417,12 @@ describe('StakeCommand', () => { }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + 
transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, }).createCommandExecuteContext(command.schema); - // Assert await expect(command.execute(context)).rejects.toThrow( 'The unstake amount exceeds the staked amount for this validator.', ); @@ -1664,7 +1453,7 @@ describe('StakeCommand', () => { selfStake = BigInt(20); const validatorInfo = { - ...validatorInfo1, + ...validator1, totalStake, selfStake, }; @@ -1673,11 +1462,7 @@ describe('StakeCommand', () => { transactionParamsDecoded = { stakes: [{ validatorAddress: senderAddress, amount: senderStakeAmountPositive }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1686,11 +1471,10 @@ describe('StakeCommand', () => { } as any, }).createCommandExecuteContext(command.schema); - lockFn.mockClear(); + tokenLockMock.mockClear(); }); - it('should update stakes and totalStakeReceived', async () => { - // Act & Assign + it('should update stakes and totalStake', async () => { await command.execute(context); const validatorData = await validatorStore.get( @@ -1701,10 +1485,10 @@ describe('StakeCommand', () => { createStoreGetter(stateStore), senderAddress, ); - // Assert + expect(validatorData.totalStake).toEqual(totalStake + senderStakeAmountPositive); expect(stakerData.stakes).toHaveLength(1); - expect(lockFn).toHaveBeenCalledWith( + expect(tokenLockMock).toHaveBeenCalledWith( expect.anything(), senderAddress, MODULE_NAME_POS, @@ -1713,21 +1497,19 @@ describe('StakeCommand', () => { ); }); - it('should change validatorData.selfStake and totalStakeReceived with positive stake', async () => { - // Act & Assign + it('should change validatorData.selfStake and totalStake with positive stake', async () => { await 
command.execute(context); const validatorData = await validatorStore.get( createStoreGetter(stateStore), senderAddress, ); - // Assert + expect(validatorData.totalStake).toEqual(totalStake + senderStakeAmountPositive); expect(validatorData.selfStake).toEqual(selfStake + senderStakeAmountPositive); }); - it('should change validatorData.selfStake, totalStakeReceived and unlocking with negative stake', async () => { - // Act & Assign + it('should change validatorData.selfStake, totalStake and unlocking with negative stake', async () => { await command.execute(context); transactionParamsDecoded = { @@ -1735,11 +1517,7 @@ describe('StakeCommand', () => { { validatorAddress: senderAddress, amount: senderStakeAmountNegative * BigInt(-1) }, ], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1759,7 +1537,6 @@ describe('StakeCommand', () => { senderAddress, ); - // Assert expect(validatorData.totalStake).toEqual( totalStake + senderStakeAmountPositive - senderStakeAmountNegative, ); @@ -1815,11 +1592,7 @@ describe('StakeCommand', () => { transactionParamsDecoded = { stakes: [{ validatorAddress, amount: senderStakeAmountPositive }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1828,34 +1601,28 @@ describe('StakeCommand', () => { } as any, }).createCommandExecuteContext(command.schema); - lockFn.mockClear(); + tokenLockMock.mockClear(); }); - it('should not change validatorData.selfStake but should update totalStakeReceived with positive stake', async () => { - // Act & Assign + it('should not change validatorData.selfStake but 
should update totalStake with positive stake', async () => { await command.execute(context); const validatorData = await validatorStore.get( createStoreGetter(stateStore), validatorAddress, ); - // Assert + expect(validatorData.totalStake).toEqual(senderStakeAmountPositive + validatorSelfStake); expect(validatorData.selfStake).toEqual(validatorSelfStake); }); - it('should not change validatorData.selfStake but should change totalStakeReceived and unlocking with negative stake', async () => { - // Act & Assign + it('should not change validatorData.selfStake but should change totalStake and unlocking with negative stake', async () => { await command.execute(context); transactionParamsDecoded = { stakes: [{ validatorAddress, amount: senderStakeAmountNegative * BigInt(-1) }], }; - - transactionParams = codec.encode(command.schema, transactionParamsDecoded); - - transaction.params = transactionParams; - + transaction.params = codec.encode(command.schema, transactionParamsDecoded); context = createTransactionContext({ transaction, stateStore, @@ -1875,7 +1642,6 @@ describe('StakeCommand', () => { senderAddress, ); - // Assert expect(validatorData.totalStake).toEqual( senderStakeAmountPositive - senderStakeAmountNegative + validatorSelfStake, ); From 3fe5e43b51abede8e38ed902d4bae02d1144d5fa Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 26 May 2023 08:57:21 +0200 Subject: [PATCH 035/170] Adds Stores for NFT Module (#8488) * :seedling: Adds stores for NFT module * :seedling: Adds getKey method to UserStore for NFT module * :bug: Fixes required property in schema for NFT module's UserStore * :recycle: Updates parameter name for NFT module's UserStore#getKey --- framework/src/modules/nft/constants.ts | 26 +++++ framework/src/modules/nft/stores/.gitkeep | 0 framework/src/modules/nft/stores/escrow.ts | 28 +++++ framework/src/modules/nft/stores/nft.ts | 73 ++++++++++++ .../src/modules/nft/stores/supported_nfts.ts | 62 ++++++++++ 
framework/src/modules/nft/stores/user.ts | 42 +++++++ .../test/unit/modules/nft/stores/nft.spec.ts | 106 ++++++++++++++++++ .../modules/nft/stores/supported_nfts.spec.ts | 65 +++++++++++ .../test/unit/modules/nft/stores/user.spec.ts | 34 ++++++ 9 files changed, 436 insertions(+) delete mode 100644 framework/src/modules/nft/stores/.gitkeep create mode 100644 framework/src/modules/nft/stores/escrow.ts create mode 100644 framework/src/modules/nft/stores/nft.ts create mode 100644 framework/src/modules/nft/stores/supported_nfts.ts create mode 100644 framework/src/modules/nft/stores/user.ts create mode 100644 framework/test/unit/modules/nft/stores/nft.spec.ts create mode 100644 framework/test/unit/modules/nft/stores/supported_nfts.spec.ts create mode 100644 framework/test/unit/modules/nft/stores/user.spec.ts diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index 206ba71de27..37f82833ed3 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -11,3 +11,29 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ + +export const LENGTH_CHAIN_ID = 4; +export const LENGTH_NFT_ID = 16; +export const LENGTH_COLLECTION_ID = 4; +export const MIN_LENGTH_MODULE_NAME = 1; +export const MAX_LENGTH_MODULE_NAME = 32; +export const LENGTH_ADDRESS = 20; + +export const enum NftEventResult { + SUCCESSFUL = 0, + NFT_DOES_NOT_EXIST = 1, + NFT_NOT_NATIVE = 2, + NFT_NOT_SUPPORTED = 3, + NFT_LOCKED = 4, + NFT_NOT_LOCKED = 5, + UNAUTHORIZED_UNLOCK = 6, + NFT_ESCROWED = 7, + NFT_NOT_ESCROWED = 8, + INITIATED_BY_NONNATIVE_CHAIN = 9, + INITIATED_BY_NONOWNER = 10, + RECOVER_FAIL_INVALID_INPUTS = 11, + INSUFFICIENT_BALANCE = 12, + DATA_TOO_LONG = 13, +} + +export type NFTErrorEventResult = Exclude; diff --git a/framework/src/modules/nft/stores/.gitkeep b/framework/src/modules/nft/stores/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/framework/src/modules/nft/stores/escrow.ts b/framework/src/modules/nft/stores/escrow.ts new file mode 100644 index 00000000000..719bf0b7fbe --- /dev/null +++ b/framework/src/modules/nft/stores/escrow.ts @@ -0,0 +1,28 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseStore } from '../../base_store'; + +export const escrowStoreSchema = { + $id: '/nft/store/escrow', + type: 'object', + required: [], + properties: {}, +}; + +type EscrowStoreData = Record; + +export class EscrowStore extends BaseStore { + public schema = escrowStoreSchema; +} diff --git a/framework/src/modules/nft/stores/nft.ts b/framework/src/modules/nft/stores/nft.ts new file mode 100644 index 00000000000..c1e8ce23244 --- /dev/null +++ b/framework/src/modules/nft/stores/nft.ts @@ -0,0 +1,73 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseStore, StoreGetter } from '../../base_store'; +import { MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from '../constants'; + +export interface NFTStoreData { + owner: Buffer; + attributesArray: { + module: string; + attributes: Buffer; + }[]; +} + +export const nftStoreSchema = { + $id: '/nft/store/nft', + type: 'object', + required: ['owner', 'attributesArray'], + properties: { + owner: { + dataType: 'bytes', + fieldNumber: 1, + }, + attributesArray: { + type: 'array', + fieldNumber: 2, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; + +export class NFTStore extends BaseStore { + public schema = nftStoreSchema; + + public async save(context: StoreGetter, nftID: Buffer, data: NFTStoreData): Promise { + const attributesArray = data.attributesArray.filter( + attribute => attribute.attributes.length > 0, + ); + attributesArray.sort((a, b) => a.module.localeCompare(b.module, 'en')); + + await this.set(context, nftID, { + ...data, + attributesArray, + }); + } +} diff --git a/framework/src/modules/nft/stores/supported_nfts.ts b/framework/src/modules/nft/stores/supported_nfts.ts new file mode 100644 index 00000000000..e16dcb0838e --- /dev/null +++ b/framework/src/modules/nft/stores/supported_nfts.ts @@ -0,0 +1,62 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseStore, StoreGetter } from '../../base_store'; +import { LENGTH_COLLECTION_ID } from '../constants'; + +export interface SupportedNFTsStoreData { + supportedCollectionIDArray: { + collectionID: Buffer; + }[]; +} + +export const supportedNFTsStoreSchema = { + $id: '/nft/store/supportedNFTs', + type: 'object', + required: ['supportedCollectionIDArray'], + properties: { + supportedCollectionIDArray: { + type: 'array', + fieldNumber: 1, + items: { + type: 'object', + required: ['collectionID'], + properties: { + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 1, + }, + }, + }, + }, + }, +}; + +export class SupportedNFTsStore extends BaseStore { + public schema = supportedNFTsStoreSchema; + + public async save( + context: StoreGetter, + chainID: Buffer, + data: SupportedNFTsStoreData, + ): Promise { + const supportedCollectionIDArray = data.supportedCollectionIDArray.sort((a, b) => + a.collectionID.compare(b.collectionID), + ); + + await this.set(context, chainID, { supportedCollectionIDArray }); + } +} diff --git a/framework/src/modules/nft/stores/user.ts b/framework/src/modules/nft/stores/user.ts new file mode 100644 index 00000000000..752b55abf21 --- /dev/null +++ b/framework/src/modules/nft/stores/user.ts @@ -0,0 +1,42 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { BaseStore } from '../../base_store'; +import { MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from '../constants'; + +export interface UserStoreData { + lockingModule: string; +} + +export const userStoreSchema = { + $id: '/nft/store/user', + type: 'object', + required: ['lockingModule'], + properties: { + lockingModule: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + }, +}; + +export class UserStore extends BaseStore { + public schema = userStoreSchema; + + public getKey(address: Buffer, nftID: Buffer): Buffer { + return Buffer.concat([address, nftID]); + } +} diff --git a/framework/test/unit/modules/nft/stores/nft.spec.ts b/framework/test/unit/modules/nft/stores/nft.spec.ts new file mode 100644 index 00000000000..7142bd787e1 --- /dev/null +++ b/framework/test/unit/modules/nft/stores/nft.spec.ts @@ -0,0 +1,106 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { NFTStore } from '../../../../../src/modules/nft/stores/nft'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; +import { createStoreGetter } from '../../../../../src/testing/utils'; +import { LENGTH_NFT_ID } from '../../../../../src/modules/nft/constants'; +import { StoreGetter } from '../../../../../src'; + +describe('NFTStore', () => { + let store: NFTStore; + let context: StoreGetter; + + beforeEach(() => { + store = new NFTStore('NFT', 5); + + const db = new InMemoryPrefixedStateDB(); + const stateStore = new PrefixedStateReadWriter(db); + + context = createStoreGetter(stateStore); + }); + + describe('save', () => { + it('should order NFTs of an owner by module', async () => { + const nftID = Buffer.alloc(LENGTH_NFT_ID, 0); + const owner = Buffer.alloc(8, 1); + + const unsortedAttributesArray = [ + { + module: 'token', + attributes: Buffer.alloc(8, 1), + }, + { + module: 'pos', + attributes: Buffer.alloc(8, 1), + }, + ]; + + const sortedAttributesArray = [ + { + module: 'pos', + attributes: Buffer.alloc(8, 1), + }, + { + module: 'token', + attributes: Buffer.alloc(8, 1), + }, + ]; + + await store.save(context, nftID, { + owner, + attributesArray: unsortedAttributesArray, + }); + + await expect(store.get(context, nftID)).resolves.toEqual({ + owner, + attributesArray: sortedAttributesArray, + }); + }); + + it('should remove modules with no attributes array', async () => { + const nftID = Buffer.alloc(LENGTH_NFT_ID, 0); + const owner = Buffer.alloc(8, 1); + + const attributesArray = [ + { + module: 'nft', + attributes: Buffer.alloc(0), + }, + { + module: 'pos', + attributes: Buffer.alloc(8, 1), + }, + ]; + + const filteredAttributesArray = [ + { + module: 'pos', + attributes: Buffer.alloc(8, 1), + }, + ]; + + await store.save(context, nftID, { + owner, + attributesArray, + }); + + await expect(store.get(context, 
nftID)).resolves.toEqual({ + owner, + attributesArray: filteredAttributesArray, + }); + }); + }); +}); diff --git a/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts b/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts new file mode 100644 index 00000000000..968cfa3bb42 --- /dev/null +++ b/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts @@ -0,0 +1,65 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { SupportedNFTsStore } from '../../../../../src/modules/nft/stores/supported_nfts'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; +import { createStoreGetter } from '../../../../../src/testing/utils'; +import { LENGTH_COLLECTION_ID } from '../../../../../src/modules/nft/constants'; +import { CHAIN_ID_LENGTH, StoreGetter } from '../../../../../src'; + +describe('NFTStore', () => { + let store: SupportedNFTsStore; + let context: StoreGetter; + + beforeEach(() => { + store = new SupportedNFTsStore('NFT', 5); + + const db = new InMemoryPrefixedStateDB(); + const stateStore = new PrefixedStateReadWriter(db); + + context = createStoreGetter(stateStore); + }); + + describe('save', () => { + it('should order supported NFT collection of a chain', async () => { + const chainID = Buffer.alloc(CHAIN_ID_LENGTH, 0); + + const unsortedSupportedCollections = [ + { + collectionID: Buffer.alloc(LENGTH_COLLECTION_ID, 1), + }, + { + collectionID: Buffer.alloc(LENGTH_COLLECTION_ID, 0), 
+ }, + { + collectionID: Buffer.from([0, 1, 1, 0]), + }, + ]; + + const sortedSupportedCollections = unsortedSupportedCollections.sort((a, b) => + a.collectionID.compare(b.collectionID), + ); + + const data = { + supportedCollectionIDArray: unsortedSupportedCollections, + }; + await store.save(context, chainID, data); + + await expect(store.get(context, chainID)).resolves.toEqual({ + supportedCollectionIDArray: sortedSupportedCollections, + }); + }); + }); +}); diff --git a/framework/test/unit/modules/nft/stores/user.spec.ts b/framework/test/unit/modules/nft/stores/user.spec.ts new file mode 100644 index 00000000000..e16fcc3f52a --- /dev/null +++ b/framework/test/unit/modules/nft/stores/user.spec.ts @@ -0,0 +1,34 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { UserStore } from '../../../../../src/modules/nft/stores/user'; +import { LENGTH_ADDRESS, LENGTH_NFT_ID } from '../../../../../src/modules/nft/constants'; + +describe('UserStore', () => { + let store: UserStore; + + beforeEach(() => { + store = new UserStore('NFT', 5); + }); + + describe('getKey', () => { + it('should concatenate the provided address and nftID', () => { + const address = utils.getRandomBytes(LENGTH_ADDRESS); + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + expect(store.getKey(address, nftID)).toEqual(Buffer.concat([address, nftID])); + }); + }); +}); From a4ca7b88ff31933a6a2441b78558b81177c32648 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 26 May 2023 14:21:58 +0200 Subject: [PATCH 036/170] Adds events for NFT module (#8485) * :seedling: Adds events for NFT module * :recycle: Updates event schema names and event type * :label: Removes duplicate types * :label: Updates TransferCrossChainEventData * :label: Updates NftErrorEventResult * :bug: Updates references to NFTErrorEventResult to NftErrorEventResult * :memo: Adds event data type for AllNFTsFromChainSupportedEvent * :recycle: Updates log interface * :memo: Updates NftEventResult * :memo: Updates NftEventResult * :bug: Fixes references to NftEventResult --- framework/src/modules/nft/constants.ts | 33 ++++---- framework/src/modules/nft/events/.gitkeep | 0 .../events/all_nfts_from_chain_suported.ts | 42 ++++++++++ .../all_nfts_from_chain_support_removed.ts | 42 ++++++++++ ...ll_nfts_from_collection_support_removed.ts | 49 ++++++++++++ .../all_nfts_from_collection_suppported.ts | 49 ++++++++++++ .../nft/events/all_nfts_support_removed.ts | 21 +++++ .../modules/nft/events/all_nfts_supported.ts | 21 +++++ .../src/modules/nft/events/ccm_transfer.ts | 61 +++++++++++++++ framework/src/modules/nft/events/create.ts | 62 +++++++++++++++ framework/src/modules/nft/events/destroy.ts | 
55 ++++++++++++++ framework/src/modules/nft/events/lock.ts | 61 +++++++++++++++ framework/src/modules/nft/events/recover.ts | 53 +++++++++++++ .../src/modules/nft/events/set_attributes.ts | 53 +++++++++++++ framework/src/modules/nft/events/transfer.ts | 65 ++++++++++++++++ .../nft/events/transfer_cross_chain.ts | 76 +++++++++++++++++++ framework/src/modules/nft/events/unlock.ts | 61 +++++++++++++++ framework/src/modules/nft/module.ts | 38 ++++++++++ 18 files changed, 827 insertions(+), 15 deletions(-) delete mode 100644 framework/src/modules/nft/events/.gitkeep create mode 100644 framework/src/modules/nft/events/all_nfts_from_chain_suported.ts create mode 100644 framework/src/modules/nft/events/all_nfts_from_chain_support_removed.ts create mode 100644 framework/src/modules/nft/events/all_nfts_from_collection_support_removed.ts create mode 100644 framework/src/modules/nft/events/all_nfts_from_collection_suppported.ts create mode 100644 framework/src/modules/nft/events/all_nfts_support_removed.ts create mode 100644 framework/src/modules/nft/events/all_nfts_supported.ts create mode 100644 framework/src/modules/nft/events/ccm_transfer.ts create mode 100644 framework/src/modules/nft/events/create.ts create mode 100644 framework/src/modules/nft/events/destroy.ts create mode 100644 framework/src/modules/nft/events/lock.ts create mode 100644 framework/src/modules/nft/events/recover.ts create mode 100644 framework/src/modules/nft/events/set_attributes.ts create mode 100644 framework/src/modules/nft/events/transfer.ts create mode 100644 framework/src/modules/nft/events/transfer_cross_chain.ts create mode 100644 framework/src/modules/nft/events/unlock.ts diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index 37f82833ed3..c0cbc168a98 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -20,20 +20,23 @@ export const MAX_LENGTH_MODULE_NAME = 32; export const LENGTH_ADDRESS = 20; export 
const enum NftEventResult { - SUCCESSFUL = 0, - NFT_DOES_NOT_EXIST = 1, - NFT_NOT_NATIVE = 2, - NFT_NOT_SUPPORTED = 3, - NFT_LOCKED = 4, - NFT_NOT_LOCKED = 5, - UNAUTHORIZED_UNLOCK = 6, - NFT_ESCROWED = 7, - NFT_NOT_ESCROWED = 8, - INITIATED_BY_NONNATIVE_CHAIN = 9, - INITIATED_BY_NONOWNER = 10, - RECOVER_FAIL_INVALID_INPUTS = 11, - INSUFFICIENT_BALANCE = 12, - DATA_TOO_LONG = 13, + RESULT_SUCCESSFUL = 0, + RESULT_NFT_DOES_NOT_EXIST = 1, + RESULT_NFT_NOT_NATIVE = 2, + RESULT_NFT_NOT_SUPPORTED = 3, + RESULT_NFT_LOCKED = 4, + RESULT_NFT_NOT_LOCKED = 5, + RESULT_UNAUTHORIZED_UNLOCK = 6, + RESULT_NFT_ESCROWED = 7, + RESULT_NFT_NOT_ESCROWED = 8, + RESULT_INITIATED_BY_NONNATIVE_CHAIN = 9, + RESULT_INITIATED_BY_NONOWNER = 10, + RESULT_RECOVER_FAIL_INVALID_INPUTS = 11, + RESULT_INSUFFICIENT_BALANCE = 12, + RESULT_DATA_TOO_LONG = 13, } -export type NFTErrorEventResult = Exclude; +export type NftErrorEventResult = Exclude< + NftEventResult, + NftEventResult.RESULT_NFT_ESCROWED | NftEventResult.RESULT_SUCCESSFUL +>; diff --git a/framework/src/modules/nft/events/.gitkeep b/framework/src/modules/nft/events/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/framework/src/modules/nft/events/all_nfts_from_chain_suported.ts b/framework/src/modules/nft/events/all_nfts_from_chain_suported.ts new file mode 100644 index 00000000000..9798ac42b9c --- /dev/null +++ b/framework/src/modules/nft/events/all_nfts_from_chain_suported.ts @@ -0,0 +1,42 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_CHAIN_ID } from '../constants'; + +export interface AllNFTsFromChainSupportedEventData { + chainID: Buffer; +} + +export const allNFTsFromChainSupportedEventSchema = { + $id: '/nft/events/allNFTsFromChainSupported', + type: 'object', + required: ['chainID'], + properties: { + chainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 1, + }, + }, +}; + +export class AllNFTsFromChainSupportedEvent extends BaseEvent { + public schema = allNFTsFromChainSupportedEventSchema; + + public log(ctx: EventQueuer, chainID: Buffer): void { + this.add(ctx, { chainID }, [chainID]); + } +} diff --git a/framework/src/modules/nft/events/all_nfts_from_chain_support_removed.ts b/framework/src/modules/nft/events/all_nfts_from_chain_support_removed.ts new file mode 100644 index 00000000000..0fe1603823a --- /dev/null +++ b/framework/src/modules/nft/events/all_nfts_from_chain_support_removed.ts @@ -0,0 +1,42 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_CHAIN_ID } from '../constants'; + +export interface AllNFTsFromChainSupportRemovedEventData { + chainID: Buffer; +} + +export const allNFTsFromChainSupportRemovedEventSchema = { + $id: '/nft/events/allNFTsFromChainSupportRemoved', + type: 'object', + required: ['chainID'], + properties: { + chainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 1, + }, + }, +}; + +export class AllNFTsFromChainSupportRemovedEvent extends BaseEvent { + public schema = allNFTsFromChainSupportRemovedEventSchema; + + public log(ctx: EventQueuer, chainID: Buffer): void { + this.add(ctx, { chainID }, [chainID]); + } +} diff --git a/framework/src/modules/nft/events/all_nfts_from_collection_support_removed.ts b/framework/src/modules/nft/events/all_nfts_from_collection_support_removed.ts new file mode 100644 index 00000000000..388ae16d9aa --- /dev/null +++ b/framework/src/modules/nft/events/all_nfts_from_collection_support_removed.ts @@ -0,0 +1,49 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID } from '../constants'; + +export interface AllNFTsFromCollectionSupportRemovedEventData { + chainID: Buffer; + collectionID: Buffer; +} + +export const allNFTsFromCollectionSupportRemovedEventSchema = { + $id: '/nft/events/allNFTsFromCollectionSupportRemoved', + type: 'object', + required: ['chainID', 'collectionID'], + properties: { + chainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + }, +}; + +export class AllNFTsFromCollectionSupportRemovedEvent extends BaseEvent { + public schema = allNFTsFromCollectionSupportRemovedEventSchema; + + public log(ctx: EventQueuer, data: AllNFTsFromCollectionSupportRemovedEventData): void { + this.add(ctx, data, [data.chainID, data.collectionID]); + } +} diff --git a/framework/src/modules/nft/events/all_nfts_from_collection_suppported.ts b/framework/src/modules/nft/events/all_nfts_from_collection_suppported.ts new file mode 100644 index 00000000000..9b82b4f1539 --- /dev/null +++ b/framework/src/modules/nft/events/all_nfts_from_collection_suppported.ts @@ -0,0 +1,49 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID } from '../constants'; + +export interface AllNFTsFromCollectionSupportedEventData { + chainID: Buffer; + collectionID: Buffer; +} + +export const allNFTsFromCollectionSupportedEventSchema = { + $id: '/nft/events/allNFTsFromCollectionSupported', + type: 'object', + required: ['chainID', 'collectionID'], + properties: { + chainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + }, +}; + +export class AllNFTsFromCollectionSupportedEvent extends BaseEvent { + public schema = allNFTsFromCollectionSupportedEventSchema; + + public log(ctx: EventQueuer, data: AllNFTsFromCollectionSupportedEventData): void { + this.add(ctx, data, [data.chainID, data.collectionID]); + } +} diff --git a/framework/src/modules/nft/events/all_nfts_support_removed.ts b/framework/src/modules/nft/events/all_nfts_support_removed.ts new file mode 100644 index 00000000000..a15f6fbe6bf --- /dev/null +++ b/framework/src/modules/nft/events/all_nfts_support_removed.ts @@ -0,0 +1,21 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; + +export class AllNFTsSupportRemovedEvent extends BaseEvent { + public log(ctx: EventQueuer): void { + this.add(ctx, undefined); + } +} diff --git a/framework/src/modules/nft/events/all_nfts_supported.ts b/framework/src/modules/nft/events/all_nfts_supported.ts new file mode 100644 index 00000000000..80f1da06a20 --- /dev/null +++ b/framework/src/modules/nft/events/all_nfts_supported.ts @@ -0,0 +1,21 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseEvent, EventQueuer } from '../../base_event'; + +export class AllNFTsSupportedEvent extends BaseEvent { + public log(ctx: EventQueuer): void { + this.add(ctx, undefined); + } +} diff --git a/framework/src/modules/nft/events/ccm_transfer.ts b/framework/src/modules/nft/events/ccm_transfer.ts new file mode 100644 index 00000000000..1e72b946398 --- /dev/null +++ b/framework/src/modules/nft/events/ccm_transfer.ts @@ -0,0 +1,61 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_NFT_ID, NftEventResult } from '../constants'; + +export interface CCMTransferEventData { + senderAddress: Buffer; + recipientAddress: Buffer; + nftID: Buffer; +} + +export const ccmTransferEventSchema = { + $id: '/nft/events/ccmTransfer', + type: 'object', + required: ['senderAddress', 'recipientAddress', 'nftID', 'result'], + properties: { + senderAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 2, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 3, + }, + result: { + dataType: 'uint32', + fieldNumber: 4, + }, + }, +}; + +export class CcmTransferEvent extends BaseEvent { + public schema = ccmTransferEventSchema; + + public log(ctx: EventQueuer, data: CCMTransferEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ + data.senderAddress, + data.recipientAddress, + ]); + } +} diff --git a/framework/src/modules/nft/events/create.ts b/framework/src/modules/nft/events/create.ts new file mode 100644 index 00000000000..c14b93d1a88 --- /dev/null +++ b/framework/src/modules/nft/events/create.ts @@ -0,0 +1,62 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */
+
+import { BaseEvent, EventQueuer } from '../../base_event';
+import { LENGTH_COLLECTION_ID, LENGTH_NFT_ID, NftEventResult } from '../constants';
+
+export interface CreateEventData {
+	address: Buffer;
+	nftID: Buffer;
+	collectionID: Buffer;
+}
+
+export const createEventSchema = {
+	$id: '/nft/events/create',
+	type: 'object',
+	required: ['address', 'nftID', 'collectionID', 'result'],
+	properties: {
+		address: {
+			dataType: 'bytes',
+			format: 'lisk32',
+			fieldNumber: 1,
+		},
+		nftID: {
+			dataType: 'bytes',
+			minLength: LENGTH_NFT_ID,
+			maxLength: LENGTH_NFT_ID,
+			fieldNumber: 2,
+		},
+		collectionID: {
+			dataType: 'bytes',
+			minLength: LENGTH_COLLECTION_ID,
+			maxLength: LENGTH_COLLECTION_ID,
+			fieldNumber: 3,
+		},
+		result: {
+			dataType: 'uint32',
+			fieldNumber: 4,
+		},
+	},
+};
+
+export class CreateEvent extends BaseEvent<CreateEventData & { result: NftEventResult }> {
+	public schema = createEventSchema;
+
+	public log(ctx: EventQueuer, data: CreateEventData): void {
+		this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [
+			data.address,
+			data.nftID,
+		]);
+	}
+}
diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts
new file mode 100644
index 00000000000..1294f466ba9
--- /dev/null
+++ b/framework/src/modules/nft/events/destroy.ts
@@ -0,0 +1,55 @@
+/*
+ * Copyright © 2023 Lisk Foundation
+ *
+ * See the LICENSE file at the top-level directory of this distribution
+ * for licensing information.
+ *
+ * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
+ * no part of this software, including this file, may be copied, modified,
+ * propagated, or distributed except according to the terms contained in the
+ * LICENSE file.
+ *
+ * Removal or modification of this copyright notice is prohibited.
+ */
+
+import { BaseEvent, EventQueuer } from '../../base_event';
+import { LENGTH_NFT_ID, NftEventResult } from '../constants';
+
+export interface DestroyEventData {
+	address: Buffer;
+	nftID: Buffer;
+}
+
+export const destroyEventSchema = {
+	$id: '/nft/events/destroy',
+	type: 'object',
+	required: ['address', 'nftID', 'result'],
+	properties: {
+		address: {
+			dataType: 'bytes',
+			format: 'lisk32',
+			fieldNumber: 1,
+		},
+		nftID: {
+			dataType: 'bytes',
+			minLength: LENGTH_NFT_ID,
+			maxLength: LENGTH_NFT_ID,
+			fieldNumber: 2,
+		},
+		result: {
+			dataType: 'uint32',
+			fieldNumber: 3,
+		},
+	},
+};
+
+export class DestroyEvent extends BaseEvent<DestroyEventData & { result: NftEventResult }> {
+	public schema = destroyEventSchema;
+
+	public log(ctx: EventQueuer, data: DestroyEventData): void {
+		this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [
+			data.address,
+			data.nftID,
+		]);
+	}
+}
diff --git a/framework/src/modules/nft/events/lock.ts b/framework/src/modules/nft/events/lock.ts
new file mode 100644
index 00000000000..9820836158f
--- /dev/null
+++ b/framework/src/modules/nft/events/lock.ts
@@ -0,0 +1,61 @@
+/*
+ * Copyright © 2023 Lisk Foundation
+ *
+ * See the LICENSE file at the top-level directory of this distribution
+ * for licensing information.
+ *
+ * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
+ * no part of this software, including this file, may be copied, modified,
+ * propagated, or distributed except according to the terms contained in the
+ * LICENSE file.
+ *
+ * Removal or modification of this copyright notice is prohibited.
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { + LENGTH_NFT_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, + NftEventResult, +} from '../constants'; + +export interface LockEventData { + module: string; + nftID: Buffer; +} + +export const lockEventSchema = { + $id: '/nft/events/lock', + type: 'object', + required: ['module', 'nftID', 'result'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + result: { + dataType: 'uint32', + fieldNumber: 3, + }, + }, +}; + +export class LockEvent extends BaseEvent { + public schema = lockEventSchema; + + public log(ctx: EventQueuer, data: LockEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ + Buffer.from(data.module), + data.nftID, + ]); + } +} diff --git a/framework/src/modules/nft/events/recover.ts b/framework/src/modules/nft/events/recover.ts new file mode 100644 index 00000000000..589e0585f12 --- /dev/null +++ b/framework/src/modules/nft/events/recover.ts @@ -0,0 +1,53 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_NFT_ID, LENGTH_CHAIN_ID, NftEventResult } from '../constants'; + +export interface RecoverEventData { + terminatedChainID: Buffer; + nftID: Buffer; +} + +export const recoverEventSchema = { + $id: '/nft/events/recover', + type: 'object', + required: ['terminatedChainID', 'nftID', 'result'], + properties: { + terminatedChainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + result: { + dataType: 'uint32', + fieldNumber: 3, + }, + }, +}; + +export class RecoverEvent extends BaseEvent { + public schema = recoverEventSchema; + + public log(ctx: EventQueuer, data: RecoverEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [data.nftID]); + } +} diff --git a/framework/src/modules/nft/events/set_attributes.ts b/framework/src/modules/nft/events/set_attributes.ts new file mode 100644 index 00000000000..7d6ef954240 --- /dev/null +++ b/framework/src/modules/nft/events/set_attributes.ts @@ -0,0 +1,53 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_NFT_ID, NftEventResult } from '../constants'; + +export interface SetAttributesEventData { + nftID: Buffer; + attributes: Buffer; +} + +export const setAttributesEventSchema = { + $id: '/nft/events/setAttributes', + type: 'object', + required: ['nftID', 'attributes', 'result'], + properties: { + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + result: { + dataType: 'uint32', + fieldNumber: 3, + }, + }, +}; + +export class SetAttributesEvent extends BaseEvent< + SetAttributesEventData & { result: NftEventResult } +> { + public schema = setAttributesEventSchema; + + public log(ctx: EventQueuer, data: SetAttributesEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [data.nftID]); + } +} diff --git a/framework/src/modules/nft/events/transfer.ts b/framework/src/modules/nft/events/transfer.ts new file mode 100644 index 00000000000..591abc8c306 --- /dev/null +++ b/framework/src/modules/nft/events/transfer.ts @@ -0,0 +1,65 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_NFT_ID, NftErrorEventResult, NftEventResult } from '../constants'; + +export interface TransferEventData { + senderAddress: Buffer; + recipientAddress: Buffer; + nftID: Buffer; +} + +export const transferEventSchema = { + $id: '/nft/events/transfer', + type: 'object', + required: ['senderAddress', 'recipientAddress', 'nftID', 'result'], + properties: { + senderAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 2, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 3, + }, + result: { + dataType: 'uint32', + fieldNumber: 4, + }, + }, +}; + +export class TransferEvent extends BaseEvent { + public schema = transferEventSchema; + + public log(ctx: EventQueuer, data: TransferEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ + data.senderAddress, + data.recipientAddress, + ]); + } + + public error(ctx: EventQueuer, data: TransferEventData, result: NftErrorEventResult): void { + this.add(ctx, { ...data, result }, [data.senderAddress, data.recipientAddress], true); + } +} diff --git a/framework/src/modules/nft/events/transfer_cross_chain.ts b/framework/src/modules/nft/events/transfer_cross_chain.ts new file mode 100644 index 00000000000..bc864793143 --- /dev/null +++ b/framework/src/modules/nft/events/transfer_cross_chain.ts @@ -0,0 +1,76 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { LENGTH_NFT_ID, LENGTH_CHAIN_ID, NftEventResult } from '../constants'; + +export interface TransferCrossChainEventData { + senderAddress: Buffer; + recipientAddress: Buffer; + receivingChainID: Buffer; + nftID: Buffer; + includeAttributes: boolean; +} + +export const transferCrossChainEventSchema = { + $id: '/nft/events/transferCrossChain', + type: 'object', + required: ['senderAddress', 'recipientAddress', 'nftID', 'receivingChainID', 'result'], + properties: { + senderAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 2, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 3, + }, + receivingChainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 4, + }, + includeAttributes: { + dataType: 'boolean', + fieldNumber: 5, + }, + result: { + dataType: 'uint32', + fieldNumber: 6, + }, + }, +}; + +export class TransferCrossChainEvent extends BaseEvent< + TransferCrossChainEventData & { result: NftEventResult } +> { + public schema = transferCrossChainEventSchema; + + public log(ctx: EventQueuer, data: TransferCrossChainEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ + data.senderAddress, + data.recipientAddress, + data.receivingChainID, + ]); + } +} diff --git a/framework/src/modules/nft/events/unlock.ts b/framework/src/modules/nft/events/unlock.ts new file mode 100644 index 00000000000..63629e7b5e9 --- /dev/null +++ b/framework/src/modules/nft/events/unlock.ts @@ -0,0 +1,61 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { + LENGTH_NFT_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, + NftEventResult, +} from '../constants'; + +export interface UnlockEventData { + module: string; + nftID: Buffer; +} + +export const unlockEventSchema = { + $id: '/nft/events/unlock', + type: 'object', + required: ['module', 'nftID', 'result'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + result: { + dataType: 'uint32', + fieldNumber: 3, + }, + }, +}; + +export class UnlockEvent extends BaseEvent { + public schema = unlockEventSchema; + + public log(ctx: EventQueuer, data: UnlockEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ + Buffer.from(data.module), + data.nftID, + ]); + } +} diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 55e24c14fc8..24fc41e4073 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -18,6 +18,20 @@ import { BaseInteroperableModule } from '../interoperability'; import { InteroperabilityMethod } from '../token/types'; import { NFTInteroperableMethod } from './cc_method'; import { NFTEndpoint } from './endpoint'; +import { AllNFTsFromChainSupportedEvent } from './events/all_nfts_from_chain_suported'; +import { AllNFTsFromCollectionSupportRemovedEvent } from './events/all_nfts_from_collection_support_removed'; +import { 
AllNFTsFromCollectionSupportedEvent } from './events/all_nfts_from_collection_suppported'; +import { AllNFTsSupportRemovedEvent } from './events/all_nfts_support_removed'; +import { AllNFTsSupportedEvent } from './events/all_nfts_supported'; +import { CcmTransferEvent } from './events/ccm_transfer'; +import { CreateEvent } from './events/create'; +import { DestroyEvent } from './events/destroy'; +import { LockEvent } from './events/lock'; +import { RecoverEvent } from './events/recover'; +import { SetAttributesEvent } from './events/set_attributes'; +import { TransferEvent } from './events/transfer'; +import { TransferCrossChainEvent } from './events/transfer_cross_chain'; +import { UnlockEvent } from './events/unlock'; import { InternalMethod } from './internal_method'; import { NFTMethod } from './method'; import { FeeMethod } from './types'; @@ -36,6 +50,30 @@ export class NFTModule extends BaseInteroperableModule { // eslint-disable-next-line no-useless-constructor public constructor() { super(); + this.events.register(TransferEvent, new TransferEvent(this.name)); + this.events.register(TransferCrossChainEvent, new TransferCrossChainEvent(this.name)); + this.events.register(CcmTransferEvent, new CcmTransferEvent(this.name)); + this.events.register(CreateEvent, new CreateEvent(this.name)); + this.events.register(DestroyEvent, new DestroyEvent(this.name)); + this.events.register(DestroyEvent, new DestroyEvent(this.name)); + this.events.register(LockEvent, new LockEvent(this.name)); + this.events.register(UnlockEvent, new UnlockEvent(this.name)); + this.events.register(SetAttributesEvent, new SetAttributesEvent(this.name)); + this.events.register(RecoverEvent, new RecoverEvent(this.name)); + this.events.register(AllNFTsSupportedEvent, new AllNFTsSupportedEvent(this.name)); + this.events.register(AllNFTsSupportRemovedEvent, new AllNFTsSupportRemovedEvent(this.name)); + this.events.register( + AllNFTsFromChainSupportedEvent, + new 
AllNFTsFromChainSupportedEvent(this.name), + ); + this.events.register( + AllNFTsFromCollectionSupportedEvent, + new AllNFTsFromCollectionSupportedEvent(this.name), + ); + this.events.register( + AllNFTsFromCollectionSupportRemovedEvent, + new AllNFTsFromCollectionSupportRemovedEvent(this.name), + ); } public addDependencies(interoperabilityMethod: InteroperabilityMethod, _feeMethod: FeeMethod) { From 09a0498929ddd9f9ef310ed183c74c73be9beeb0 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Tue, 30 May 2023 16:32:23 +0200 Subject: [PATCH 037/170] Add missing unit test for PoM coammand (#8463) :white_check_mark: Add missing unit test Refactor the unit test Add an expected error message Update the init test Set the generator addresses of both headers to be the same address. Add unit test Refactor unit test Co-authored-by: shuse2 --- .../pos/commands/report_misbehavior.spec.ts | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/framework/test/unit/modules/pos/commands/report_misbehavior.spec.ts b/framework/test/unit/modules/pos/commands/report_misbehavior.spec.ts index 9a69593cfb3..3c8524345be 100644 --- a/framework/test/unit/modules/pos/commands/report_misbehavior.spec.ts +++ b/framework/test/unit/modules/pos/commands/report_misbehavior.spec.ts @@ -528,6 +528,33 @@ describe('ReportMisbehaviorCommand', () => { await expect(pomCommand.verify(context)).not.toReject(); }); + + it('should return error when generator of header1 is not a validator', async () => { + const randomAddress = utils.getRandomBytes(20); + + transactionParamsDecoded = { + header1: codec.encode(blockHeaderSchema, { + ...header1, + generatorAddress: randomAddress, + }), + header2: codec.encode(blockHeaderSchema, { + ...header2, + generatorAddress: randomAddress, + }), + }; + transactionParams = codec.encode(pomCommand.schema, transactionParamsDecoded); + transaction.params = transactionParams; + context = testing + .createTransactionContext({ + stateStore, + transaction, + }) + 
.createCommandExecuteContext(pomCommand.schema); + + await expect(pomCommand.verify(context)).rejects.toThrow( + `Specified key 7160f8688000${randomAddress.toString('hex')} does not exist`, + ); + }); }); describe('execute', () => { From 1acd131823589a3034570e5fb0203050b15c4b21 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 1 Jun 2023 12:55:37 +0200 Subject: [PATCH 038/170] Missing unit tests Token module (#8435) * :recycle: Removes duplicate unit test for TransferCommand#verify * :white_check_mark: Adds unit test for TransferCrossChainCommand#verify method for invalid messageFeeTokenID length * :white_check_mark: :recycle: Removes redundant setup code in unit test for TransferCrossChainCommand#verify * :white_check_mark: Adds and modifies tests for CrosschainTransferCommand * :recycle: Removes parameters validation in CrossChainTransferCommand#execute * :white_check_mark: Adds test case when all tokens are supported for SupportedTokenStore#isSupported * :bug: :white_check_mark: Fixes test case when all tokens are supported for SupportedTokenStore#isSupported * :white_check_mark: Adds additional matcher to verify if chain is supported for SupportedTokensStore#supportChain * :bug: :white_check_mark: Modifies SupportedTokenStore#supportToken to not update state for already supported token * :white_check_mark: Adds tests for SupportedTokenStore#removeSupportForToken to throw error for LSK * :white_check_mark: Removes redundant test for SupportedTokensStore#removeSupportForToken if the only supported token is removed * :bug: :white_check_mark: Rephrases test for SupportedTokensStore#removeSupportForToken to remove the chain from store if only supported token is removed * :bug: :white_check_mark: Adds test for SupportedTokensStore#removeSupportForToken to not modify the store state of the chain if support for unsupported token is removed * :white_check_mark: Adds unit tests for TokenEndpoint#isSupported * :white_check_mark: 
Modifies unit test fixture for TokenModule#initGenesisState when tokens are not sorted for SupportedTokensStore * :white_check_mark: Adds fixtures to test invalid tokenID for supplySubstore for TokenModule#initGenesisState * :recycle: :white_check_mark: Updates error expectations to test invalid tokenID for supplySubstore for TokenModule#initGenesisState * :white_check_mark: Adds and updates fixtures to test invalid tokenID for userSubStore for TokenModule#initGenesisState * :white_check_mark: Adds fixtures to test invalid parameters for escrowSubstore for TokenModule#initGenesisState * :white_check_mark: Adds fixtures to test invalid parameters for supportedTokensSubstore for TokenModule#initGenesisState * :white_check_mark: Adds fixtures to test duplicate address and tokenID for userSubstore for TokenModule#initGenesisState * :recycle: Updates test description for SupportedTokensStore#removeSupportForToken Co-authored-by: shuse2 * :recycle: Removes redudant condition for unsupported token for SupportedTokensStore#supportToken * :recycle: Reverts test for SupportedTokensStore#supportToken when chainID is LSK * :white_check_mark: Adds & updates tests for CrossChainTransferCommand#execute method for non-existent escrow account and insufficient amount * :recycle: Rephrased test description for SupportedTokensStore#removeSupportForToken to test for execption if all tokens are supported for the chain. 
* :recycle: Rephrases test case for SupportedTokensStore#removeSupportForToken to not update the store if token is not already supported * :bug: :white_check_mark: Fixes total amount calculation and adds/updates related tests * :white_check_mark: Adds fixtures to test incorrect order of tokenID for an address in userSubstore for TokenModule#initGenesisState * :white_check_mark: Adds/Updates tests for SupportedTokensStore#supportChain to set supportedTokenIDs to empty list * :recycle: :white_check_mark: Updates tests to fail if sender balance is insufficient for the sum of amount and messageFee if tokenID and messageFeeTokenID are same --- .../modules/token/cc_commands/cc_transfer.ts | 1 - .../token/cc_commands/cc_transfer.spec.ts | 215 ++++++++++-------- .../modules/token/commands/transfer.spec.ts | 28 +-- .../commands/transfer_cross_chain.spec.ts | 40 +++- .../test/unit/modules/token/endpoint.spec.ts | 18 ++ .../token/init_genesis_state_fixture.ts | 197 +++++++++++++++- .../token/stores/supported_tokens.spec.ts | 76 +++++-- 7 files changed, 420 insertions(+), 155 deletions(-) diff --git a/framework/src/modules/token/cc_commands/cc_transfer.ts b/framework/src/modules/token/cc_commands/cc_transfer.ts index d07cd5b4a58..48a8f39b221 100644 --- a/framework/src/modules/token/cc_commands/cc_transfer.ts +++ b/framework/src/modules/token/cc_commands/cc_transfer.ts @@ -85,7 +85,6 @@ export class CrossChainTransferCommand extends BaseCCCommand { crossChainTransferMessageParams, ccm.params, ); - validator.validate(crossChainTransferMessageParams, params); const { tokenID, amount, senderAddress } = params; recipientAddress = params.recipientAddress; const [tokenChainID] = splitTokenID(tokenID); diff --git a/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts b/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts index 4d62513607c..bbd7c78ff72 100644 --- a/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts +++ 
b/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts @@ -38,6 +38,7 @@ import { ccmTransferEventSchema } from '../../../../../src/modules/token/events/ import { SupplyStore } from '../../../../../src/modules/token/stores/supply'; import { InternalMethod } from '../../../../../src/modules/token/internal_method'; import { InteroperabilityMethod } from '../../../../../src/modules/token/types'; +import { MAX_RESERVED_ERROR_STATUS } from '../../../../../src/modules/interoperability/constants'; describe('CrossChain Transfer Command', () => { const tokenModule = new TokenModule(); @@ -87,6 +88,60 @@ describe('CrossChain Transfer Command', () => { let escrowStore: EscrowStore; let userStore: UserStore; + const createTransactionContextWithOverridingCCMAndParams = ( + { params, ccm }: { params?: Record; ccm?: Record } = { + params: {}, + ccm: {}, + }, + ) => { + const validParams = { + tokenID: defaultTokenID, + amount: defaultAmount, + senderAddress: defaultAddress, + recipientAddress: defaultAddress, + data: 'ddd', + }; + + const finalCCM = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: tokenModule.name, + nonce: BigInt(1), + sendingChainID: Buffer.from([3, 0, 0, 0]), + receivingChainID: Buffer.from([0, 0, 0, 1]), + fee: BigInt(3000), + status: CCM_STATUS_OK, + params: codec.encode(crossChainTransferMessageParams, { + ...validParams, + ...params, + }), + ...ccm, + }; + + const context = { + ccm: finalCCM, + feeAddress: defaultAddress, + transaction: { + senderAddress: defaultAddress, + fee: BigInt(0), + params: defaultEncodedCCUParams, + }, + header: { + height: 0, + timestamp: 0, + }, + stateStore, + contextStore: new Map(), + getMethodContext: () => methodContext, + eventQueue: new EventQueue(0), + ccmSize: BigInt(30), + getStore: (moduleID: Buffer, prefix: Buffer) => stateStore.getStore(moduleID, prefix), + logger: fakeLogger, + chainID: ownChainID, + }; + + return context; + }; + beforeEach(async () => { method = new 
TokenMethod(tokenModule.stores, tokenModule.events, tokenModule.name); command = new CrossChainTransferCommand(tokenModule.stores, tokenModule.events); @@ -142,52 +197,61 @@ describe('CrossChain Transfer Command', () => { }); describe('verify', () => { - it('should throw if validation fails', async () => { - // Arrange - const params = codec.encode(crossChainTransferMessageParams, { - tokenID: Buffer.from([0, 0, 0, 1]), - amount: defaultAmount, - senderAddress: defaultAddress, - recipientAddress: defaultAddress, - data: 'ddd', + it('should throw if tokenID does not have valid length', async () => { + const tokenIDMinLengthContext = createTransactionContextWithOverridingCCMAndParams({ + params: { tokenID: Buffer.alloc(4, 1) }, }); - const ccm = { - crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, - module: tokenModule.name, - nonce: BigInt(1), - sendingChainID: Buffer.from([3, 0, 0, 0]), - receivingChainID: Buffer.from([0, 0, 0, 1]), - fee: BigInt(30000), - status: CCM_STATUS_OK, - params, - }; + await expect(command.verify(tokenIDMinLengthContext)).rejects.toThrow( + `Property '.tokenID' minLength not satisfied`, + ); - const ctx = { - ccm, - feeAddress: defaultAddress, - transaction: { - senderAddress: defaultAddress, - fee: BigInt(0), - params: defaultEncodedCCUParams, - }, - header: { - height: 0, - timestamp: 0, - }, - stateStore, - contextStore: new Map(), - getMethodContext: () => methodContext, - eventQueue: new EventQueue(0), - ccmSize: BigInt(30), - getStore: (moduleID: Buffer, prefix: Buffer) => stateStore.getStore(moduleID, prefix), - logger: fakeLogger, - chainID: utils.getRandomBytes(32), - }; + const tokenIDMaxLengthContext = createTransactionContextWithOverridingCCMAndParams({ + params: { tokenID: Buffer.alloc(10, 1) }, + }); - // Act & Assert - await expect(command.verify(ctx)).rejects.toThrow( - `Property '.tokenID' minLength not satisfied`, + await expect(command.verify(tokenIDMaxLengthContext)).rejects.toThrow( + `Property '.tokenID' 
maxLength exceeded`, + ); + }); + + it('should throw if senderAddress does not have valid length', async () => { + const invalidSenderAddressContext = createTransactionContextWithOverridingCCMAndParams({ + params: { senderAddress: Buffer.alloc(23, 1) }, + }); + + await expect(command.verify(invalidSenderAddressContext)).rejects.toThrow( + `Property '.senderAddress' address length invalid`, + ); + }); + + it('should throw if recipientAddress does not have valid length', async () => { + const invalidRecipientAddressContext = createTransactionContextWithOverridingCCMAndParams({ + params: { recipientAddress: Buffer.alloc(23, 1) }, + }); + + await expect(command.verify(invalidRecipientAddressContext)).rejects.toThrow( + `Property '.recipientAddress' address length invalid`, + ); + }); + + it('should throw if data exceeds exceeds 64 characters', async () => { + const invalidDataContext = createTransactionContextWithOverridingCCMAndParams({ + params: { data: '1'.repeat(65) }, + }); + + await expect(command.verify(invalidDataContext)).rejects.toThrow( + `Property '.data' must NOT have more than 64 characters`, + ); + }); + + it('should throw if CCM.status > MAX_RESERVED_ERROR_STATUS', async () => { + const invalidCMMStatusContext = createTransactionContextWithOverridingCCMAndParams({ + ccm: { status: MAX_RESERVED_ERROR_STATUS + 1 }, + }); + + await expect(command.verify(invalidCMMStatusContext)).rejects.toThrow( + `Invalid CCM status code.`, ); }); @@ -337,55 +401,6 @@ describe('CrossChain Transfer Command', () => { }); describe('execute', () => { - it('should throw if validation fails', async () => { - // Arrange - const params = codec.encode(crossChainTransferMessageParams, { - tokenID: Buffer.from([0, 0, 0, 1]), - amount: defaultAmount, - senderAddress: defaultAddress, - recipientAddress: defaultAddress, - data: 'ddd', - }); - - const ccm = { - crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, - module: tokenModule.name, - nonce: BigInt(1), - sendingChainID: 
Buffer.from([3, 0, 0, 0]), - receivingChainID: Buffer.from([0, 0, 0, 1]), - fee: BigInt(30000), - status: CCM_STATUS_OK, - params, - }; - - const ctx = { - ccm, - feeAddress: defaultAddress, - transaction: { - senderAddress: defaultAddress, - fee: BigInt(0), - params: defaultEncodedCCUParams, - }, - header: { - height: 0, - timestamp: 0, - }, - stateStore, - contextStore: new Map(), - getMethodContext: () => methodContext, - eventQueue: new EventQueue(0), - ccmSize: BigInt(30), - getStore: (moduleID: Buffer, prefix: Buffer) => stateStore.getStore(moduleID, prefix), - logger: fakeLogger, - chainID: ownChainID, - }; - - // Act & Assert - await expect(command.execute(ctx)).rejects.toThrow( - `Property '.tokenID' minLength not satisfied`, - ); - }); - it('should throw if fail to decode the CCM', async () => { // Arrange const ccm = { @@ -533,9 +548,9 @@ describe('CrossChain Transfer Command', () => { // Act & Assert await expect(command.execute(ctx)).resolves.toBeUndefined(); - await expect( - method.userAccountExists(methodContext, defaultAddress, defaultTokenID), - ).resolves.toBe(true); + const key = userStore.getKey(defaultAddress, defaultTokenID); + const userData = await userStore.get(methodContext, key); + expect(userData.availableBalance).toEqual(defaultAccount.availableBalance + defaultAmount); }); it("should initialize account when recipient user store doesn't exist", async () => { @@ -723,7 +738,7 @@ describe('CrossChain Transfer Command', () => { }); }); - it('should throw when the fee to initialize an account is insufficient', async () => { + it('should throw when escrow account has insufficient balance', async () => { // Arrange const params = codec.encode(crossChainTransferMessageParams, { tokenID: defaultTokenID, @@ -769,5 +784,15 @@ describe('CrossChain Transfer Command', () => { // Act && Assert await expect(command.execute(ctx)).rejects.toThrow('Insufficient balance in escrow account.'); }); + + it('should throw if escrow account does not exist', 
async () => { + const escrowAccountNotExistingContext = createTransactionContextWithOverridingCCMAndParams({ + params: { tokenID: Buffer.from([0, 0, 0, 1, 0, 2, 3, 8]) }, + }); + + await expect(command.execute(escrowAccountNotExistingContext)).rejects.toThrow( + 'does not exist', + ); + }); }); }); diff --git a/framework/test/unit/modules/token/commands/transfer.spec.ts b/framework/test/unit/modules/token/commands/transfer.spec.ts index 8372238ad22..9ec7eb4fe43 100644 --- a/framework/test/unit/modules/token/commands/transfer.spec.ts +++ b/framework/test/unit/modules/token/commands/transfer.spec.ts @@ -161,32 +161,6 @@ describe('Transfer command', () => { ).rejects.toThrow(".data' must NOT have more than 64 characters"); }); - it('should success when all parameters are valid', async () => { - jest - .spyOn(command['_method'], 'getAvailableBalance') - .mockResolvedValue(BigInt(100000000 + 1)); - - const context = createTransactionContext({ - transaction: new Transaction({ - module: 'token', - command: 'transfer', - fee: BigInt(5000000), - nonce: BigInt(0), - senderPublicKey: utils.getRandomBytes(32), - params: codec.encode(transferParamsSchema, { - tokenID: Buffer.from('0000000100000000', 'hex'), - amount: BigInt(100000000), - recipientAddress: utils.getRandomBytes(20), - data: '1'.repeat(64), - }), - signatures: [utils.getRandomBytes(64)], - }), - }); - const result = await command.verify(context.createCommandVerifyContext(transferParamsSchema)); - - expect(result.status).toEqual(VerifyStatus.OK); - }); - it('should fail if balance for the provided tokenID is insufficient', async () => { const amount = BigInt(100000000); const availableBalance = amount - BigInt(1); @@ -214,7 +188,7 @@ describe('Transfer command', () => { ).rejects.toThrow(`balance ${availableBalance} is not sufficient for ${amount}`); }); - it('should pass if balance for the provided tokenID is sufficient', async () => { + it('should pass if parameters are valid and balance for the provided tokenID 
is sufficient', async () => { const amount = BigInt(100000000); jest.spyOn(command['_method'], 'getAvailableBalance').mockResolvedValue(amount); diff --git a/framework/test/unit/modules/token/commands/transfer_cross_chain.spec.ts b/framework/test/unit/modules/token/commands/transfer_cross_chain.spec.ts index e45d78592f7..c768db25cf5 100644 --- a/framework/test/unit/modules/token/commands/transfer_cross_chain.spec.ts +++ b/framework/test/unit/modules/token/commands/transfer_cross_chain.spec.ts @@ -254,6 +254,34 @@ describe('CCTransfer command', () => { ); }); + it('should fail when messageFeeTokenID does not have valid length', async () => { + const messageFeeTokenIDMinLengthContext = createTransactionContextWithOverridingParams({ + messageFeeTokenID: Buffer.from('00', 'hex'), + }); + + const messageFeeTokenIDMaxLengthContext = createTransactionContextWithOverridingParams({ + messageFeeTokenID: Buffer.from('00000000000000000000000000', 'hex'), + }); + + expectSchemaValidationError( + await command.verify( + messageFeeTokenIDMinLengthContext.createCommandExecuteContext( + crossChainTransferParamsSchema, + ), + ), + "'.messageFeeTokenID' minLength not satisfied", + ); + + expectSchemaValidationError( + await command.verify( + messageFeeTokenIDMaxLengthContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + "'.messageFeeTokenID' maxLength exceeded", + ); + }); + it('should fail when chainID of the tokenID is other than ownChainID or receivingChainID', async () => { const invalidtokenChainIDContext = createTransactionContextWithOverridingParams({ tokenID: Buffer.from([0, 0, 1, 1, 0, 0, 0, 0]), @@ -310,18 +338,6 @@ describe('CCTransfer command', () => { }, ); - await userStore.save( - insufficientBalanceContext.createCommandExecuteContext( - crossChainTransferParamsSchema, - ), - insufficientBalanceContext.transaction.senderAddress, - messageFeeTokenID, - { - availableBalance: amount, - lockedBalances: [], - }, - ); - 
expectSchemaValidationError( await command.verify( insufficientBalanceContext.createCommandVerifyContext(crossChainTransferParamsSchema), diff --git a/framework/test/unit/modules/token/endpoint.spec.ts b/framework/test/unit/modules/token/endpoint.spec.ts index bb95307a983..1d8e8b246e4 100644 --- a/framework/test/unit/modules/token/endpoint.spec.ts +++ b/framework/test/unit/modules/token/endpoint.spec.ts @@ -315,6 +315,24 @@ describe('token endpoint', () => { }); describe('isSupported', () => { + it('should return true for a native token', async () => { + const moduleEndpointContext = createTransientModuleEndpointContext({ + stateStore, + params: { tokenID: nativeTokenID.toString('hex') }, + }); + + expect(await endpoint.isSupported(moduleEndpointContext)).toEqual({ supported: true }); + }); + + it('should return true for LSK', async () => { + const moduleEndpointContext = createTransientModuleEndpointContext({ + stateStore, + params: { tokenID: mainChainTokenID.toString('hex') }, + }); + + expect(await endpoint.isSupported(moduleEndpointContext)).toEqual({ supported: true }); + }); + it('should return true for a supported token', async () => { await supportedTokensStore.set(methodContext, foreignChainID, { supportedTokenIDs: supportedForeignChainTokenIDs, diff --git a/framework/test/unit/modules/token/init_genesis_state_fixture.ts b/framework/test/unit/modules/token/init_genesis_state_fixture.ts index 575f7cbb6d5..3301a66b61d 100644 --- a/framework/test/unit/modules/token/init_genesis_state_fixture.ts +++ b/framework/test/unit/modules/token/init_genesis_state_fixture.ts @@ -63,6 +63,22 @@ const validData = { export const validGenesisAssets = [['Valid genesis asset', validData]]; export const invalidGenesisAssets = [ + [ + 'minimum token id length not satisfied', + { + ...validData, + userSubstore: [ + { + address: Buffer.alloc(20, 0), + tokenID: Buffer.from([9, 0, 0]), + availableBalance: oneUnit, + lockedBalances: [{ module: 'pos', amount: oneUnit }], + }, + 
...validData.userSubstore.slice(1), + ], + }, + "tokenID' minLength not satisfied", + ], [ 'Invalid address length', { @@ -80,7 +96,7 @@ export const invalidGenesisAssets = [ ".address' address length invalid", ], [ - 'Invalid token id length', + 'maximum token id length', { ...validData, userSubstore: [ @@ -127,6 +143,22 @@ export const invalidGenesisAssets = [ }, 'UserSubstore must be sorted by address and tokenID', ], + [ + 'Unsorted tokens in userstore by address', + { + ...validData, + userSubstore: [ + { + address: Buffer.alloc(20, 1), + tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), + availableBalance: BigInt('1000'), + lockedBalances: [{ module: 'pos', amount: oneUnit }], + }, + ...validData.userSubstore.slice(1), + ], + }, + 'UserSubstore must be sorted by address and tokenID', + ], [ 'Locked balances is not sorted', { @@ -197,6 +229,55 @@ export const invalidGenesisAssets = [ }, 'has empty data', ], + [ + 'Duplicate address and tokenID for userSubstore', + { + ...validData, + userSubstore: [ + { + address: Buffer.alloc(20, 0), + tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), + availableBalance: oneUnit, + lockedBalances: [{ module: 'pos', amount: oneUnit }], + }, + { + address: Buffer.alloc(20, 0), + tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), + availableBalance: oneUnit, + lockedBalances: [{ module: 'pos', amount: oneUnit }], + }, + ], + }, + 'pair is duplicated', + ], + [ + 'minimum tokenID length not satisfied for supplyStore', + { + ...validData, + supplySubstore: [ + ...validData.supplySubstore, + { + tokenID: Buffer.alloc(1, 0), + totalSupply: oneUnit * BigInt(2), + }, + ], + }, + "tokenID' minLength not satisfied", + ], + [ + 'maximum tokenID length for supplyStore', + { + ...validData, + supplySubstore: [ + ...validData.supplySubstore, + { + tokenID: Buffer.alloc(10, 0), + totalSupply: oneUnit * BigInt(2), + }, + ], + }, + "tokenID' maxLength exceeded", + ], [ 'Duplicate supply store', { @@ -219,6 +300,66 @@ export const invalidGenesisAssets 
= [ }, 'SupplySubstore must be sorted by tokenID', ], + [ + 'escrowChainID minimum length not satisified for escrowSubstore', + { + ...validData, + escrowSubstore: [ + ...validData.escrowSubstore, + { + escrowChainID: Buffer.from([0, 0, 0]), + tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 1, 0]), + amount: oneUnit, + }, + ], + }, + ".escrowChainID' minLength not satisfied", + ], + [ + 'escrowChainID maximum length not exceeded for escrowSubstore', + { + ...validData, + escrowSubstore: [ + ...validData.escrowSubstore, + { + escrowChainID: Buffer.from([0, 0, 0, 0, 0]), + tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 1, 0]), + amount: oneUnit, + }, + ], + }, + ".escrowChainID' maxLength exceeded", + ], + [ + 'tokenID minimum length not satisfied for escrowSubstore', + { + ...validData, + escrowSubstore: [ + ...validData.escrowSubstore, + { + escrowChainID: Buffer.from([0, 0, 0, 0, 0]), + tokenID: Buffer.from([0, 0, 0, 0, 0]), + amount: oneUnit, + }, + ], + }, + ".tokenID' minLength not satisfied", + ], + [ + 'tokenID maximum length exceeded for escrowSubstore', + { + ...validData, + escrowSubstore: [ + ...validData.escrowSubstore, + { + escrowChainID: Buffer.from([0, 0, 0, 0, 0]), + tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), + amount: oneUnit, + }, + ], + }, + ".tokenID' maxLength exceeded", + ], [ 'Duplicate escrow store', { @@ -316,6 +457,58 @@ export const invalidGenesisAssets = [ }, 'Stored total supply is non zero but cannot be computed', ], + [ + 'chainID minLength not satisfied for supportedTOkensSubstore', + { + ...validData, + supportedTokensSubstore: [ + { + chainID: Buffer.from([0, 0]), + supportedTokenIDs: [], + }, + ], + }, + "chainID' minLength not satisfied", + ], + [ + 'chainID maxLength exceeded for supportedTOkensSubstore', + { + ...validData, + supportedTokensSubstore: [ + { + chainID: Buffer.from([0, 0, 0, 0, 0]), + supportedTokenIDs: [], + }, + ], + }, + "chainID' maxLength exceeded", + ], + [ + 'tokenID minLength not satisfied for 
supportedTOkensSubstore', + { + ...validData, + supportedTokensSubstore: [ + { + chainID: Buffer.from([0, 0, 0, 2]), + supportedTokenIDs: [Buffer.from([1, 0])], + }, + ], + }, + "supportedTokenIDs.0' minLength not satisfied", + ], + [ + 'tokenID maxLength exceeded for supportedTOkensSubstore', + { + ...validData, + supportedTokensSubstore: [ + { + chainID: Buffer.from([0, 0, 0, 2]), + supportedTokenIDs: [Buffer.from([1, 0, 0, 0, 0, 0, 0, 0, 0])], + }, + ], + }, + "supportedTokenIDs.0' maxLength exceeded", + ], [ 'Supported tokens store has duplicate chainID on supported ID', { @@ -369,7 +562,7 @@ export const invalidGenesisAssets = [ { chainID: Buffer.from([0, 0, 0, 4]), supportedTokenIDs: [ - Buffer.from([0, 0, 0, 4, 0, 0, 0, 0]), + Buffer.from([0, 0, 0, 4, 0, 0, 0, 1]), Buffer.from([0, 0, 0, 4, 0, 0, 0, 0]), ], }, diff --git a/framework/test/unit/modules/token/stores/supported_tokens.spec.ts b/framework/test/unit/modules/token/stores/supported_tokens.spec.ts index bbeed6b6373..abcc09d8b3c 100644 --- a/framework/test/unit/modules/token/stores/supported_tokens.spec.ts +++ b/framework/test/unit/modules/token/stores/supported_tokens.spec.ts @@ -96,6 +96,12 @@ describe('SupportedTokensStore', () => { store.isSupported(context, Buffer.from([1, 1, 1, 1, 0, 0, 0, 0])), ).resolves.toBeTrue(); }); + + it('should return true if all tokens are supported', async () => { + await store.supportAll(context); + + await expect(store.isSupported(context, Buffer.alloc(8))).resolves.toBeTrue(); + }); }); describe('supportAll', () => { @@ -151,6 +157,21 @@ describe('SupportedTokensStore', () => { await expect( store.isSupported(context, Buffer.from([2, 0, 0, 0, 0, 0, 0, 0])), ).resolves.toBeTrue(); + + await expect(store.get(context, Buffer.from([2, 0, 0, 0]))).resolves.toEqual({ + supportedTokenIDs: [], + }); + }); + + it('should update data with empty list if tokens are supported', async () => { + const chainID = Buffer.from([2, 0, 0, 0]); + const tokenID = Buffer.concat([chainID, 
Buffer.from([1, 1, 1, 1])]); + + await store.set(context, chainID, { supportedTokenIDs: [tokenID] }); + + await store.supportChain(context, chainID); + + await expect(store.get(context, chainID)).resolves.toEqual({ supportedTokenIDs: [] }); }); }); @@ -252,16 +273,18 @@ describe('SupportedTokensStore', () => { describe('removeSupportForToken', () => { it('should reject if chain is native', async () => { - await store.set(context, Buffer.from([1, 1, 1, 1]), { - supportedTokenIDs: [Buffer.from([1, 1, 1, 1, 0, 0, 0, 0])], - }); - await expect( store.removeSupportForToken(context, Buffer.concat([ownChainID, Buffer.alloc(4)])), ).rejects.toThrow('Cannot remove support for LSK or native token.'); }); - it('should not do anything if all tokens are supported', async () => { + it('should reject if token is LSK', async () => { + await expect( + store.removeSupportForToken(context, Buffer.from([1, 0, 0, 0, 0, 0, 0, 0])), + ).rejects.toThrow('Cannot remove support for LSK or native token.'); + }); + + it('should reject if all tokens are supported', async () => { await store.set(context, ALL_SUPPORTED_TOKENS_KEY, { supportedTokenIDs: [] }); const tokenID = Buffer.from([2, 0, 0, 0, 1, 0, 0, 0]); await expect(store.removeSupportForToken(context, tokenID)).rejects.toThrow( @@ -271,7 +294,7 @@ describe('SupportedTokensStore', () => { await expect(store.allSupported(context)).resolves.toBeTrue(); }); - it('should remove data if only the tokenID removed is supported', async () => { + it('should remove chain from supported tokens if the only supported tokenID is removed', async () => { const tokenID = Buffer.from([1, 1, 1, 1, 1, 0, 0, 0]); await store.set(context, Buffer.from([1, 1, 1, 1]), { supportedTokenIDs: [tokenID], @@ -281,7 +304,7 @@ describe('SupportedTokensStore', () => { await expect(store.has(context, Buffer.from([1, 1, 1, 1]))).resolves.toBeFalse(); }); - it('should remove data if the tokenID and keep other supported tokens', async () => { + it('should remove supported 
tokenID and keep other supported tokens', async () => { const tokenID = Buffer.from([1, 1, 1, 1, 1, 0, 0, 0]); await store.set(context, Buffer.from([1, 1, 1, 1]), { supportedTokenIDs: [ @@ -301,10 +324,37 @@ describe('SupportedTokensStore', () => { }); }); - it('should return undefined if support does not exist', async () => { + it('should not modify supported tokens store if a token that is not supported is the input', async () => { + const chainID = Buffer.from([1, 1, 1, 1]); + const notSupportedToken = Buffer.concat([chainID, Buffer.from([1, 0, 0, 0])]); + + const supportedTokensStoreState = { + supportedTokenIDs: [ + Buffer.concat([chainID, Buffer.from([0, 0, 1, 1])]), + Buffer.concat([chainID, Buffer.from([1, 0, 1, 1])]), + ], + }; + + await store.set(context, chainID, supportedTokensStoreState); + + await store.removeSupportForToken(context, notSupportedToken); + + await expect(store.get(context, chainID)).resolves.toEqual(supportedTokensStoreState); + }); + + it('should not modify store if support does not exist', async () => { await expect( store.removeSupportForToken(context, Buffer.from([1, 1, 1, 1, 1, 0, 0, 0])), ).resolves.toBeUndefined(); + + const chainID = Buffer.from([1, 1, 1, 1]); + const tokenID = Buffer.from([1, 0, 0, 0]); + + await expect( + store.removeSupportForToken(context, Buffer.concat([chainID, tokenID])), + ).resolves.toBeUndefined(); + + await expect(store.has(context, chainID)).resolves.toBeFalse(); }); it('should reject if the supported tokens array length is 0', async () => { @@ -316,15 +366,5 @@ describe('SupportedTokensStore', () => { store.removeSupportForToken(context, Buffer.from([1, 1, 1, 1, 1, 0, 0, 0])), ).rejects.toThrow('All tokens from the specified chain are supported.'); }); - - it('should remove token from supported tokens if a token with value tokenID exists', async () => { - const tokenID = Buffer.from([1, 1, 1, 1, 1, 0, 0, 0]); - await store.set(context, Buffer.from([1, 1, 1, 1]), { - supportedTokenIDs: [tokenID], - 
}); - - await expect(store.removeSupportForToken(context, tokenID)).resolves.toBeUndefined(); - await expect(store.has(context, Buffer.from([1, 1, 1, 1]))).resolves.toBeFalse(); - }); }); }); From e9e7f2e5321d9ead5c15d4091a65fa7d33b2600d Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 1 Jun 2023 16:46:57 +0200 Subject: [PATCH 039/170] Transfer NFT Command (#8503) * :bug: Registers NFT stores * :seedling: Adds createNFTEntry and transferInternal to NFT module * :seedling: Adds getNFTOwner and getLockingModule to NFTMethods * :seedling: Adds TransferCommand to NFT module * :seedling: Adds InternalMethod#ceateUserEntry * :bug: Updates InternalMethod#transferInternal to create user entry for recipientAddress * :recycle: Removes redundant Params type * :recycle: :memo: Adds NFTAttributes interface * :recycle: tests for TransferCommand * :recycle: tests for InternalMethod * :recycle: Method#getLockingModule * :recycle: tests for Method * :recycle: Updates dependencies for InternalMethod and Method --- framework/src/modules/nft/commands/.gitkeep | 0 .../src/modules/nft/commands/transfer.ts | 91 ++++++ framework/src/modules/nft/constants.ts | 1 + framework/src/modules/nft/internal_method.ts | 69 ++++- framework/src/modules/nft/method.ts | 43 ++- framework/src/modules/nft/module.ts | 13 +- framework/src/modules/nft/schemas.ts | 28 ++ framework/src/modules/nft/stores/nft.ts | 10 +- .../modules/nft/commands/transfer.spec.ts | 273 ++++++++++++++++++ .../unit/modules/nft/internal_method.spec.ts | 153 ++++++++++ .../test/unit/modules/nft/method.spec.ts | 104 +++++++ 11 files changed, 770 insertions(+), 15 deletions(-) delete mode 100644 framework/src/modules/nft/commands/.gitkeep create mode 100644 framework/src/modules/nft/commands/transfer.ts create mode 100644 framework/test/unit/modules/nft/commands/transfer.spec.ts create mode 100644 framework/test/unit/modules/nft/internal_method.spec.ts create mode 100644 
framework/test/unit/modules/nft/method.spec.ts diff --git a/framework/src/modules/nft/commands/.gitkeep b/framework/src/modules/nft/commands/.gitkeep deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/framework/src/modules/nft/commands/transfer.ts b/framework/src/modules/nft/commands/transfer.ts new file mode 100644 index 00000000000..85a984c00a2 --- /dev/null +++ b/framework/src/modules/nft/commands/transfer.ts @@ -0,0 +1,91 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { validator } from '@liskhq/lisk-validator'; +import { + CommandExecuteContext, + CommandVerifyContext, + VerificationResult, + VerifyStatus, +} from '../../../state_machine'; +import { BaseCommand } from '../../base_command'; +import { transferParamsSchema } from '../schemas'; +import { NFTStore } from '../stores/nft'; +import { NFTMethod } from '../method'; +import { LENGTH_CHAIN_ID, NFT_NOT_LOCKED } from '../constants'; +import { InternalMethod } from '../internal_method'; + +export interface Params { + nftID: Buffer; + recipientAddress: Buffer; + data: string; +} + +export class TransferCommand extends BaseCommand { + public schema = transferParamsSchema; + private _method!: NFTMethod; + private _internalMethod!: InternalMethod; + + public init(args: { method: NFTMethod; internalMethod: InternalMethod }) { + this._method = args.method; + this._internalMethod = args.internalMethod; + } + + public async verify(context: CommandVerifyContext): Promise { + const { params } = context; + + validator.validate(this.schema, params); + + 
const nftStore = this.stores.get(NFTStore); + + const nftExists = await nftStore.has(context, params.nftID); + + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + + const owner = await this._method.getNFTOwner(context.getMethodContext(), params.nftID); + + if (owner.length === LENGTH_CHAIN_ID) { + throw new Error('NFT is escrowed to another chain'); + } + + if (!owner.equals(context.transaction.senderAddress)) { + throw new Error('Transfer not initiated by the NFT owner'); + } + + const lockingModule = await this._method.getLockingModule( + context.getMethodContext(), + params.nftID, + ); + + if (lockingModule !== NFT_NOT_LOCKED) { + throw new Error('Locked NFTs cannot be transferred'); + } + + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._internalMethod.transferInternal( + context.getMethodContext(), + params.recipientAddress, + params.nftID, + ); + } +} diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index c0cbc168a98..323e8dfc67f 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -18,6 +18,7 @@ export const LENGTH_COLLECTION_ID = 4; export const MIN_LENGTH_MODULE_NAME = 1; export const MAX_LENGTH_MODULE_NAME = 32; export const LENGTH_ADDRESS = 20; +export const NFT_NOT_LOCKED = 'nft'; export const enum NftEventResult { RESULT_SUCCESSFUL = 0, diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index 28c3ed9e09c..16f21c62cb1 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -13,13 +13,80 @@ */ import { BaseMethod } from '../base_method'; -import { ModuleConfig } from './types'; +import { NFTStore, NFTAttributes } from './stores/nft'; +import { InteroperabilityMethod, ModuleConfig } from './types'; +import { 
MethodContext } from '../../state_machine'; +import { TransferEvent } from './events/transfer'; +import { UserStore } from './stores/user'; +import { NFT_NOT_LOCKED } from './constants'; +import { NFTMethod } from './method'; export class InternalMethod extends BaseMethod { // @ts-expect-error TODO: unused error. Remove when implementing. private _config!: ModuleConfig; + // @ts-expect-error TODO: unused error. Remove when implementing. + private _method!: NFTMethod; + + // @ts-expect-error TODO: unused error. Remove when implementing. + private _interoperabilityMethod!: InteroperabilityMethod; + public init(config: ModuleConfig): void { this._config = config; } + + public addDependencies(method: NFTMethod, interoperabilityMethod: InteroperabilityMethod) { + this._method = method; + this._interoperabilityMethod = interoperabilityMethod; + } + + public async createUserEntry( + methodContext: MethodContext, + address: Buffer, + nftID: Buffer, + ): Promise { + const userStore = this.stores.get(UserStore); + + await userStore.set(methodContext, userStore.getKey(address, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + } + + public async createNFTEntry( + methodContext: MethodContext, + address: Buffer, + nftID: Buffer, + attributesArray: NFTAttributes[], + ): Promise { + const nftStore = this.stores.get(NFTStore); + await nftStore.save(methodContext, nftID, { + owner: address, + attributesArray, + }); + } + + public async transferInternal( + methodContext: MethodContext, + recipientAddress: Buffer, + nftID: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + const userStore = this.stores.get(UserStore); + + const data = await nftStore.get(methodContext, nftID); + const senderAddress = data.owner; + + data.owner = recipientAddress; + + await nftStore.set(methodContext, nftID, data); + + await userStore.del(methodContext, userStore.getKey(senderAddress, nftID)); + await this.createUserEntry(methodContext, recipientAddress, nftID); + + 
this.events.get(TransferEvent).log(methodContext, { + senderAddress, + recipientAddress, + nftID, + }); + } } diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 16b611a5766..cccadfa8523 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -13,25 +13,52 @@ */ import { BaseMethod } from '../base_method'; import { InteroperabilityMethod, ModuleConfig } from './types'; -import { InternalMethod } from './internal_method'; +import { NFTStore } from './stores/nft'; +import { ImmutableMethodContext } from '../../state_machine'; +import { LENGTH_CHAIN_ID } from './constants'; +import { UserStore } from './stores/user'; export class NFTMethod extends BaseMethod { // @ts-expect-error TODO: unused error. Remove when implementing. private _config!: ModuleConfig; // @ts-expect-error TODO: unused error. Remove when implementing. private _interoperabilityMethod!: InteroperabilityMethod; - // @ts-expect-error TODO: unused error. Remove when implementing. 
- private _internalMethod!: InternalMethod; public init(config: ModuleConfig): void { this._config = config; } - public addDependencies( - interoperabilityMethod: InteroperabilityMethod, - internalMethod: InternalMethod, - ) { + public addDependencies(interoperabilityMethod: InteroperabilityMethod) { this._interoperabilityMethod = interoperabilityMethod; - this._internalMethod = internalMethod; + } + + public async getNFTOwner(methodContext: ImmutableMethodContext, nftID: Buffer): Promise { + const nftStore = this.stores.get(NFTStore); + + const nftExists = await nftStore.has(methodContext, nftID); + + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + + const data = await nftStore.get(methodContext, nftID); + + return data.owner; + } + + public async getLockingModule( + methodContext: ImmutableMethodContext, + nftID: Buffer, + ): Promise { + const owner = await this.getNFTOwner(methodContext, nftID); + + if (owner.length === LENGTH_CHAIN_ID) { + throw new Error('NFT is escrowed to another chain'); + } + + const userStore = this.stores.get(UserStore); + const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); + + return userData.lockingModule; } } diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 24fc41e4073..62dd39ab424 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -34,6 +34,10 @@ import { TransferCrossChainEvent } from './events/transfer_cross_chain'; import { UnlockEvent } from './events/unlock'; import { InternalMethod } from './internal_method'; import { NFTMethod } from './method'; +import { EscrowStore } from './stores/escrow'; +import { NFTStore } from './stores/nft'; +import { SupportedNFTsStore } from './stores/supported_nfts'; +import { UserStore } from './stores/user'; import { FeeMethod } from './types'; export class NFTModule extends BaseInteroperableModule { @@ -42,7 +46,7 @@ export class NFTModule 
extends BaseInteroperableModule { public crossChainMethod = new NFTInteroperableMethod(this.stores, this.events); private readonly _internalMethod = new InternalMethod(this.stores, this.events); - // @ts-expect-error TODO: unused error. Remove when implementing. + private _interoperabilityMethod!: InteroperabilityMethod; public commands = []; @@ -74,11 +78,16 @@ export class NFTModule extends BaseInteroperableModule { AllNFTsFromCollectionSupportRemovedEvent, new AllNFTsFromCollectionSupportRemovedEvent(this.name), ); + this.stores.register(NFTStore, new NFTStore(this.name, 1)); + this.stores.register(UserStore, new UserStore(this.name, 2)); + this.stores.register(EscrowStore, new EscrowStore(this.name, 3)); + this.stores.register(SupportedNFTsStore, new SupportedNFTsStore(this.name, 4)); } public addDependencies(interoperabilityMethod: InteroperabilityMethod, _feeMethod: FeeMethod) { this._interoperabilityMethod = interoperabilityMethod; - this.method.addDependencies(interoperabilityMethod, this._internalMethod); + this.method.addDependencies(interoperabilityMethod); + this._internalMethod.addDependencies(this.method, this._interoperabilityMethod); this.crossChainMethod.addDependencies(interoperabilityMethod); } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 206ba71de27..2c0cee0da4e 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -11,3 +11,31 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ + +import { MAX_DATA_LENGTH } from '../token/constants'; +import { LENGTH_NFT_ID } from './constants'; + +export const transferParamsSchema = { + $id: '/lisk/nftTransferParams', + type: 'object', + required: ['nftID', 'recipientAddress', 'data'], + properties: { + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 1, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 2, + }, + data: { + dataType: 'string', + minLength: 0, + maxLength: MAX_DATA_LENGTH, + fieldNumber: 3, + }, + }, +}; diff --git a/framework/src/modules/nft/stores/nft.ts b/framework/src/modules/nft/stores/nft.ts index c1e8ce23244..ec931e7be7b 100644 --- a/framework/src/modules/nft/stores/nft.ts +++ b/framework/src/modules/nft/stores/nft.ts @@ -15,12 +15,14 @@ import { BaseStore, StoreGetter } from '../../base_store'; import { MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from '../constants'; +export interface NFTAttributes { + module: string; + attributes: Buffer; +} + export interface NFTStoreData { owner: Buffer; - attributesArray: { - module: string; - attributes: Buffer; - }[]; + attributesArray: NFTAttributes[]; } export const nftStoreSchema = { diff --git a/framework/test/unit/modules/nft/commands/transfer.spec.ts b/framework/test/unit/modules/nft/commands/transfer.spec.ts new file mode 100644 index 00000000000..a4cae3f8054 --- /dev/null +++ b/framework/test/unit/modules/nft/commands/transfer.spec.ts @@ -0,0 +1,273 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { Transaction } from '@liskhq/lisk-chain'; +import { codec } from '@liskhq/lisk-codec'; +import { utils, address } from '@liskhq/lisk-cryptography'; +import { NFTModule } from '../../../../../src/modules/nft/module'; +import { TransferCommand, Params } from '../../../../../src/modules/nft/commands/transfer'; +import { createTransactionContext } from '../../../../../src/testing'; +import { transferParamsSchema } from '../../../../../src/modules/nft/schemas'; +import { + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + LENGTH_NFT_ID, + NFT_NOT_LOCKED, +} from '../../../../../src/modules/nft/constants'; +import { NFTAttributes, NFTStore } from '../../../../../src/modules/nft/stores/nft'; +import { createStoreGetter } from '../../../../../src/testing/utils'; +import { VerifyStatus } from '../../../../../src'; +import { InternalMethod } from '../../../../../src/modules/nft/internal_method'; +import { NFTMethod } from '../../../../../src/modules/nft/method'; +import { UserStore } from '../../../../../src/modules/nft/stores/user'; +import { EventQueue } from '../../../../../src/state_machine'; +import { TransferEvent } from '../../../../../src/modules/nft/events/transfer'; + +describe('Transfer command', () => { + const module = new NFTModule(); + const method = new NFTMethod(module.stores, module.events); + const internalMethod = new InternalMethod(module.stores, module.events); + let command: TransferCommand; + + const validParams: Params = { + nftID: Buffer.alloc(LENGTH_NFT_ID, 1), + recipientAddress: utils.getRandomBytes(20), + data: '', + }; + + const checkEventResult = ( + eventQueue: EventQueue, + length: number, + EventClass: any, + index: number, + expectedResult: any, + result: any = 0, + ) => { + expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + + const eventData = codec.decode>( + new EventClass('nft').schema, + eventQueue.getEvents()[index].toObject().data, + 
); + + expect(eventData).toEqual({ ...expectedResult, result }); + }; + + const createTransactionContextWithOverridingParams = ( + params: Record, + txParams: Record = {}, + ) => + createTransactionContext({ + transaction: new Transaction({ + module: module.name, + command: 'transfer', + fee: BigInt(5000000), + nonce: BigInt(0), + senderPublicKey: utils.getRandomBytes(32), + params: codec.encode(transferParamsSchema, { + ...validParams, + ...params, + }), + signatures: [utils.getRandomBytes(64)], + ...txParams, + }), + }); + + const nftStore = module.stores.get(NFTStore); + const userStore = module.stores.get(UserStore); + + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const senderPublicKey = utils.getRandomBytes(32); + const owner = address.getAddressFromPublicKey(senderPublicKey); + + beforeEach(() => { + command = new TransferCommand(module.stores, module.events); + command.init({ method, internalMethod }); + }); + + describe('verify', () => { + it('should fail if nftID does not have valid length', async () => { + const nftMinLengthContext = createTransactionContextWithOverridingParams({ + nftID: Buffer.alloc(LENGTH_NFT_ID - 1, 1), + }); + + const nftMaxLengthContext = createTransactionContextWithOverridingParams({ + nftID: Buffer.alloc(LENGTH_NFT_ID + 1, 1), + }); + + await expect( + command.verify(nftMinLengthContext.createCommandVerifyContext(transferParamsSchema)), + ).rejects.toThrow("'.nftID' minLength not satisfied"); + + await expect( + command.verify(nftMaxLengthContext.createCommandExecuteContext(transferParamsSchema)), + ).rejects.toThrow("'.nftID' maxLength exceeded"); + }); + + it('should fail if recipientAddress is not 20 bytes', async () => { + const recipientAddressIncorrectLengthContext = createTransactionContextWithOverridingParams({ + recipientAddress: utils.getRandomBytes(22), + }); + + await expect( + command.verify( + 
recipientAddressIncorrectLengthContext.createCommandVerifyContext(transferParamsSchema), + ), + ).rejects.toThrow("'.recipientAddress' address length invalid"); + }); + + it('should fail if data exceeds 64 characters', async () => { + const dataIncorrectLengthContext = createTransactionContextWithOverridingParams({ + data: '1'.repeat(65), + }); + + await expect( + command.verify(dataIncorrectLengthContext.createCommandVerifyContext(transferParamsSchema)), + ).rejects.toThrow("'.data' must NOT have more than 64 characters"); + }); + + it('should fail if nftID does not exist', async () => { + const nftIDNotExistingContext = createTransactionContextWithOverridingParams({ + nftID: Buffer.alloc(LENGTH_NFT_ID, 0), + }); + + await expect( + command.verify(nftIDNotExistingContext.createCommandVerifyContext(transferParamsSchema)), + ).rejects.toThrow('NFT substore entry does not exist'); + }); + + it('should fail if NFT is escrowed to another chain', async () => { + const nftEscrowedContext = createTransactionContextWithOverridingParams({ + nftID, + }); + + await nftStore.set(createStoreGetter(nftEscrowedContext.stateStore), nftID, { + owner: chainID, + attributesArray: [], + }); + + await expect( + command.verify(nftEscrowedContext.createCommandVerifyContext(transferParamsSchema)), + ).rejects.toThrow('NFT is escrowed to another chain'); + }); + + it('should fail if owner of the NFT is not the sender', async () => { + const nftIncorrectOwnerContext = createTransactionContextWithOverridingParams({ + nftID, + }); + + await nftStore.save(createStoreGetter(nftIncorrectOwnerContext.stateStore), nftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + await expect( + command.verify(nftIncorrectOwnerContext.createCommandVerifyContext(transferParamsSchema)), + ).rejects.toThrow('Transfer not initiated by the NFT owner'); + }); + + it('should fail if NFT exists and is locked by its owner', async () => { + const lockedNFTContext = 
createTransactionContextWithOverridingParams( + { nftID }, + { senderPublicKey }, + ); + + await nftStore.save(createStoreGetter(lockedNFTContext.stateStore), nftID, { + owner, + attributesArray: [], + }); + + await userStore.set( + createStoreGetter(lockedNFTContext.stateStore), + userStore.getKey(owner, nftID), + { + lockingModule: 'token', + }, + ); + + await expect( + command.verify(lockedNFTContext.createCommandVerifyContext(transferParamsSchema)), + ).rejects.toThrow('Locked NFTs cannot be transferred'); + }); + + it('should verify if unlocked NFT exists and its owner is performing the transfer', async () => { + const validContext = createTransactionContextWithOverridingParams( + { nftID }, + { senderPublicKey }, + ); + + await nftStore.save(createStoreGetter(validContext.stateStore), nftID, { + owner, + attributesArray: [], + }); + + await userStore.set( + createStoreGetter(validContext.stateStore), + userStore.getKey(owner, nftID), + { + lockingModule: NFT_NOT_LOCKED, + }, + ); + + await expect( + command.verify(validContext.createCommandVerifyContext(transferParamsSchema)), + ).resolves.toEqual({ status: VerifyStatus.OK }); + }); + }); + + describe('execute', () => { + it('should transfer NFT and emit Transfer event', async () => { + const senderAddress = owner; + const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); + const attributesArray: NFTAttributes[] = []; + + const validContext = createTransactionContextWithOverridingParams( + { nftID, recipientAddress }, + { senderPublicKey }, + ); + + await nftStore.save(createStoreGetter(validContext.stateStore), nftID, { + owner: senderAddress, + attributesArray, + }); + + await userStore.set( + createStoreGetter(validContext.stateStore), + userStore.getKey(senderAddress, nftID), + { + lockingModule: NFT_NOT_LOCKED, + }, + ); + + await expect( + command.execute(validContext.createCommandExecuteContext(transferParamsSchema)), + ).resolves.toBeUndefined(); + + await expect( + 
nftStore.get(createStoreGetter(validContext.stateStore), nftID), + ).resolves.toEqual({ + owner: recipientAddress, + attributesArray, + }); + + checkEventResult(validContext.eventQueue, 1, TransferEvent, 0, { + senderAddress, + recipientAddress, + nftID, + }); + }); + }); +}); diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts new file mode 100644 index 00000000000..2e46a80553c --- /dev/null +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -0,0 +1,153 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { codec } from '@liskhq/lisk-codec'; +import { NFTModule } from '../../../../src/modules/nft/module'; +import { InternalMethod } from '../../../../src/modules/nft/internal_method'; +import { EventQueue, createMethodContext } from '../../../../src/state_machine'; +import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; +import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; +import { + LENGTH_ADDRESS, + LENGTH_NFT_ID, + NFT_NOT_LOCKED, +} from '../../../../src/modules/nft/constants'; +import { NFTStore } from '../../../../src/modules/nft/stores/nft'; +import { MethodContext } from '../../../../src/state_machine/method_context'; +import { TransferEvent } from '../../../../src/modules/nft/events/transfer'; +import { UserStore } from '../../../../src/modules/nft/stores/user'; + +describe('InternalMethod', () => { + const module = new NFTModule(); + const internalMethod = new InternalMethod(module.stores, module.events); + let methodContext!: MethodContext; + + const checkEventResult = ( + eventQueue: EventQueue, + length: number, + EventClass: any, + index: number, + expectedResult: any, + result: any = 0, + ) => { + expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + + const eventData = codec.decode>( + new EventClass('nft').schema, + eventQueue.getEvents()[index].toObject().data, + ); + + expect(eventData).toEqual({ ...expectedResult, result }); + }; + + const userStore = module.stores.get(UserStore); + const nftStore = module.stores.get(NFTStore); + + const address = utils.getRandomBytes(LENGTH_ADDRESS); + const senderAddress = utils.getRandomBytes(LENGTH_ADDRESS); + const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + beforeEach(() => { + methodContext = 
createMethodContext({ + stateStore: new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()), + eventQueue: new EventQueue(0), + contextStore: new Map(), + }); + }); + + describe('createNFTEntry', () => { + it('should create an entry in NFStore with attributes sorted by module', async () => { + const unsortedAttributesArray = [ + { + module: 'token', + attributes: Buffer.alloc(8, 1), + }, + { + module: 'pos', + attributes: Buffer.alloc(8, 1), + }, + ]; + + const sortedAttributesArray = unsortedAttributesArray.sort((a, b) => + a.module.localeCompare(b.module, 'en'), + ); + + await internalMethod.createNFTEntry(methodContext, address, nftID, unsortedAttributesArray); + + await expect(nftStore.get(methodContext, nftID)).resolves.toEqual({ + owner: address, + attributesArray: sortedAttributesArray, + }); + }); + }); + + describe('createUserEntry', () => { + it('should create an entry for an unlocked NFT in UserStore', async () => { + await expect( + internalMethod.createUserEntry(methodContext, address, nftID), + ).resolves.toBeUndefined(); + + await expect(userStore.get(methodContext, userStore.getKey(address, nftID))).resolves.toEqual( + { + lockingModule: NFT_NOT_LOCKED, + }, + ); + }); + }); + + describe('transferInternal', () => { + it('should transfer NFT from sender to recipient and emit Transfer event', async () => { + await module.stores.get(NFTStore).save(methodContext, nftID, { + owner: senderAddress, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await internalMethod.transferInternal(methodContext, recipientAddress, nftID); + + await expect(module.stores.get(NFTStore).get(methodContext, nftID)).resolves.toEqual({ + owner: recipientAddress, + attributesArray: [], + }); + + await expect( + userStore.has(methodContext, userStore.getKey(senderAddress, nftID)), + ).resolves.toBeFalse(); + + await expect( + userStore.get(methodContext, 
userStore.getKey(recipientAddress, nftID)), + ).resolves.toEqual({ + lockingModule: NFT_NOT_LOCKED, + }); + + checkEventResult(methodContext.eventQueue, 1, TransferEvent, 0, { + senderAddress, + recipientAddress, + nftID, + }); + }); + + it('should fail if NFT does not exist', async () => { + await expect( + internalMethod.transferInternal(methodContext, recipientAddress, nftID), + ).rejects.toThrow('does not exist'); + }); + }); +}); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts new file mode 100644 index 00000000000..346f7473a59 --- /dev/null +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -0,0 +1,104 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { NFTMethod } from '../../../../src/modules/nft/method'; +import { NFTModule } from '../../../../src/modules/nft/module'; +import { EventQueue } from '../../../../src/state_machine'; +import { MethodContext, createMethodContext } from '../../../../src/state_machine/method_context'; +import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; +import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; +import { + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + LENGTH_NFT_ID, +} from '../../../../src/modules/nft/constants'; +import { NFTStore } from '../../../../src/modules/nft/stores/nft'; +import { UserStore } from '../../../../src/modules/nft/stores/user'; + +describe('NFTMethods', () => { + const module = new NFTModule(); + const method = new NFTMethod(module.stores, module.events); + + let methodContext!: MethodContext; + + const nftStore = module.stores.get(NFTStore); + const userStore = module.stores.get(UserStore); + + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + let owner: Buffer; + + beforeEach(() => { + owner = utils.getRandomBytes(LENGTH_ADDRESS); + + methodContext = createMethodContext({ + stateStore: new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()), + eventQueue: new EventQueue(0), + contextStore: new Map(), + }); + }); + + describe('getNFTOwner', () => { + it('should fail if NFT does not exist', async () => { + await expect(method.getNFTOwner(methodContext, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + }); + + it('should return the owner if NFT exists', async () => { + await nftStore.save(methodContext, nftID, { + owner, + attributesArray: [], + }); + + await expect(method.getNFTOwner(methodContext, nftID)).resolves.toEqual(owner); + }); + }); + + describe('getLockingModule', () => { + it('should fail if NFT does not exist', async () => { + await 
expect(method.getLockingModule(methodContext, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + }); + + it('should fail if NFT is escrowed', async () => { + owner = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await nftStore.save(methodContext, nftID, { + owner, + attributesArray: [], + }); + + await expect(method.getLockingModule(methodContext, nftID)).rejects.toThrow( + 'NFT is escrowed to another chain', + ); + }); + + it('should return the lockingModule for the owner of the NFT', async () => { + const lockingModule = 'nft'; + + await nftStore.save(methodContext, nftID, { + owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(owner, nftID), { + lockingModule, + }); + + await expect(method.getLockingModule(methodContext, nftID)).resolves.toEqual(lockingModule); + }); + }); +}); From 99b0ed4c1663c98c92dca6ebcc444a77805290d2 Mon Sep 17 00:00:00 2001 From: has5aan Date: Wed, 31 May 2023 14:12:34 +0200 Subject: [PATCH 040/170] :seedling: Adds EscrowStore#getKey --- framework/src/modules/nft/stores/escrow.ts | 4 +++ .../unit/modules/nft/stores/escrow.spec.ts | 36 +++++++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 framework/test/unit/modules/nft/stores/escrow.spec.ts diff --git a/framework/src/modules/nft/stores/escrow.ts b/framework/src/modules/nft/stores/escrow.ts index 719bf0b7fbe..b5d224088bd 100644 --- a/framework/src/modules/nft/stores/escrow.ts +++ b/framework/src/modules/nft/stores/escrow.ts @@ -25,4 +25,8 @@ type EscrowStoreData = Record; export class EscrowStore extends BaseStore { public schema = escrowStoreSchema; + + public getKey(receivingChainID: Buffer, nftID: Buffer): Buffer { + return Buffer.concat([receivingChainID, nftID]); + } } diff --git a/framework/test/unit/modules/nft/stores/escrow.spec.ts b/framework/test/unit/modules/nft/stores/escrow.spec.ts new file mode 100644 index 00000000000..89d27e973af --- /dev/null +++ 
b/framework/test/unit/modules/nft/stores/escrow.spec.ts @@ -0,0 +1,36 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { EscrowStore } from '../../../../../src/modules/nft/stores/escrow'; +import { LENGTH_CHAIN_ID, LENGTH_NFT_ID } from '../../../../../src/modules/nft/constants'; + +describe('EscrowStore', () => { + let store: EscrowStore; + + beforeEach(() => { + store = new EscrowStore('NFT', 5); + }); + + describe('getKey', () => { + it('should concatenate the provided receivingChainID and nftID', () => { + const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + expect(store.getKey(receivingChainID, nftID)).toEqual( + Buffer.concat([receivingChainID, nftID]), + ); + }); + }); +}); From 332c8fa942fd1d09ff8712aea71d23999de80259 Mon Sep 17 00:00:00 2001 From: has5aan Date: Wed, 31 May 2023 14:14:46 +0200 Subject: [PATCH 041/170] :bug: Fixes schema for DestroyEvent --- framework/src/modules/nft/events/destroy.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts index 1294f466ba9..c4c6e0e1923 100644 --- a/framework/src/modules/nft/events/destroy.ts +++ b/framework/src/modules/nft/events/destroy.ts @@ -33,7 +33,7 @@ export const createEventSchema = { nftID: { dataType: 'bytes', minLength: LENGTH_NFT_ID, - maxLenght: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, fieldNumber: 2, }, result: { From 
138fbecb62f85d7d9385781021eeb8a1714e6d68 Mon Sep 17 00:00:00 2001 From: has5aan Date: Wed, 31 May 2023 14:19:58 +0200 Subject: [PATCH 042/170] :recycle: Adds result parameter to DestroyEvent#log --- framework/src/modules/nft/events/destroy.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts index c4c6e0e1923..3475c03a869 100644 --- a/framework/src/modules/nft/events/destroy.ts +++ b/framework/src/modules/nft/events/destroy.ts @@ -46,10 +46,11 @@ export const createEventSchema = { export class DestroyEvent extends BaseEvent { public schema = createEventSchema; - public log(ctx: EventQueuer, data: DestroyEventData): void { - this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ - data.address, - data.nftID, - ]); + public log( + ctx: EventQueuer, + data: DestroyEventData, + result: NftEventResult = NftEventResult.RESULT_SUCCESSFUL, + ): void { + this.add(ctx, { ...data, result }, [data.address, data.nftID]); } } From 029710afd9aa770ada31fb2dc7e36722868d12d4 Mon Sep 17 00:00:00 2001 From: has5aan Date: Thu, 1 Jun 2023 14:03:09 +0200 Subject: [PATCH 043/170] :seedling: Adds NFTMethod.getChainID --- framework/src/modules/nft/method.ts | 10 +++++++++- framework/test/unit/modules/nft/method.spec.ts | 12 ++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index cccadfa8523..61129d21395 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -15,7 +15,7 @@ import { BaseMethod } from '../base_method'; import { InteroperabilityMethod, ModuleConfig } from './types'; import { NFTStore } from './stores/nft'; import { ImmutableMethodContext } from '../../state_machine'; -import { LENGTH_CHAIN_ID } from './constants'; +import { LENGTH_CHAIN_ID, LENGTH_NFT_ID } from './constants'; import { UserStore } from 
'./stores/user'; export class NFTMethod extends BaseMethod { @@ -32,6 +32,14 @@ export class NFTMethod extends BaseMethod { this._interoperabilityMethod = interoperabilityMethod; } + public getChainID(nftID: Buffer): Buffer { + if (nftID.length !== LENGTH_NFT_ID) { + throw new Error(`NFT ID must have length ${LENGTH_NFT_ID}`); + } + + return nftID.slice(0, LENGTH_CHAIN_ID); + } + public async getNFTOwner(methodContext: ImmutableMethodContext, nftID: Buffer): Promise { const nftStore = this.stores.get(NFTStore); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 346f7473a59..f096aaa8799 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -49,6 +49,18 @@ describe('NFTMethods', () => { }); }); + describe('getChainID', () => { + it('should throw if nftID has invalid length', () => { + expect(() => { + method.getChainID(utils.getRandomBytes(LENGTH_NFT_ID - 1)); + }).toThrow(`NFT ID must have length ${LENGTH_NFT_ID}`); + }); + + it('should return the first bytes of length LENGTH_CHAIN_ID from provided nftID', () => { + expect(method.getChainID(nftID)).toEqual(nftID.slice(0, LENGTH_CHAIN_ID)); + }); + }); + describe('getNFTOwner', () => { it('should fail if NFT does not exist', async () => { await expect(method.getNFTOwner(methodContext, nftID)).rejects.toThrow( From 5936b7b2e6077a82e7d3ba0286dc18cef769c6e0 Mon Sep 17 00:00:00 2001 From: has5aan Date: Thu, 1 Jun 2023 14:16:18 +0200 Subject: [PATCH 044/170] :seedling: Adds NFTMethod.destroy --- framework/src/modules/nft/method.ts | 82 +++++++- .../test/unit/modules/nft/method.spec.ts | 175 +++++++++++++++++- 2 files changed, 254 insertions(+), 3 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 61129d21395..a2f449879de 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -14,9 +14,10 @@ import { 
BaseMethod } from '../base_method'; import { InteroperabilityMethod, ModuleConfig } from './types'; import { NFTStore } from './stores/nft'; -import { ImmutableMethodContext } from '../../state_machine'; -import { LENGTH_CHAIN_ID, LENGTH_NFT_ID } from './constants'; +import { ImmutableMethodContext, MethodContext } from '../../state_machine'; +import { LENGTH_CHAIN_ID, LENGTH_NFT_ID, NFT_NOT_LOCKED, NftEventResult } from './constants'; import { UserStore } from './stores/user'; +import { DestroyEvent } from './events/destroy'; export class NFTMethod extends BaseMethod { // @ts-expect-error TODO: unused error. Remove when implementing. @@ -69,4 +70,81 @@ export class NFTMethod extends BaseMethod { return userData.lockingModule; } + + public async destroy( + methodContext: MethodContext, + address: Buffer, + nftID: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + + const nftExists = await nftStore.has(methodContext, nftID); + + if (!nftExists) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + + throw new Error('NFT substore entry does not exist'); + } + + const owner = await this.getNFTOwner(methodContext, nftID); + + if (!owner.equals(address)) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + + throw new Error('Not initiated by the NFT owner'); + } + + const userStore = this.stores.get(UserStore); + const userKey = userStore.getKey(owner, nftID); + const { lockingModule } = await userStore.get(methodContext, userKey); + + if (lockingModule !== NFT_NOT_LOCKED) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + + throw new Error('Locked NFTs cannot be destroyed'); + } + + if (owner.length === LENGTH_CHAIN_ID) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + 
NftEventResult.RESULT_NFT_ESCROWED, + ); + + throw new Error('NFT is escrowed to another chain'); + } + + await nftStore.del(methodContext, nftID); + + await userStore.del(methodContext, userKey); + + this.events.get(DestroyEvent).log(methodContext, { + address, + nftID, + }); + } } diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index f096aaa8799..8f227a8cb0a 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -12,6 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ +import { codec } from '@liskhq/lisk-codec'; import { utils } from '@liskhq/lisk-cryptography'; import { NFTMethod } from '../../../../src/modules/nft/method'; import { NFTModule } from '../../../../src/modules/nft/module'; @@ -23,11 +24,14 @@ import { LENGTH_ADDRESS, LENGTH_CHAIN_ID, LENGTH_NFT_ID, + NFT_NOT_LOCKED, + NftEventResult, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { UserStore } from '../../../../src/modules/nft/stores/user'; +import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; -describe('NFTMethods', () => { +describe('NFTMethod', () => { const module = new NFTModule(); const method = new NFTMethod(module.stores, module.events); @@ -39,6 +43,25 @@ describe('NFTMethods', () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); let owner: Buffer; + const checkEventResult = ( + eventQueue: EventQueue, + length: number, + EventClass: any, + index: number, + expectedResult: EventDataType, + result: any = 0, + ) => { + expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + + const eventData = codec.decode>( + new EventClass('nft').schema, + eventQueue.getEvents()[index].toObject().data, + ); + + expect(eventData).toEqual({ 
...expectedResult, result }); + }; + beforeEach(() => { owner = utils.getRandomBytes(LENGTH_ADDRESS); @@ -113,4 +136,154 @@ describe('NFTMethods', () => { await expect(method.getLockingModule(methodContext, nftID)).resolves.toEqual(lockingModule); }); }); + + describe('destroy', () => { + let existingNFT: { nftID: any; owner: any }; + let lockedExistingNFT: { nftID: any; owner: any }; + let escrowedNFT: { nftID: any; owner: any }; + + beforeEach(async () => { + existingNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + lockedExistingNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + escrowedNFT = { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + await module.stores.get(NFTStore).save(methodContext, existingNFT.nftID, { + owner: existingNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await module.stores.get(NFTStore).save(methodContext, lockedExistingNFT.nftID, { + owner: lockedExistingNFT.owner, + attributesArray: [], + }); + + await userStore.set( + methodContext, + userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), + { + lockingModule: 'token', + }, + ); + + await module.stores.get(NFTStore).save(methodContext, escrowedNFT.nftID, { + owner: escrowedNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(escrowedNFT.owner, escrowedNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + }); + + it('should fail and emit Destroy event if NFT does not exist', async () => { + const address = utils.getRandomBytes(LENGTH_ADDRESS); + + await expect(method.destroy(methodContext, address, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + 
DestroyEvent, + 0, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should fail and emit Destroy event if NFT is not owned by the provided address', async () => { + const notOwner = utils.getRandomBytes(LENGTH_ADDRESS); + + await expect(method.destroy(methodContext, notOwner, existingNFT.nftID)).rejects.toThrow( + 'Not initiated by the NFT owner', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address: notOwner, + nftID: existingNFT.nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + }); + + it('should fail and emit Destroy event if NFT is locked', async () => { + await expect( + method.destroy(methodContext, lockedExistingNFT.owner, lockedExistingNFT.nftID), + ).rejects.toThrow('Locked NFTs cannot be destroyed'); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address: lockedExistingNFT.owner, + nftID: lockedExistingNFT.nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + }); + + it('should fail and emit Destroy event if NFT is escrowed', async () => { + await expect( + method.destroy(methodContext, escrowedNFT.owner, escrowedNFT.nftID), + ).rejects.toThrow(); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address: escrowedNFT.owner, + nftID: escrowedNFT.nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + }); + + it('should delete NFTStore and UserStore entry and emit Destroy event', async () => { + await expect( + method.destroy(methodContext, existingNFT.owner, existingNFT.nftID), + ).resolves.toBeUndefined(); + + await expect( + module.stores.get(NFTStore).has(methodContext, existingNFT.nftID), + ).resolves.toBeFalse(); + await expect( + module.stores + .get(UserStore) + .has(methodContext, Buffer.concat([existingNFT.owner, escrowedNFT.nftID])), + ).resolves.toBeFalse(); + + checkEventResult(methodContext.eventQueue, 1, DestroyEvent, 0, { + address: existingNFT.owner, + nftID: 
existingNFT.nftID, + }); + }); + }); }); From 55112a883ab089af79bba9e4713dcd8f8005d542 Mon Sep 17 00:00:00 2001 From: has5aan Date: Thu, 1 Jun 2023 14:17:31 +0200 Subject: [PATCH 045/170] :seedling: Adds InternalMethod.createEscrowEntry --- framework/src/modules/nft/internal_method.ts | 11 +++++++++++ .../test/unit/modules/nft/internal_method.spec.ts | 15 +++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index 16f21c62cb1..bad4183c079 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -20,6 +20,7 @@ import { TransferEvent } from './events/transfer'; import { UserStore } from './stores/user'; import { NFT_NOT_LOCKED } from './constants'; import { NFTMethod } from './method'; +import { EscrowStore } from './stores/escrow'; export class InternalMethod extends BaseMethod { // @ts-expect-error TODO: unused error. Remove when implementing. 
@@ -40,6 +41,16 @@ export class InternalMethod extends BaseMethod { this._interoperabilityMethod = interoperabilityMethod; } + public async createEscrowEntry( + methodContext: MethodContext, + receivingChainID: Buffer, + nftID: Buffer, + ): Promise { + const escrowStore = this.stores.get(EscrowStore); + + await escrowStore.set(methodContext, escrowStore.getKey(receivingChainID, nftID), {}); + } + public async createUserEntry( methodContext: MethodContext, address: Buffer, diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 2e46a80553c..8ce8bdb255b 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -21,6 +21,7 @@ import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_ import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { LENGTH_ADDRESS, + LENGTH_CHAIN_ID, LENGTH_NFT_ID, NFT_NOT_LOCKED, } from '../../../../src/modules/nft/constants'; @@ -28,6 +29,7 @@ import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { MethodContext } from '../../../../src/state_machine/method_context'; import { TransferEvent } from '../../../../src/modules/nft/events/transfer'; import { UserStore } from '../../../../src/modules/nft/stores/user'; +import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; describe('InternalMethod', () => { const module = new NFTModule(); @@ -55,6 +57,7 @@ describe('InternalMethod', () => { const userStore = module.stores.get(UserStore); const nftStore = module.stores.get(NFTStore); + const escrowStore = module.stores.get(EscrowStore); const address = utils.getRandomBytes(LENGTH_ADDRESS); const senderAddress = utils.getRandomBytes(LENGTH_ADDRESS); @@ -69,6 +72,18 @@ describe('InternalMethod', () => { }); }); + describe('createEscrowEntry', () => { + it('should create an entry in EscrowStore', async 
() => { + const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await internalMethod.createEscrowEntry(methodContext, receivingChainID, nftID); + + await expect( + escrowStore.get(methodContext, escrowStore.getKey(receivingChainID, nftID)), + ).resolves.toEqual({}); + }); + }); + describe('createNFTEntry', () => { it('should create an entry in NFStore with attributes sorted by module', async () => { const unsortedAttributesArray = [ From 85594ff017294faa2dd2063b00173942226fbd67 Mon Sep 17 00:00:00 2001 From: has5aan Date: Thu, 1 Jun 2023 14:20:12 +0200 Subject: [PATCH 046/170] :recycle: test for InternalMethod --- framework/test/unit/modules/nft/internal_method.spec.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 8ce8bdb255b..3d7d42fc182 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -27,7 +27,7 @@ import { } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { MethodContext } from '../../../../src/state_machine/method_context'; -import { TransferEvent } from '../../../../src/modules/nft/events/transfer'; +import { TransferEvent, TransferEventData } from '../../../../src/modules/nft/events/transfer'; import { UserStore } from '../../../../src/modules/nft/stores/user'; import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; @@ -36,12 +36,12 @@ describe('InternalMethod', () => { const internalMethod = new InternalMethod(module.stores, module.events); let methodContext!: MethodContext; - const checkEventResult = ( + const checkEventResult = ( eventQueue: EventQueue, length: number, EventClass: any, index: number, - expectedResult: any, + expectedResult: EventDataType, result: any = 0, ) => { 
expect(eventQueue.getEvents()).toHaveLength(length); @@ -152,7 +152,7 @@ describe('InternalMethod', () => { lockingModule: NFT_NOT_LOCKED, }); - checkEventResult(methodContext.eventQueue, 1, TransferEvent, 0, { + checkEventResult(methodContext.eventQueue, 1, TransferEvent, 0, { senderAddress, recipientAddress, nftID, From e78c4756305d70f09198bbd65bc8b57e621a4c3f Mon Sep 17 00:00:00 2001 From: has5aan Date: Thu, 1 Jun 2023 14:47:13 +0200 Subject: [PATCH 047/170] :seedling: Adds InternalMethod.transferCrossChainInternal --- framework/src/modules/nft/constants.ts | 4 +- framework/src/modules/nft/internal_method.ts | 80 +++- framework/src/modules/nft/schemas.ts | 52 ++- framework/src/modules/nft/types.ts | 4 +- .../unit/modules/nft/internal_method.spec.ts | 347 +++++++++++++++++- 5 files changed, 477 insertions(+), 10 deletions(-) diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index 323e8dfc67f..a63d4095872 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -18,7 +18,9 @@ export const LENGTH_COLLECTION_ID = 4; export const MIN_LENGTH_MODULE_NAME = 1; export const MAX_LENGTH_MODULE_NAME = 32; export const LENGTH_ADDRESS = 20; -export const NFT_NOT_LOCKED = 'nft'; +export const MODULE_NAME_NFT = 'nft'; +export const NFT_NOT_LOCKED = MODULE_NAME_NFT; +export const CROSS_CHAIN_COMMAND_NAME_TRANSFER = 'crossChainTransfer'; export const enum NftEventResult { RESULT_SUCCESSFUL = 0, diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index bad4183c079..da065eb78e8 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -12,24 +12,23 @@ * Removal or modification of this copyright notice is prohibited. 
*/ +import { codec } from '@liskhq/lisk-codec'; import { BaseMethod } from '../base_method'; import { NFTStore, NFTAttributes } from './stores/nft'; import { InteroperabilityMethod, ModuleConfig } from './types'; import { MethodContext } from '../../state_machine'; import { TransferEvent } from './events/transfer'; import { UserStore } from './stores/user'; -import { NFT_NOT_LOCKED } from './constants'; +import { CROSS_CHAIN_COMMAND_NAME_TRANSFER, MODULE_NAME_NFT, NFT_NOT_LOCKED } from './constants'; import { NFTMethod } from './method'; import { EscrowStore } from './stores/escrow'; +import { TransferCrossChainEvent } from './events/transfer_cross_chain'; +import { CCM_STATUS_OK } from '../token/constants'; +import { crossChainNFTTransferMessageParamsSchema } from './schemas'; export class InternalMethod extends BaseMethod { - // @ts-expect-error TODO: unused error. Remove when implementing. private _config!: ModuleConfig; - - // @ts-expect-error TODO: unused error. Remove when implementing. private _method!: NFTMethod; - - // @ts-expect-error TODO: unused error. Remove when implementing. 
private _interoperabilityMethod!: InteroperabilityMethod; public init(config: ModuleConfig): void { @@ -100,4 +99,73 @@ export class InternalMethod extends BaseMethod { nftID, }); } + + public async transferCrossChainInternal( + methodContext: MethodContext, + senderAddress: Buffer, + recipientAddress: Buffer, + nftID: Buffer, + receivingChainID: Buffer, + messageFee: bigint, + data: string, + includeAttributes: boolean, + ): Promise { + const chainID = this._method.getChainID(nftID); + const nftStore = this.stores.get(NFTStore); + const nft = await nftStore.get(methodContext, nftID); + + if (chainID.equals(this._config.ownChainID)) { + const escrowStore = this.stores.get(EscrowStore); + const userStore = this.stores.get(UserStore); + + nft.owner = receivingChainID; + await nftStore.save(methodContext, nftID, nft); + + await userStore.del(methodContext, userStore.getKey(senderAddress, nftID)); + + const escrowExists = await escrowStore.has( + methodContext, + escrowStore.getKey(receivingChainID, nftID), + ); + + if (!escrowExists) { + await this.createEscrowEntry(methodContext, receivingChainID, nftID); + } + } + + if (chainID.equals(receivingChainID)) { + await this._method.destroy(methodContext, senderAddress, nftID); + } + + let attributes: { module: string; attributes: Buffer }[] = []; + + if (includeAttributes) { + attributes = nft.attributesArray; + } + + this.events.get(TransferCrossChainEvent).log(methodContext, { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }); + + await this._interoperabilityMethod.send( + methodContext, + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributes, + data, + }), + ); + } } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 2c0cee0da4e..a712f07e1e0 100644 --- 
a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -13,7 +13,7 @@ */ import { MAX_DATA_LENGTH } from '../token/constants'; -import { LENGTH_NFT_ID } from './constants'; +import { LENGTH_NFT_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from './constants'; export const transferParamsSchema = { $id: '/lisk/nftTransferParams', @@ -39,3 +39,53 @@ export const transferParamsSchema = { }, }, }; + +export const crossChainNFTTransferMessageParamsSchema = { + $id: '/lisk/crossChainNFTTransferMessageParamsSchmema', + type: 'object', + required: ['nftID', 'senderAddress', 'recipientAddress', 'attributes', 'data'], + properties: { + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 1, + }, + senderAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 2, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 3, + }, + attributes: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + data: { + dataType: 'string', + maxLength: MAX_DATA_LENGTH, + fieldNumber: 5, + }, + }, +}; diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 40fa051c2f8..74d123c56aa 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -16,7 +16,9 @@ import { MethodContext } from '../../state_machine'; import { CCMsg } from '../interoperability'; // eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface ModuleConfig {} +export interface ModuleConfig { + ownChainID: Buffer; +} export interface InteroperabilityMethod { send( diff --git 
a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 3d7d42fc182..6b2d6a79f6a 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -20,9 +20,11 @@ import { EventQueue, createMethodContext } from '../../../../src/state_machine'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { + CROSS_CHAIN_COMMAND_NAME_TRANSFER, LENGTH_ADDRESS, LENGTH_CHAIN_ID, LENGTH_NFT_ID, + MODULE_NAME_NFT, NFT_NOT_LOCKED, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; @@ -30,10 +32,26 @@ import { MethodContext } from '../../../../src/state_machine/method_context'; import { TransferEvent, TransferEventData } from '../../../../src/modules/nft/events/transfer'; import { UserStore } from '../../../../src/modules/nft/stores/user'; import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; +import { NFTMethod } from '../../../../src/modules/nft/method'; +import { InteroperabilityMethod } from '../../../../src/modules/nft/types'; +import { + TransferCrossChainEvent, + TransferCrossChainEventData, +} from '../../../../src/modules/nft/events/transfer_cross_chain'; +import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; +import { CCM_STATUS_OK } from '../../../../src/modules/token/constants'; +import { crossChainNFTTransferMessageParamsSchema } from '../../../../src/modules/nft/schemas'; describe('InternalMethod', () => { const module = new NFTModule(); const internalMethod = new InternalMethod(module.stores, module.events); + const method = new NFTMethod(module.stores, module.events); + let interoperabilityMethod!: InteroperabilityMethod; + internalMethod.addDependencies(method, 
interoperabilityMethod); + + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + internalMethod.init({ ownChainID }); + let methodContext!: MethodContext; const checkEventResult = ( @@ -62,7 +80,7 @@ describe('InternalMethod', () => { const address = utils.getRandomBytes(LENGTH_ADDRESS); const senderAddress = utils.getRandomBytes(LENGTH_ADDRESS); const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + let nftID = utils.getRandomBytes(LENGTH_NFT_ID); beforeEach(() => { methodContext = createMethodContext({ @@ -165,4 +183,331 @@ describe('InternalMethod', () => { ).rejects.toThrow('does not exist'); }); }); + + describe('transferCrossChainInternal', () => { + let receivingChainID: Buffer; + const messageFee = BigInt(1000); + const data = ''; + + beforeEach(() => { + receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + interoperabilityMethod = { + send: jest.fn().mockResolvedValue(Promise.resolve()), + error: jest.fn().mockResolvedValue(Promise.resolve()), + terminateChain: jest.fn().mockRejectedValue(Promise.resolve()), + }; + + internalMethod.addDependencies(method, interoperabilityMethod); + }); + + describe('if attributes are not included ccm contains empty attributes', () => { + const includeAttributes = false; + + it('should transfer the ownership of the NFT to the receiving chain and escrow it for a native NFT', async () => { + const chainID = ownChainID; + nftID = Buffer.concat([chainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]); + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributes: [], + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + 
methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + await expect(nftStore.get(methodContext, nftID)).resolves.toEqual({ + owner: receivingChainID, + attributesArray: [], + }); + + await expect( + userStore.has(methodContext, userStore.getKey(senderAddress, nftID)), + ).resolves.toBeFalse(); + + await expect( + escrowStore.get(methodContext, escrowStore.getKey(receivingChainID, nftID)), + ).resolves.toEqual({}); + + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + + it('should destroy NFT if the chain ID of the NFT is the same as receiving chain', async () => { + nftID = Buffer.concat([ + receivingChainID, + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributes: [], + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + checkEventResult(methodContext.eventQueue, 2, DestroyEvent, 0, { + address: senderAddress, + nftID, + }); + + 
checkEventResult( + methodContext.eventQueue, + 2, + TransferCrossChainEvent, + 1, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + }); + + describe('if attributes are included ccm contains attributes of the NFT', () => { + const includeAttributes = true; + + it('should transfer the ownership of the NFT to the receiving chain and escrow it for a native NFT', async () => { + const chainID = ownChainID; + nftID = Buffer.concat([chainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]); + + const attributesArray = [ + { + module: 'pos', + attributes: utils.getRandomBytes(20), + }, + ]; + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributes: attributesArray, + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray, + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + await expect(nftStore.get(methodContext, nftID)).resolves.toEqual({ + owner: receivingChainID, + attributesArray, + }); + + await expect( + userStore.has(methodContext, userStore.getKey(senderAddress, nftID)), + ).resolves.toBeFalse(); + + await expect( + escrowStore.get(methodContext, escrowStore.getKey(receivingChainID, nftID)), + ).resolves.toEqual({}); + + checkEventResult( + 
methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + + it('should destroy NFT if the chain ID of the NFT is the same as receiving chain', async () => { + nftID = Buffer.concat([ + receivingChainID, + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + + const attributesArray = [ + { + module: 'pos', + attributes: utils.getRandomBytes(20), + }, + ]; + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributes: attributesArray, + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray, + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + checkEventResult(methodContext.eventQueue, 2, DestroyEvent, 0, { + address: senderAddress, + nftID, + }); + + checkEventResult( + methodContext.eventQueue, + 2, + TransferCrossChainEvent, + 1, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + 
receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + }); + }); }); From 7c668f10a77f4054630ccf600fa8ea4664defbcd Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Sun, 4 Jun 2023 20:44:39 +0200 Subject: [PATCH 048/170] Implement cross chain command --- .../modules/nft/cc_commands/cc_transfer.ts | 154 +++++ framework/src/modules/nft/constants.ts | 4 + .../src/modules/nft/events/ccm_transfer.ts | 4 + framework/src/modules/nft/internal_method.ts | 4 + framework/src/modules/nft/method.ts | 60 +- framework/src/modules/nft/schemas.ts | 12 +- .../modules/token/cc_commands/cc_transfer.ts | 1 - .../nft/cc_comands/cc_transfer.spec.ts | 615 ++++++++++++++++++ .../test/unit/modules/nft/method.spec.ts | 124 ++++ 9 files changed, 973 insertions(+), 5 deletions(-) create mode 100644 framework/src/modules/nft/cc_commands/cc_transfer.ts create mode 100644 framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts new file mode 100644 index 00000000000..e172160896e --- /dev/null +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -0,0 +1,154 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { codec } from '@liskhq/lisk-codec'; +import { validator } from '@liskhq/lisk-validator'; +import { CCTransferMessageParams, crossChainNFTTransferMessageParamsSchema } from '../schemas'; +import { NFTAttributes, NFTStore } from '../stores/nft'; +import { NFTMethod } from '../method'; +import { + CCM_STATUS_CODE_OK, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + FEE_CREATE_NFT, + NftEventResult, +} from '../constants'; +import { InternalMethod } from '../internal_method'; +import { BaseCCCommand } from '../../interoperability/base_cc_command'; +import { CrossChainMessageContext } from '../../interoperability/types'; +import { MAX_RESERVED_ERROR_STATUS } from '../../interoperability/constants'; +import { FeeMethod } from '../types'; +import { EscrowStore } from '../stores/escrow'; +import { CcmTransferEvent } from '../events/ccm_transfer'; + +export class CrossChainTransferCommand extends BaseCCCommand { + public schema = crossChainNFTTransferMessageParamsSchema; + private _method!: NFTMethod; + private _internalMethod!: InternalMethod; + private _feeMethod!: FeeMethod; + + public get name(): string { + return CROSS_CHAIN_COMMAND_NAME_TRANSFER; + } + + public init(args: { method: NFTMethod; internalMethod: InternalMethod; feeMethod: FeeMethod }) { + this._method = args.method; + this._internalMethod = args.internalMethod; + this._feeMethod = args.feeMethod; + } + + public async verify(context: CrossChainMessageContext): Promise { + const { ccm, getMethodContext } = context; + const params = codec.decode( + crossChainNFTTransferMessageParamsSchema, + ccm.params, + ); + validator.validate(crossChainNFTTransferMessageParamsSchema, params); + + if (ccm.status > MAX_RESERVED_ERROR_STATUS) { + throw new Error('Invalid CCM error code'); + } + + const { nftID } = params; + const { sendingChainID } = ccm; + const nftChainID = this._method.getChainID(nftID); + const ownChainID = this._internalMethod.getOwnChainID(); + + if (![ownChainID, 
sendingChainID].some(allowedChainID => nftChainID.equals(allowedChainID))) { + throw new Error('NFT is not native to either the sending chain or the receiving chain'); + } + + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(getMethodContext(), nftID); + if (nftChainID.equals(ownChainID) && !nftExists) { + throw new Error('Non-existent entry in the NFT substore'); + } + + const owner = await this._method.getNFTOwner(getMethodContext(), nftID); + if (nftChainID.equals(ownChainID) && !owner.equals(sendingChainID)) { + throw new Error('NFT has not been properly escrowed'); + } + + if (!nftChainID.equals(ownChainID) && nftExists) { + throw new Error('NFT substore entry already exists'); + } + } + + public async execute(context: CrossChainMessageContext): Promise { + const { ccm, getMethodContext } = context; + const params = codec.decode( + crossChainNFTTransferMessageParamsSchema, + ccm.params, + ); + validator.validate(crossChainNFTTransferMessageParamsSchema, params); + const { sendingChainID, status } = ccm; + const { nftID, senderAddress, attributesArray: receivedAttributes } = params; + const nftChainID = this._method.getChainID(nftID); + const ownChainID = this._internalMethod.getOwnChainID(); + const nftStore = this.stores.get(NFTStore); + const escrowStore = this.stores.get(EscrowStore); + let recipientAddress: Buffer; + recipientAddress = params.recipientAddress; + + if (nftChainID.equals(ownChainID)) { + const storeData = await nftStore.get(getMethodContext(), nftID); + if (status === CCM_STATUS_CODE_OK) { + storeData.owner = recipientAddress; + await nftStore.save(getMethodContext(), nftID, storeData); + await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); + await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); + } else { + recipientAddress = senderAddress; + storeData.owner = recipientAddress; + await nftStore.save(getMethodContext(), nftID, storeData); + 
await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); + await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); + } + } else { + const isSupported = await this._method.isNFTSupported(getMethodContext(), nftID); + if (!isSupported) { + this.events.get(CcmTransferEvent).error( + context, + { + senderAddress, + recipientAddress, + nftID, + }, + NftEventResult.RESULT_NFT_NOT_SUPPORTED, + ); + throw new Error('Non-supported NFT'); + } + if (status === CCM_STATUS_CODE_OK) { + this._feeMethod.payFee(getMethodContext(), BigInt(FEE_CREATE_NFT)); + await nftStore.save(getMethodContext(), nftID, { + owner: recipientAddress, + attributesArray: receivedAttributes as NFTAttributes[], + }); + await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); + } else { + recipientAddress = senderAddress; + await nftStore.save(getMethodContext(), nftID, { + owner: recipientAddress, + attributesArray: receivedAttributes as NFTAttributes[], + }); + await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); + } + } + + this.events.get(CcmTransferEvent).log(context, { + senderAddress, + recipientAddress, + nftID, + }); + } +} diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index a63d4095872..e732b5f17f2 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -21,6 +21,10 @@ export const LENGTH_ADDRESS = 20; export const MODULE_NAME_NFT = 'nft'; export const NFT_NOT_LOCKED = MODULE_NAME_NFT; export const CROSS_CHAIN_COMMAND_NAME_TRANSFER = 'crossChainTransfer'; +export const CCM_STATUS_CODE_OK = 0; +export const EMPTY_BYTES = Buffer.alloc(0); +export const ALL_SUPPORTED_NFTS_KEY = EMPTY_BYTES; +export const FEE_CREATE_NFT = 5000000; export const enum NftEventResult { RESULT_SUCCESSFUL = 0, diff --git a/framework/src/modules/nft/events/ccm_transfer.ts 
b/framework/src/modules/nft/events/ccm_transfer.ts index 1e72b946398..990f267885b 100644 --- a/framework/src/modules/nft/events/ccm_transfer.ts +++ b/framework/src/modules/nft/events/ccm_transfer.ts @@ -58,4 +58,8 @@ export class CcmTransferEvent extends BaseEvent { + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + return nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + } + + public async isNFTSupported(methodContext: MethodContext, nftID: Buffer): Promise { + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + + const nftChainID = this.getChainID(nftID); + if (nftChainID.equals(this._config.ownChainID)) { + return true; + } + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + const supportForAllKeysExists = await supportedNFTsStore.has( + methodContext, + ALL_SUPPORTED_NFTS_KEY, + ); + if (supportForAllKeysExists) { + return true; + } + + const supportForNftChainIdExists = await supportedNFTsStore.has(methodContext, nftChainID); + if (supportForNftChainIdExists) { + const supportedNFTsStoreData = await supportedNFTsStore.get(methodContext, nftChainID); + if (supportedNFTsStoreData.supportedCollectionIDArray.length === 0) { + return true; + } + const collectionID = await this.getCollectionID(methodContext, nftID); + if ( + supportedNFTsStoreData.supportedCollectionIDArray.some(id => + collectionID.equals(id.collectionID), + ) + ) { + return true; + } + } + + return false; + } } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index a712f07e1e0..00cadae89a7 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -43,7 +43,7 @@ export const transferParamsSchema = { export const 
crossChainNFTTransferMessageParamsSchema = { $id: '/lisk/crossChainNFTTransferMessageParamsSchmema', type: 'object', - required: ['nftID', 'senderAddress', 'recipientAddress', 'attributes', 'data'], + required: ['nftID', 'senderAddress', 'recipientAddress', 'attributesArray', 'data'], properties: { nftID: { dataType: 'bytes', @@ -61,7 +61,7 @@ export const crossChainNFTTransferMessageParamsSchema = { format: 'lisk32', fieldNumber: 3, }, - attributes: { + attributesArray: { type: 'array', fieldNumber: 4, items: { @@ -89,3 +89,11 @@ export const crossChainNFTTransferMessageParamsSchema = { }, }, }; + +export interface CCTransferMessageParams { + nftID: Buffer; + attributes: { module: string; attributes: Buffer }[]; + senderAddress: Buffer; + recipientAddress: Buffer; + data: string; +} diff --git a/framework/src/modules/token/cc_commands/cc_transfer.ts b/framework/src/modules/token/cc_commands/cc_transfer.ts index d07cd5b4a58..5631996e10f 100644 --- a/framework/src/modules/token/cc_commands/cc_transfer.ts +++ b/framework/src/modules/token/cc_commands/cc_transfer.ts @@ -13,7 +13,6 @@ */ import { codec } from '@liskhq/lisk-codec'; import { validator } from '@liskhq/lisk-validator'; -// import { NotFoundError } from '@liskhq/lisk-db'; import { BaseCCCommand } from '../../interoperability/base_cc_command'; import { CrossChainMessageContext } from '../../interoperability/types'; import { TokenMethod } from '../method'; diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts new file mode 100644 index 00000000000..e0fffcd6232 --- /dev/null +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -0,0 +1,615 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { codec } from '@liskhq/lisk-codec'; +import { utils } from '@liskhq/lisk-cryptography'; +import { NFTModule } from '../../../../../src/modules/nft/module'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; +import { + ALL_SUPPORTED_NFTS_KEY, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + FEE_CREATE_NFT, + LENGTH_CHAIN_ID, + LENGTH_NFT_ID, + NftEventResult, +} from '../../../../../src/modules/nft/constants'; +import { NFTStore } from '../../../../../src/modules/nft/stores/nft'; +import { CCMsg, CrossChainMessageContext, ccuParamsSchema } from '../../../../../src'; +import { InternalMethod } from '../../../../../src/modules/nft/internal_method'; +import { NFTMethod } from '../../../../../src/modules/nft/method'; +import { EventQueue, MethodContext, createMethodContext } from '../../../../../src/state_machine'; +import { CrossChainTransferCommand } from '../../../../../src/modules/nft/cc_commands/cc_transfer'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { crossChainNFTTransferMessageParamsSchema } from '../../../../../src/modules/nft/schemas'; +import { + CCM_STATUS_OK, + CCM_STATUS_PROTOCOL_VIOLATION, +} from '../../../../../src/modules/token/constants'; +import { fakeLogger } from '../../../../utils/mocks/logger'; +import { CcmTransferEvent } from '../../../../../src/modules/nft/events/ccm_transfer'; +import { EscrowStore } from '../../../../../src/modules/nft/stores/escrow'; +import { UserStore } from '../../../../../src/modules/nft/stores/user'; +import { SupportedNFTsStore } from '../../../../../src/modules/nft/stores/supported_nfts'; + 
+describe('CrossChain Transfer Command', () => { + const module = new NFTModule(); + const method = new NFTMethod(module.stores, module.events); + const internalMethod = new InternalMethod(module.stores, module.events); + const feeMethod = { payFee: jest.fn() }; + const checkEventResult = ( + eventQueue: EventQueue, + length: number, + EventClass: any, + index: number, + expectedResult: any, + result: any = 0, + ) => { + expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + + const eventData = codec.decode>( + new EventClass('nft').schema, + eventQueue.getEvents()[index].toObject().data, + ); + + expect(eventData).toEqual({ ...expectedResult, result }); + }; + const defaultAddress = utils.getRandomBytes(20); + const sendingChainID = Buffer.from([1, 1, 1, 1]); + const receivingChainID = Buffer.from([0, 0, 0, 1]); + const senderAddress = utils.getRandomBytes(20); + const recipientAddress = utils.getRandomBytes(20); + const attributesArray = [{ module: 'pos', attributes: Buffer.alloc(5) }]; + const getStore = (moduleID: Buffer, prefix: Buffer) => stateStore.getStore(moduleID, prefix); + const getMethodContext = () => methodContext; + const eventQueue = new EventQueue(0); + const contextStore = new Map(); + const nftID = Buffer.alloc(LENGTH_NFT_ID, 1); + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const ownChainID = Buffer.alloc(LENGTH_CHAIN_ID, 1); + const config = { + ownChainID, + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + const interopMethod = { + send: jest.fn(), + error: jest.fn(), + terminateChain: jest.fn(), + }; + const defaultHeader = { + height: 0, + timestamp: 0, + }; + const defaultEncodedCCUParams = codec.encode(ccuParamsSchema, { + activeValidatorsUpdate: { + blsKeysUpdate: [], + bftWeightsUpdate: [], + bftWeightsUpdateBitmap: Buffer.alloc(0), + }, + certificate: Buffer.alloc(1), + 
certificateThreshold: BigInt(1), + inboxUpdate: { + crossChainMessages: [], + messageWitnessHashes: [], + outboxRootWitness: { + bitmap: Buffer.alloc(1), + siblingHashes: [], + }, + }, + sendingChainID: Buffer.from('04000001', 'hex'), + }); + const defaultTransaction = { + senderAddress: defaultAddress, + fee: BigInt(0), + params: defaultEncodedCCUParams, + }; + let params: Buffer; + let ccm: CCMsg; + let command: CrossChainTransferCommand; + let methodContext: MethodContext; + let stateStore: PrefixedStateReadWriter; + let context: CrossChainMessageContext; + let nftStore: NFTStore; + let escrowStore: EscrowStore; + let userStore: UserStore; + + beforeEach(async () => { + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + method.addDependencies(interopMethod); + method.init(config); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(config); + command = new CrossChainTransferCommand(module.stores, module.events); + command.init({ method, internalMethod, feeMethod }); + methodContext = createMethodContext({ + stateStore, + eventQueue: new EventQueue(0), + contextStore: new Map(), + }); + nftStore = module.stores.get(NFTStore); + await nftStore.save(methodContext, nftID, { + owner: sendingChainID, + attributesArray: [], + }); + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray, + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_OK, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID, + }; + }); + + describe('verify', () => { + it('should resolve if verification succeeds', async () => { + await 
expect(command.verify(context)).resolves.toBeUndefined(); + }); + + it('throw for if validation fails', async () => { + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID: Buffer.alloc(LENGTH_NFT_ID + 1, 1), + senderAddress, + recipientAddress, + attributesArray, + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_OK, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue, + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.verify(context)).rejects.toThrow(`Property '.nftID' maxLength exceeded`); + }); + + it('throw for invalid ccm status', async () => { + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: 72, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue, + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.verify(context)).rejects.toThrow('Invalid CCM error code'); + }); + + it('throw if nft chain id is equal to neither own chain id or sending chain id', async () => { + const newConfig = { + ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID: Buffer.alloc(LENGTH_NFT_ID, 1), + senderAddress, + recipientAddress, + attributesArray, + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + 
module: module.name, + nonce: BigInt(1), + sendingChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_OK, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue, + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.verify(context)).rejects.toThrow( + 'NFT is not native to either the sending chain or the receiving chain', + ); + }); + + it('should throw if nft chain id equals own chain id but no entry exists in nft substore for the nft id', async () => { + await nftStore.del(methodContext, nftID); + + await expect(command.verify(context)).rejects.toThrow( + 'Non-existent entry in the NFT substore', + ); + }); + + it('should throw if nft chain id equals own chain id but the owner of nft is different from the sending chain', async () => { + await nftStore.del(methodContext, nftID); + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }); + + await expect(command.verify(context)).rejects.toThrow('NFT has not been properly escrowed'); + }); + + it('throw if nft chain id is not equal to own chain id and entry already exists in nft substore for the nft id', async () => { + const newConfig = { + ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + + await expect(command.verify(context)).rejects.toThrow('NFT substore entry already exists'); + }); + }); + + describe('execute', () => { + beforeEach(async () => { + userStore = module.stores.get(UserStore); + escrowStore = module.stores.get(EscrowStore); + await escrowStore.set(methodContext, escrowStore.getKey(sendingChainID, nftID), {}); + }); + + it('should 
throw if validation fails', async () => { + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID: Buffer.alloc(LENGTH_NFT_ID, 1), + senderAddress: utils.getRandomBytes(32), + recipientAddress, + attributesArray, + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_OK, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue, + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.execute(context)).rejects.toThrow( + `Property '.senderAddress' address length invalid`, + ); + }); + + it('should throw if fail to decode the CCM', async () => { + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_OK, + params: Buffer.from(''), + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue, + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.execute(context)).rejects.toThrow( + 'Message does not contain a property for fieldNumber: 1.', + ); + }); + + it('should set appropriate values to stores and emit appropriate successful ccm transfer event for nft chain id equals own chain id and ccm status code ok', async () => { + await expect(command.execute(context)).resolves.toBeUndefined(); + const nftStoreData = await nftStore.get(methodContext, nftID); + const userAccountExists = await userStore.has( + methodContext, + userStore.getKey(recipientAddress, nftID), + ); + const escrowAccountExists = await escrowStore.has( + methodContext, + escrowStore.getKey(sendingChainID, nftID), + ); + 
expect(nftStoreData.owner).toStrictEqual(recipientAddress); + expect(nftStoreData.attributesArray).toEqual([]); + expect(userAccountExists).toBe(true); + expect(escrowAccountExists).toBe(false); + checkEventResult(context.eventQueue, 1, CcmTransferEvent, 0, { + senderAddress, + recipientAddress, + nftID, + }); + }); + + it('should set appropriate values to stores and emit appropriate successful ccm transfer event for nft chain id equals own chain id but not ccm status code ok', async () => { + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_PROTOCOL_VIOLATION, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.execute(context)).resolves.toBeUndefined(); + const nftStoreData = await nftStore.get(methodContext, nftID); + const userAccountExistsForRecipient = await userStore.has( + methodContext, + userStore.getKey(recipientAddress, nftID), + ); + const userAccountExistsForSender = await userStore.has( + methodContext, + userStore.getKey(senderAddress, nftID), + ); + const escrowAccountExists = await escrowStore.has( + methodContext, + escrowStore.getKey(sendingChainID, nftID), + ); + expect(nftStoreData.owner).toStrictEqual(senderAddress); + expect(nftStoreData.attributesArray).toEqual([]); + expect(userAccountExistsForRecipient).toBe(false); + expect(userAccountExistsForSender).toBe(true); + expect(escrowAccountExists).toBe(false); + checkEventResult(context.eventQueue, 1, CcmTransferEvent, 0, { + senderAddress, + recipientAddress: senderAddress, + nftID, + }); + }); + + it('should reject and emit unsuccessful ccm transfer event if nft chain id does not equal own chain id and nft is not supported', async () => { + const newNftID 
= utils.getRandomBytes(LENGTH_NFT_ID); + await nftStore.save(methodContext, newNftID, { + owner: sendingChainID, + attributesArray: [], + }); + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID: newNftID, + senderAddress, + recipientAddress, + attributesArray, + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_OK, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.execute(context)).rejects.toThrow('Non-supported NFT'); + checkEventResult( + context.eventQueue, + 1, + CcmTransferEvent, + 0, + { + senderAddress, + recipientAddress, + nftID: newNftID, + }, + NftEventResult.RESULT_NFT_NOT_SUPPORTED, + ); + }); + + it('should set appropriate values to stores and emit appropriate successful ccm transfer event if nft chain id does not equal own chain id but nft is supported and ccm status code ok', async () => { + const newConfig = { + ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + jest.spyOn(feeMethod, 'payFee'); + + await expect(command.execute(context)).resolves.toBeUndefined(); + const nftStoreData = await nftStore.get(methodContext, nftID); + const userAccountExists = await userStore.has( + methodContext, + userStore.getKey(recipientAddress, nftID), + ); + const escrowAccountExists = 
await escrowStore.has( + methodContext, + escrowStore.getKey(sendingChainID, nftID), + ); + expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); + expect(nftStoreData.owner).toStrictEqual(recipientAddress); + expect(nftStoreData.attributesArray).toEqual(attributesArray); + expect(userAccountExists).toBe(true); + expect(escrowAccountExists).toBe(true); + checkEventResult(context.eventQueue, 1, CcmTransferEvent, 0, { + senderAddress, + recipientAddress, + nftID, + }); + }); + + it('should set appropriate values to stores and emit appropriate successful ccm transfer event if nft chain id does not equal own chain id but nft is supported and not ccm status code ok', async () => { + const newConfig = { + ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCM_STATUS_PROTOCOL_VIOLATION, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID, + }; + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + jest.spyOn(feeMethod, 'payFee'); + + await expect(command.execute(context)).resolves.toBeUndefined(); + const nftStoreData = await nftStore.get(methodContext, nftID); + const userAccountExistsForRecipient = await userStore.has( + methodContext, + userStore.getKey(recipientAddress, nftID), + ); + const userAccountExistsForSender = await userStore.has( + 
methodContext, + userStore.getKey(senderAddress, nftID), + ); + const escrowAccountExists = await escrowStore.has( + methodContext, + escrowStore.getKey(sendingChainID, nftID), + ); + expect(feeMethod.payFee).not.toHaveBeenCalled(); + expect(nftStoreData.owner).toStrictEqual(senderAddress); + expect(nftStoreData.attributesArray).toEqual(attributesArray); + expect(userAccountExistsForRecipient).toBe(false); + expect(userAccountExistsForSender).toBe(true); + expect(escrowAccountExists).toBe(true); + checkEventResult(context.eventQueue, 1, CcmTransferEvent, 0, { + senderAddress, + recipientAddress: senderAddress, + nftID, + }); + }); + }); +}); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 8f227a8cb0a..7e204f73010 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -21,8 +21,10 @@ import { MethodContext, createMethodContext } from '../../../../src/state_machin import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { + ALL_SUPPORTED_NFTS_KEY, LENGTH_ADDRESS, LENGTH_CHAIN_ID, + LENGTH_COLLECTION_ID, LENGTH_NFT_ID, NFT_NOT_LOCKED, NftEventResult, @@ -30,6 +32,7 @@ import { import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { UserStore } from '../../../../src/modules/nft/stores/user'; import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; +import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; describe('NFTMethod', () => { const module = new NFTModule(); @@ -286,4 +289,125 @@ describe('NFTMethod', () => { }); }); }); + + describe('getCollectionID', () => { + it('should throw if entry does not exist in the nft substore for the nft id', async () => { + await expect(method.getCollectionID(methodContext, 
nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + }); + + it('should return the first bytes of length LENGTH_CHAIN_ID from provided nftID', async () => { + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }); + const expectedValue = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + const receivedValue = await method.getCollectionID(methodContext, nftID); + expect(receivedValue).toEqual(expectedValue); + }); + }); + + describe('isNFTSupported', () => { + beforeEach(async () => { + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }); + }); + + it('should throw if entry does not exist in the nft substore for the nft id', async () => { + await nftStore.del(methodContext, nftID); + await expect(method.isNFTSupported(methodContext, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + }); + + it('should return true if nft chain id equals own chain id', async () => { + const ownChainID = nftID.slice(0, LENGTH_CHAIN_ID); + const config = { + ownChainID, + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(config); + + const isSupported = await method.isNFTSupported(methodContext, nftID); + expect(isSupported).toBe(true); + }); + + it('should return true if nft chain id does not equal own chain id but all nft keys are supported', async () => { + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const config = { + ownChainID, + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(config); + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + const isSupported = await method.isNFTSupported(methodContext, nftID); + 
expect(isSupported).toBe(true); + }); + + it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array is empty', async () => { + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const config = { + ownChainID, + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(config); + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { + supportedCollectionIDArray: [], + }); + + const isSupported = await method.isNFTSupported(methodContext, nftID); + expect(isSupported).toBe(true); + }); + + it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array includes collection id for nft id', async () => { + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const config = { + ownChainID, + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(config); + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { + supportedCollectionIDArray: [ + { collectionID: nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID) }, + { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, + ], + }); + + const isSupported = await method.isNFTSupported(methodContext, nftID); + expect(isSupported).toBe(true); + }); + + it('should return false if nft chain id does not equal own chain id and nft chain id is supported but corresponding supported collection id array does not include collection id for nft id', async () => { + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const config = { + ownChainID, + escrowAccountInitializationFee: BigInt(50000000), + 
userAccountInitializationFee: BigInt(50000000), + }; + method.init(config); + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { + supportedCollectionIDArray: [ + { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, + { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, + ], + }); + + const isSupported = await method.isNFTSupported(methodContext, nftID); + expect(isSupported).toBe(false); + }); + }); }); From e38e2c00a488e00c316d59b4bc5ae22caea9aaa2 Mon Sep 17 00:00:00 2001 From: sitetester Date: Mon, 5 Jun 2023 13:26:00 +0300 Subject: [PATCH 049/170] Script for `Message Recovery Initialization` Command (#8459) * Script for `Message Recovery Initialization` Command * cleanup * Update log * Declare local constants * Message recovery script (#8499) * Message recovery script * Declare local constants * `await` not needed --------- Co-authored-by: !shan --- .../initializeMessageRecovery.ts | 512 ++++++++++++++++++ .../messageRecovery/messageRecovery.ts | 245 +++++++++ .../interop/messageRecovery/parse_events.ts | 210 +++++++ .../interop/messageRecovery/start_nodes.sh | 64 +++ 4 files changed, 1031 insertions(+) create mode 100644 examples/interop/messageRecovery/initializeMessageRecovery.ts create mode 100644 examples/interop/messageRecovery/messageRecovery.ts create mode 100644 examples/interop/messageRecovery/parse_events.ts create mode 100644 examples/interop/messageRecovery/start_nodes.sh diff --git a/examples/interop/messageRecovery/initializeMessageRecovery.ts b/examples/interop/messageRecovery/initializeMessageRecovery.ts new file mode 100644 index 00000000000..b1fa9ab03bc --- /dev/null +++ b/examples/interop/messageRecovery/initializeMessageRecovery.ts @@ -0,0 +1,512 @@ +import { + apiClient, + chain, + cryptography, + MODULE_NAME_INTEROPERABILITY, + messageRecoveryInitializationParamsSchema, + ChainStatus, + codec, + Transaction, + ChannelDataJSON, 
+ ChannelData, + ChainAccountJSON, + Inbox, + Outbox, + db, + ProveResponse, + OutboxRootWitness, +} from 'lisk-sdk'; +import { join } from 'path'; +import { ensureDir } from 'fs-extra'; +import * as os from 'os'; + +// LIP 45 +const STORE_PREFIX_INTEROPERABILITY = Buffer.from('83ed0d25', 'hex'); +const SUBSTORE_PREFIX_CHANNEL_DATA = Buffer.from('a000', 'hex'); + +const HASH_LENGTH = 32; +const CHAIN_ID_LENGTH = 4; +const LOCAL_ID_LENGTH = 4; +const TOKEN_ID_LENGTH = CHAIN_ID_LENGTH + LOCAL_ID_LENGTH; + +const getDBInstance = async ( + dataPath: string, + dbName = 'messageRecoveryPlugin.db', +): Promise => { + const dirPath = join(dataPath.replace('~', os.homedir()), 'plugins/data', dbName); + console.log(`dirPath: ${dirPath}`); + + await ensureDir(dirPath); + return new db.Database(dirPath); +}; + +interface Data { + readonly blockHeader: chain.BlockHeaderJSON; +} + +interface MessageRecoveryInitializationParams { + chainID: Buffer; + channel: Buffer; + bitmap: Buffer; + siblingHashes: Buffer[]; +} + +const channelDataJSONToObj = (channelData: ChannelDataJSON): ChannelData => { + const { inbox, messageFeeTokenID, outbox, partnerChainOutboxRoot, minReturnFeePerByte } = + channelData; + + const inboxJSON: Inbox = { + appendPath: inbox.appendPath.map(ap => Buffer.from(ap, 'hex')), + root: Buffer.from(inbox.root, 'hex'), + size: inbox.size, + }; + + const outboxJSON: Outbox = { + appendPath: outbox.appendPath.map(ap => Buffer.from(ap, 'hex')), + root: Buffer.from(outbox.root, 'hex'), + size: outbox.size, + }; + + return { + messageFeeTokenID: Buffer.from(messageFeeTokenID, 'hex'), + outbox: outboxJSON, + inbox: inboxJSON, + partnerChainOutboxRoot: Buffer.from(partnerChainOutboxRoot, 'hex'), + minReturnFeePerByte: BigInt(minReturnFeePerByte), + }; +}; + +const proveResponseJSONToObj = (proveResponseJSON: ProveResponseJSON): ProveResponse => { + const { + proof: { queries, siblingHashes }, + } = proveResponseJSON; + + return { + proof: { + queries: queries.map(query => 
({ + bitmap: Buffer.from(query.bitmap, 'hex'), + key: Buffer.from(query.key, 'hex'), + value: Buffer.from(query.value, 'hex'), + })), + siblingHashes: siblingHashes.map(siblingHash => Buffer.from(siblingHash, 'hex')), + }, + }; +}; + +const inclusionProofsWithHeightAndStateRootSchema = { + $id: `scripts/recovery/inclusionProofs`, + type: 'object', + properties: { + inclusionProofs: { + type: 'array', + fieldNumber: 1, + items: { + type: 'object', + properties: { + height: { dataType: 'uint32', fieldNumber: 1 }, + inclusionProof: { + type: 'object', + fieldNumber: 2, + properties: { + siblingHashes: { + type: 'array', + fieldNumber: 1, + items: { + dataType: 'bytes', + }, + }, + bitmap: { + dataType: 'bytes', + fieldNumber: 2, + }, + key: { + dataType: 'bytes', + fieldNumber: 3, + }, + value: { + dataType: 'bytes', + fieldNumber: 4, + }, + }, + }, + stateRoot: { dataType: 'bytes', fieldNumber: 3 }, + }, + }, + }, + }, +}; +type ProveResponseJSON = JSONObject; + +const inboxOutboxProps = { + appendPath: { + type: 'array', + items: { + dataType: 'bytes', + minLength: HASH_LENGTH, + maxLength: HASH_LENGTH, + }, + fieldNumber: 1, + }, + size: { + dataType: 'uint32', + fieldNumber: 2, + }, + root: { + dataType: 'bytes', + minLength: HASH_LENGTH, + maxLength: HASH_LENGTH, + fieldNumber: 3, + }, +}; + +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0045.md#channel-data-substore +const channelSchema = { + $id: '/modules/interoperability/channel', + type: 'object', + required: [ + 'inbox', + 'outbox', + 'partnerChainOutboxRoot', + 'messageFeeTokenID', + 'minReturnFeePerByte', + ], + properties: { + inbox: { + type: 'object', + fieldNumber: 1, + required: ['appendPath', 'size', 'root'], + properties: inboxOutboxProps, + }, + outbox: { + type: 'object', + fieldNumber: 2, + required: ['appendPath', 'size', 'root'], + properties: inboxOutboxProps, + }, + partnerChainOutboxRoot: { + dataType: 'bytes', + minLength: HASH_LENGTH, + maxLength: HASH_LENGTH, + fieldNumber: 
3, + }, + messageFeeTokenID: { + dataType: 'bytes', + minLength: TOKEN_ID_LENGTH, + maxLength: TOKEN_ID_LENGTH, + fieldNumber: 4, + }, + minReturnFeePerByte: { + dataType: 'uint64', + fieldNumber: 5, + }, + }, +}; + +type Primitive = string | number | bigint | boolean | null | undefined; +type Replaced = T extends TReplace | TKeep + ? T extends TReplace + ? TWith | Exclude + : T + : { + [P in keyof T]: Replaced; + }; + +type JSONObject = Replaced; + +interface InclusionProofWithHeightAndStateRoot { + height: number; + stateRoot: Buffer; + inclusionProof: OutboxRootWitness & { key: Buffer; value: Buffer }; +} + +type KVStore = db.Database; +const DB_KEY_INCLUSION_PROOF = Buffer.from([1]); + +class InclusionProofModel { + private readonly _db: KVStore; + + public constructor(db: KVStore) { + this._db = db; + } + + public async close() { + await this._db.close(); + } + + public async getAll(): Promise { + let proofs: InclusionProofWithHeightAndStateRoot[] = []; + try { + const encodedInfo = await this._db.get(DB_KEY_INCLUSION_PROOF); + proofs = codec.decode<{ inclusionProofs: InclusionProofWithHeightAndStateRoot[] }>( + inclusionProofsWithHeightAndStateRootSchema, + encodedInfo, + ).inclusionProofs; + } catch (error) { + if (!(error instanceof db.NotFoundError)) { + throw error; + } + } + return proofs; + } + + public async getByHeight( + height: number, + ): Promise { + return (await this.getAll()).find(proof => proof.height === height); + } + + /** + * This will save proofs greater than or equal to given height + * @param height Last certified height + */ + public async deleteProofsUntilHeight(height: number) { + const filteredProofs = (await this.getAll()).filter(proofs => proofs.height >= height); + + await this._db.set( + DB_KEY_INCLUSION_PROOF, + codec.encode(inclusionProofsWithHeightAndStateRootSchema, { + inclusionProofs: filteredProofs, + }), + ); + } + + public async save(inclusionProofWithHeightAndStateRoot: InclusionProofWithHeightAndStateRoot) { + const 
proofs = await this.getAll(); + proofs.push(inclusionProofWithHeightAndStateRoot); + + const encodedInfo = codec.encode(inclusionProofsWithHeightAndStateRootSchema, { + inclusionProofs: proofs, + }); + await this._db.set(DB_KEY_INCLUSION_PROOF, encodedInfo); + } +} + +const relayerKeyInfo = { + address: 'lsk952ztknjoa3h58es4vgu5ovnoscv3amo7zg4zz', + keyPath: "m/44'/134'/3'", + publicKey: '8960f85f7ab3cc473f29c3a00e6ad66c569f2a84125388274a4f382e11306099', + privateKey: + 'a16702175f750eab29ba286a1e30c86eb6057b2aa8547925a1139341d50ee16c8960f85f7ab3cc473f29c3a00e6ad66c569f2a84125388274a4f382e11306099', + plain: { + generatorKeyPath: "m/25519'/134'/0'/3'", + generatorKey: '60df111d5d97bf45c426d889673a8f04499ba312480b1d913fc49c5a77908b83', + generatorPrivateKey: + 'b623e9d77567fee9c7ea6502275c19849cf5ded916aa5c967835144d5f1295d560df111d5d97bf45c426d889673a8f04499ba312480b1d913fc49c5a77908b83', + blsKeyPath: 'm/12381/134/0/3', + blsKey: + '99b210271475977210d5e92c02e325f011706c5c9fc3861ecfdb8163e078ed1214e8710669e1625b30899c624305bd0e', + blsProofOfPossession: + 'a4486989094ac9225362084642072777ff0a028d89cc735908ad267be53827821093e34f960857140882e2b062f1a02e193ce9f2ad765268ed82fe462e4755dd378d8edf220d1395c9687a3c88f1fc48a5990ebb43585516e18d7228f0b8b9fd', + blsPrivateKey: '3d34f3e44a5ce6b2a3c7b79be6ab76ece0fa46749cf66c41e4d000c6ae3353b6', + }, + encrypted: {}, +}; + +/** + * Steps: + * cd examples/interop/ + * + * make sure `exports.LIVENESS_LIMIT = 2592000;` in `lisk-framework/dist-node/modules/interoperability/constants.js` + * ./start_example (script to configure & register chains) + * + * Call `chainConnector_getSentCCUs` to see if any CCU was sent (sidechain status must change to ACTIVE after first CCU) + * + * call `interoperability_getChainAccount` endpoint to verify `status`, if it still shows 0, observe logs + * `pm2 logs 2` (2 is id of pos-sidechain-example-one), + * Initially, it'll log ``No valid CCU can be generated for the height: X` (e.g. 
till height 20) + * + * run ***this*** script (make sure sidechain status has changed to ACTIVE (on mainchain)) + * ts-node ./messageRecovery/initializeMessageRecovery.ts // it will keep on saving inclusion proofs for sidechain + * // observe logs to see both mainchain & sidechain are receiving blocks & sidechain is saving inclusion proofs + * // Make sure `sidechainAccount.lastCertificate.height` is increasing (if not sidechain might already have been terminated) + * + * // Now stop ALL nodes + * pm2 stop all + + * terminate sidechain + * // Before terminating a sidechain, make sure, `sidechainAccount.lastCertificate.height` (on mainchain) has reached `Successfully stored inclusion proof at height x` (from sidechain) + * + * pwd + * /examples/interop/pos-mainchain-fast + * + * Change constant in `lisk-framework/dist-node/modules/interoperability/constants.js` + * // exports.LIVENESS_LIMIT = 2592000; + * => exports.LIVENESS_LIMIT = 30; // Next wait for 30 seconds + * + * pm2 start all + * // Now `this running` script (from other terminal window) should show logs again + * while `sidechainAccount.lastCertificate.height` logging SAME value (an indication sidechain has already been terminated) + * + * Run `terminateSidechainForLiveness` command in console (note: `--send` is missing here) + * cd pos-mainchain-fast + * ./bin/run transaction:create interoperability terminateSidechainForLiveness 200000000 --json --passphrase="two thunder nurse process feel fence addict size broccoli swing city speed build slide virus ridge jazz mushroom road fish border argue weapon lens" --key-derivation-path="m/44'/134'/1'" --data-path ~/.lisk/mainchain-node-one + * Please enter: chainID: 04000001 (taken from examples/interop/README.md) + * + * 3. 
Call `txpool_postTransaction` to `http://127.0.0.1:7881/rpc` with generated transaction + * // Here `7881` is port of mainchain-node-one + */ + +(async () => { + console.log('Starting init message recovery script...'); + + let inclusionProofModel: InclusionProofModel; + try { + inclusionProofModel = new InclusionProofModel(await getDBInstance('~/.lisk')); + console.log('DB is initialized.'); + } catch (error) { + console.log('Error occurred while initializing DB', error); + process.exit(); + } + + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-one`); + const sidechainClient = await apiClient.createIPCClient(`~/.lisk/pos-sidechain-example-one`); + + const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); + const sidechainNodeInfo = await sidechainClient.invoke('system_getNodeInfo'); + + const recoveryKey = Buffer.concat([ + STORE_PREFIX_INTEROPERABILITY, + SUBSTORE_PREFIX_CHANNEL_DATA, + cryptography.utils.hash(Buffer.from(mainchainNodeInfo.chainID as string, 'hex')), + ]); + console.log('recoveryKey: ', recoveryKey); + + // Collect inclusion proofs on sidechain and save it in recoveryDB + sidechainClient.subscribe('chain_newBlock', async (data?: Record) => { + const { blockHeader: receivedBlock } = data as unknown as Data; + const newBlockHeader = chain.BlockHeader.fromJSON(receivedBlock).toObject(); + console.log( + `Received new block on sidechain ${sidechainNodeInfo.chainID} with height ${newBlockHeader.height}`, + ); + + // Returns proof for sidechain lastBlock header stateRoot (which is state root of the last block that was forged) + const proof = proveResponseJSONToObj( + await sidechainClient.invoke('state_prove', { + queryKeys: [recoveryKey.toString('hex')], // `queryKey` is `string` + }), + ).proof; + console.log('proof: ', proof); + + // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0039.md#proof-verification + // To check the proof, the Verifier calls ```verify(queryKeys, proof, merkleRoot) 
function``` + const smt = new db.SparseMerkleTree(); + console.log( + 'smt.verify: ', + await smt.verify(newBlockHeader.stateRoot, [proof.queries[0].key], proof), + ); + + const inclusionProof = { + key: proof.queries[0].key, + value: proof.queries[0].value, + bitmap: proof.queries[0].bitmap, + siblingHashes: proof.siblingHashes, + }; + + const inclusionProofWithHeightAndStateRoot = { + height: newBlockHeader.height, + stateRoot: newBlockHeader.stateRoot, + inclusionProof, + }; + + await inclusionProofModel.save(inclusionProofWithHeightAndStateRoot); + console.log(`Successfully stored inclusion proof at height ${newBlockHeader.height}`); + }); + + mainchainClient.subscribe('chain_newBlock', async (_data?: Record) => { + const sidechainAccount = await mainchainClient.invoke( + 'interoperability_getChainAccount', + { chainID: sidechainNodeInfo.chainID }, + ); + let lastCertifiedHeight = sidechainAccount.lastCertificate.height; + console.log(`sidechainAccount.lastCertificate.height: ${lastCertifiedHeight}`); + + if (sidechainAccount.status === ChainStatus.TERMINATED) { + // Create recovery transaction + const inclusionProofAtLastCertifiedHeight = await inclusionProofModel.getByHeight( + lastCertifiedHeight, + ); + console.log(`inclusionProofAtLastCertifiedHeight: ${inclusionProofAtLastCertifiedHeight}`); + if (!inclusionProofAtLastCertifiedHeight) { + console.log(`No inclusionProof exists at a given height: ${lastCertifiedHeight}`); + } + + if (inclusionProofAtLastCertifiedHeight) { + const smt = new db.SparseMerkleTree(); + + console.log('State Root: ', inclusionProofAtLastCertifiedHeight.stateRoot.toString('hex')); + console.log('recoveryKey: ', recoveryKey.toString('hex')); + console.log( + 'siblingHashes: ', + inclusionProofAtLastCertifiedHeight.inclusionProof.siblingHashes, + ); + console.log('queries: ', { + bitmap: inclusionProofAtLastCertifiedHeight.inclusionProof.bitmap, + key: inclusionProofAtLastCertifiedHeight.inclusionProof.key, + value: 
inclusionProofAtLastCertifiedHeight.inclusionProof.value, + }); + + console.log( + 'smt.verify: ', + await smt.verify(inclusionProofAtLastCertifiedHeight.stateRoot, [recoveryKey], { + siblingHashes: inclusionProofAtLastCertifiedHeight.inclusionProof.siblingHashes, + queries: [ + { + bitmap: inclusionProofAtLastCertifiedHeight.inclusionProof.bitmap, + key: inclusionProofAtLastCertifiedHeight.inclusionProof.key, + value: inclusionProofAtLastCertifiedHeight.inclusionProof.value, + }, + ], + }), + ); + + const messageRecoveryInitializationParams: MessageRecoveryInitializationParams = { + // chainID: The ID of the sidechain whose terminated outbox account is to be initialized. + chainID: Buffer.from(sidechainNodeInfo.chainID as string, 'hex'), + // channel: The channel of this chain stored on the terminated sidechain. + channel: codec.encode( + channelSchema, + channelDataJSONToObj( + await sidechainClient.invoke('interoperability_getChannel', { + chainID: mainchainNodeInfo.chainID, + }), + ), + ), + // bitmap: The bitmap of the inclusion proof of the channel in the sidechain state tree. 
+ bitmap: inclusionProofAtLastCertifiedHeight.inclusionProof.bitmap, + siblingHashes: inclusionProofAtLastCertifiedHeight.inclusionProof.siblingHashes, + }; + + const tx = new Transaction({ + module: MODULE_NAME_INTEROPERABILITY, + command: 'initializeMessageRecovery', + fee: BigInt(5450000000), + params: codec.encodeJSON( + messageRecoveryInitializationParamsSchema, + messageRecoveryInitializationParams, + ), + nonce: BigInt( + ( + await mainchainClient.invoke<{ nonce: string }>('auth_getAuthAccount', { + address: cryptography.address.getLisk32AddressFromPublicKey( + Buffer.from(relayerKeyInfo.publicKey, 'hex'), + ), + }) + ).nonce, + ), + senderPublicKey: Buffer.from(relayerKeyInfo.publicKey, 'hex'), + signatures: [], + }); + + tx.sign( + Buffer.from(mainchainNodeInfo.chainID as string, 'hex'), + Buffer.from(relayerKeyInfo.privateKey, 'hex'), + ); + + console.log('Final transaction to be sent to tx_pool: ', tx.getBytes().toString('hex')); + } + + await inclusionProofModel.deleteProofsUntilHeight(lastCertifiedHeight); + process.exit(0); + } + }); +})(); diff --git a/examples/interop/messageRecovery/messageRecovery.ts b/examples/interop/messageRecovery/messageRecovery.ts new file mode 100644 index 00000000000..106cf9fb538 --- /dev/null +++ b/examples/interop/messageRecovery/messageRecovery.ts @@ -0,0 +1,245 @@ +import { + cryptography, + codec, + CCMsg, + ccmSchema, + apiClient, + Transaction, + db, + MODULE_NAME_INTEROPERABILITY, + messageRecoveryParamsSchema, +} from 'lisk-sdk'; + +// to transfer some LSK, we can use this script - examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts + +import { join } from 'path'; +import { ensureDir } from 'fs-extra'; +import { checkDBError } from '@liskhq/lisk-framework-chain-connector-plugin/dist-node/db'; +import { MerkleTree } from '@liskhq/lisk-tree'; +import { utils } from '@liskhq/lisk-cryptography'; +import * as os from 'os'; + +export const relayerKeyInfo = { + address: 
'lsk952ztknjoa3h58es4vgu5ovnoscv3amo7zg4zz', + keyPath: "m/44'/134'/3'", + publicKey: '8960f85f7ab3cc473f29c3a00e6ad66c569f2a84125388274a4f382e11306099', + privateKey: + 'a16702175f750eab29ba286a1e30c86eb6057b2aa8547925a1139341d50ee16c8960f85f7ab3cc473f29c3a00e6ad66c569f2a84125388274a4f382e11306099', + plain: { + generatorKeyPath: "m/25519'/134'/0'/3'", + generatorKey: '60df111d5d97bf45c426d889673a8f04499ba312480b1d913fc49c5a77908b83', + generatorPrivateKey: + 'b623e9d77567fee9c7ea6502275c19849cf5ded916aa5c967835144d5f1295d560df111d5d97bf45c426d889673a8f04499ba312480b1d913fc49c5a77908b83', + blsKeyPath: 'm/12381/134/0/3', + blsKey: + '99b210271475977210d5e92c02e325f011706c5c9fc3861ecfdb8163e078ed1214e8710669e1625b30899c624305bd0e', + blsProofOfPossession: + 'a4486989094ac9225362084642072777ff0a028d89cc735908ad267be53827821093e34f960857140882e2b062f1a02e193ce9f2ad765268ed82fe462e4755dd378d8edf220d1395c9687a3c88f1fc48a5990ebb43585516e18d7228f0b8b9fd', + blsPrivateKey: '3d34f3e44a5ce6b2a3c7b79be6ab76ece0fa46749cf66c41e4d000c6ae3353b6', + }, + encrypted: {}, +}; + +const ccmsInfoSchema = { + $id: 'msgRecoveryPlugin/ccmsFromEvents', + type: 'object', + properties: { + ccms: { + type: 'array', + fieldNumber: 1, + items: { + ...ccmSchema, + }, + }, + }, +}; + +interface CCMsInfo { + ccms: CCMsg[]; +} + +export interface Proof { + readonly siblingHashes: ReadonlyArray; + readonly idxs: ReadonlyArray; + readonly size: number; +} + +/** + * Sequence of steps. 
Also, some steps are mentioned in `initializeMessageRecovery.ts` + * + * pm2 stop all + * rm -rf ~/.lisk + * ./start_nodes + * ts-node ./messageRecovery/events/parse_events.ts (start parsing events) + * + * -------------------- + * + * Make sure ```exports.LIVENESS_LIMIT = 2592000;``` in ```lisk-framework/dist-node/modules/interoperability/constants.js``` + * ts-node pos-mainchain-fast/config/scripts/sidechain_registration.ts (Register sidechain (keep chain connector ON)) + * ts-node pos-sidechain-example-one/config/scripts/mainchain_registration.ts + * + * + * Wait till nodes status change to ACTIVE (as initially they are in REGISTERED status)(check `interoperability_getChainAccount` endpoint) + * + * Start saving inclusion proofs + * - ts-node ./messageRecovery/initializeMessageRecovery.ts (in new console tab/window) + * + * Change constant in ```exports.LIVENESS_LIMIT = 30;``` in ```/lisk-sdk/examples/interop/pos-mainchain-fast/node_modules/lisk-framework/dist-node/modules/interoperability/constants.js``` + * Wait for at least 30 sec + * + * ------------------ + * + * - Turn OFF chain connector plugin on mainchain + * - Make crossChainTransfer on mainchain to sidechain + * - Make the sidechain terminate on mainchain (because of Liveness) + * - Submit Liveness termination transaction on mainchain + * + * Now you are ready to recover. + * By this time you should have below CCMs with idx in sidechain(outbox) on mainchain, + * + * 0. registrationCCM + * 1. crossChainTransferCCM + * 2. terminationCCM + * + * You can now try to recover 1. 
crossChainTransferCCM (where the balance should return to the sender) + */ + +(async () => { + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-one`); + const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); + + const sidechainClient = await apiClient.createIPCClient(`~/.lisk/pos-sidechain-example-one`); + const sidechainNodeInfo = await sidechainClient.invoke('system_getNodeInfo'); + + // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#message-recovery-from-the-sidechain-channel-outbox + // TODO: The proof of inclusion for the pending CCMs into the outboxRoot property of the terminated outbox account has to be available. + + /** + * // LIP 54 + * ``` + * Notice that the message recovery mechanism requires that the channel outbox is stored in the chain where the commands are processed. + * In the SDK 6, sidechain channels can only be stored on the mainchain. This means that the message recovery mechanism + * would only work on the mainchain. + */ + + // This mechanism allows to recover any CCM pending in the sidechain channel outbox. + // sidechain channel is stored on mainchain (during sidechain registration process - LIP 43) + + // All cross-chain messages must have the correct format, which is checked by the following logic: + // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0049.md#validateformat + + // ``` The pending CCMs to be recovered have to be available to the sender of the recovery command. 
``` + // Before preparing this array, it's worth to check Verification section of `Message Recovery Command` + // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#verification-1 + + type KVStore = db.Database; + const DB_KEY_EVENTS = Buffer.from([1]); + + class EventsModel { + private readonly _db: KVStore; + + public constructor(db: KVStore) { + this._db = db; + } + + public async close() { + await this._db.close(); + } + + public async getCCMs(): Promise { + let ccms: CCMsg[] = []; + try { + const encodedInfo = await this._db.get(DB_KEY_EVENTS); + ccms = codec.decode(ccmsInfoSchema, encodedInfo).ccms; + } catch (error) { + checkDBError(error); + } + return ccms; + } + } + + const getDBInstance = async (dataPath: string, dbName = 'events.db'): Promise => { + const dirPath = join(dataPath.replace('~', os.homedir()), 'plugins/data', dbName); + console.log(`dirPath: ${dirPath}`); + + await ensureDir(dirPath); + return new db.Database(dirPath); + }; + + const toBytes = (ccm: CCMsg) => codec.encode(ccmSchema, ccm); + + const LEAF_PREFIX = Buffer.from('00', 'hex'); + const eventsModel = new EventsModel(await getDBInstance('~/.lisk')); + const merkleTree = new MerkleTree(); + + const ccms = await eventsModel.getCCMs(); + console.log(ccms); + + const transferCrossChainCCM = ccms.filter( + ccm => ccm.crossChainCommand === 'transferCrossChain', + )[0]; + console.log('Pending token transfer CCM to recover: ', transferCrossChainCCM); + + await merkleTree.init(ccms.map(ccm => toBytes(ccm))); + console.log('merkleTree.root: ', merkleTree.root); + + const queryHash = utils.hash( + Buffer.concat( + [LEAF_PREFIX, toBytes(transferCrossChainCCM)], + LEAF_PREFIX.length + toBytes(transferCrossChainCCM).length, + ), + ); + + const queryHashes = [queryHash]; + console.log('queryHashes: ', queryHashes); + + const proof = await merkleTree.generateProof(queryHashes); + console.log('merkleTree: ', merkleTree); + console.log('merkleTree.generateProof: ', proof); + + interface 
MessageRecoveryParams { + chainID: Buffer; + crossChainMessages: Buffer[]; + idxs: number[]; + siblingHashes: Buffer[]; + } + + const messageRecoveryParams: MessageRecoveryParams = { + chainID: sidechainNodeInfo.chainID as Buffer, + crossChainMessages: [toBytes(transferCrossChainCCM)], + idxs: proof.idxs as number[], + siblingHashes: proof.siblingHashes as Buffer[], + }; + + // PRE-REQUISITE: examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts + // Final transaction to be submitted + + // In case of recovery, it will simply swap sending/receiving chains & run each CCM in input crossChainMessages[] again + // LIP 54: ```def applyRecovery(trs: Transaction, ccm: CCM) -> None:``` + const tx = new Transaction({ + module: MODULE_NAME_INTEROPERABILITY, + // COMMAND_RECOVER_MESSAGE string "recoverMessage" Name of message recovery command. (LIP 45) + command: 'recoverMessage', + fee: BigInt(5450000000), + params: codec.encodeJSON(messageRecoveryParamsSchema, messageRecoveryParams), + nonce: BigInt( + ( + await mainchainClient.invoke<{ nonce: string }>('auth_getAuthAccount', { + address: cryptography.address.getLisk32AddressFromPublicKey( + Buffer.from(relayerKeyInfo.publicKey, 'hex'), + ), + }) + ).nonce, + ), + senderPublicKey: Buffer.from(relayerKeyInfo.publicKey, 'hex'), + signatures: [], + }); + + tx.sign( + Buffer.from(mainchainNodeInfo.chainID as string, 'hex'), + Buffer.from(relayerKeyInfo.privateKey, 'hex'), + ); + + console.log('Final transaction to be posted to tx_pool: ', tx.getBytes().toString('hex')); + process.exit(0); +})(); diff --git a/examples/interop/messageRecovery/parse_events.ts b/examples/interop/messageRecovery/parse_events.ts new file mode 100644 index 00000000000..8b0e4593cf6 --- /dev/null +++ b/examples/interop/messageRecovery/parse_events.ts @@ -0,0 +1,210 @@ +// The complete Merkle tree with root equal to the last value of the outboxRoot property of the terminated outbox account +// can be computed from the history 
of the Lisk mainchain + +import { + chain, + CCMsg, + JSONObject, + MODULE_NAME_INTEROPERABILITY, + Schema, + apiClient, + db, + db as liskDB, +} from 'lisk-sdk'; +import { codec } from '@liskhq/lisk-codec'; +import { CcmSendSuccessEventData, CcmProcessedEventData, ccmSchema } from 'lisk-framework'; +import { EVENT_NAME_CCM_PROCESSED } from 'lisk-framework/dist-node/modules/interoperability/constants'; +import { join } from 'path'; +import * as os from 'os'; +import { ensureDir } from 'fs-extra'; + +export const checkDBError = (error: Error | unknown) => { + if (!(error instanceof liskDB.NotFoundError)) { + throw error; + } +}; + +type ModuleMetadata = { + stores: { key: string; data: Schema }[]; + events: { name: string; data: Schema }[]; + name: string; +}; + +type ModulesMetadata = [ModuleMetadata]; + +interface Data { + readonly blockHeader: chain.BlockHeaderJSON; +} + +const getInteropAndTokenModulesMetadata = async (mainchainClient: apiClient.APIClient) => { + const { modules: modulesMetadata } = await mainchainClient.invoke<{ modules: ModulesMetadata }>( + 'system_getMetadata', + ); + const interoperabilityMetadata = modulesMetadata.find( + metadata => metadata.name === MODULE_NAME_INTEROPERABILITY, + ); + if (!interoperabilityMetadata) { + throw new Error(`No metadata found for ${MODULE_NAME_INTEROPERABILITY} module.`); + } + + const tokenMetadata = modulesMetadata.find(metadata => metadata.name === 'token'); + if (!tokenMetadata) { + throw new Error(`No metadata found for token module.`); + } + + return [interoperabilityMetadata, tokenMetadata]; +}; + +type KVStore = db.Database; +const DB_KEY_EVENTS = Buffer.from([1]); + +const getDBInstance = async (dataPath: string, dbName = 'events.db'): Promise => { + const dirPath = join(dataPath.replace('~', os.homedir()), 'plugins/data', dbName); + console.log(`dirPath: ${dirPath}`); + + await ensureDir(dirPath); + return new db.Database(dirPath); +}; + +const ccmsInfoSchema = { + $id: 
'msgRecoveryPlugin/ccmsFromEvents', + type: 'object', + properties: { + ccms: { + type: 'array', + fieldNumber: 1, + items: { + ...ccmSchema, + }, + }, + }, +}; + +interface CCMsInfo { + ccms: CCMsg[]; +} + +class EventsModel { + private readonly _db: KVStore; + + public constructor(db: KVStore) { + this._db = db; + } + + public async close() { + await this._db.close(); + } + + public async getCCMs(): Promise { + let ccms: CCMsg[] = []; + try { + const encodedInfo = await this._db.get(DB_KEY_EVENTS); + ccms = codec.decode(ccmsInfoSchema, encodedInfo).ccms; + } catch (error) { + checkDBError(error); + } + return ccms; + } + + public async setCCMs(ccms: CCMsg[]) { + const encodedInfo = codec.encode(ccmsInfoSchema, { ccms }); + await this._db.set(DB_KEY_EVENTS, encodedInfo); + } +} + +// It should be run after all nodes have started +// Then we need to run `ts-node pos-mainchain-fast/config/scripts/sidechain_registration.ts` (note the change: const SIDECHAIN_ARRAY = ['one']) +// & then ts-node pos-sidechain-example-one/config/scripts/mainchain_registration.ts (```one```) +(async () => { + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-one`); + const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); + + const eventsDb = await getDBInstance('~/.lisk'); + const eventsModel = new EventsModel(eventsDb); + + mainchainClient.subscribe('chain_newBlock', async (data?: Record) => { + const { blockHeader: receivedBlock } = data as unknown as Data; + const newBlockHeader = chain.BlockHeader.fromJSON(receivedBlock).toObject(); + console.log('\n'); + console.log( + `Received new block ${newBlockHeader.height} on mainchain ${mainchainNodeInfo.chainID}`, + ); + + const allCCMs = await eventsModel.getCCMs(); + console.log('allCCMs => ', allCCMs); + + // Check for events if any and store them + const blockEvents = await mainchainClient.invoke>( + 'chain_getEvents', + { height: newBlockHeader.height }, + ); + + const ccmsFromEvents: 
CCMsg[] = []; + const interopMetadata = (await getInteropAndTokenModulesMetadata(mainchainClient))[0]; + + const getEventsByName = (name: string) => { + return blockEvents.filter( + eventAttr => eventAttr.module === MODULE_NAME_INTEROPERABILITY && eventAttr.name === name, + ); + }; + + const getEventData = (name: string): Schema => { + const eventInfo = interopMetadata.events.filter(event => event.name === name); + if (!eventInfo?.[0]?.data) { + throw new Error(`No schema found for ${name} event data.`); + } + return eventInfo?.[0]?.data; + }; + + const parseCcmSendSuccessEvents = () => { + const eventsByName = getEventsByName('ccmSendSuccess'); + if (eventsByName) { + const data = getEventData('ccmSendSuccess'); + for (const ccmSentSuccessEvent of eventsByName) { + const ccmSendSuccessEventData = codec.decode( + data, + Buffer.from(ccmSentSuccessEvent.data, 'hex'), + ); + console.log('ccmSendSuccessEventData => ', ccmSendSuccessEventData); + + // Do we need to filter based on `ccm.sendingChainID = mainchain ? + const ccm = ccmSendSuccessEventData.ccm; + if (ccm.sendingChainID.equals(Buffer.from('04000000', 'hex'))) { + ccmsFromEvents.push(ccm); + console.log('ccmsFromEvents.length:::::::::::::::: ', ccmsFromEvents.length); + } + } + } + }; + + const parseCcmProcessedEvents = () => { + const eventsByName = getEventsByName(EVENT_NAME_CCM_PROCESSED); + if (eventsByName) { + const data = getEventData(EVENT_NAME_CCM_PROCESSED); + for (const ccmProcessedEvent of eventsByName) { + const ccmProcessedEventData = codec.decode( + data, + Buffer.from(ccmProcessedEvent.data, 'hex'), + ); + console.log('ccmProcessedEventData => ', ccmProcessedEventData); + + // Do we need to filter based on `ccm.sendingChainID = mainchain ? 
+ const ccm = ccmProcessedEventData.ccm; + if (ccm.sendingChainID.equals(Buffer.from('04000000', 'hex'))) { + ccmsFromEvents.push(ccm); + } + } + } + }; + + parseCcmSendSuccessEvents(); + parseCcmProcessedEvents(); + + for (const ccmFromEvent of ccmsFromEvents) { + allCCMs.push(ccmFromEvent); + } + console.log('allCCMs.length(AFTER push): ', allCCMs.length); + + await eventsModel.setCCMs(allCCMs); + }); +})(); diff --git a/examples/interop/messageRecovery/start_nodes.sh b/examples/interop/messageRecovery/start_nodes.sh new file mode 100644 index 00000000000..9bca9d122ff --- /dev/null +++ b/examples/interop/messageRecovery/start_nodes.sh @@ -0,0 +1,64 @@ +#!/bin/sh + +if [ $1 == "--reset" ]; then + echo "*** Clearing Everything ..." + pm2 kill + # Storing Lisk Directory + LISK_PATH=$(pwd) + + cd ~/.lisk && rm -rf mainchain-node-* && rm -rf pos-sidechain-example-* + + # Going back to Lisk Directory + cd $LISK_PATH +fi; + +cd ../.. +echo "*** Building lisk-sdk ..." +{ + yarn cache clean + yarn && yarn build +} || { + echo "***** Error building lisk-sdk *****" + exit +} + +echo "*** Building pos-mainchain-fast ..." +cd examples/interop/pos-mainchain-fast +{ + yarn cache clean + yarn --registry https://npm.lisk.com && yarn build +} || { + echo "***** Error building pos-mainchain-fast *****" + exit +} +cd .. + +echo "*** Building pos-sidechain-example-one ..." +cd pos-sidechain-example-one +{ + yarn cache clean + yarn --registry https://npm.lisk.com && yarn build +} || { + echo "***** Error building pos-sidechain-example-one *****" + exit + } +cd .. + +echo "*** Building pos-sidechain-example-two ..." +cd pos-sidechain-example-two +{ + yarn cache clean + yarn --registry https://npm.lisk.com && yarn build +} || { + echo "***** Error building pos-sidechain-example-two *****" + exit +} +cd .. + +cd pos-mainchain-fast +pm2 start config/mainchain_node_one.sh +pm2 start config/mainchain_node_two.sh +cd .. +pm2 start run_sidechains.json + +echo "All nodes started ..." 
From efb5b6dccb957374cbc0986cf3382d9fea8d7c56 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Tue, 6 Jun 2023 05:28:25 +0200 Subject: [PATCH 050/170] Update tests --- framework/src/modules/nft/internal_method.ts | 6 +++--- .../unit/modules/nft/cc_comands/cc_transfer.spec.ts | 10 ---------- .../test/unit/modules/nft/internal_method.spec.ts | 8 ++++---- 3 files changed, 7 insertions(+), 17 deletions(-) diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index dd00db0430e..faf6f94f401 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -137,10 +137,10 @@ export class InternalMethod extends BaseMethod { await this._method.destroy(methodContext, senderAddress, nftID); } - let attributes: { module: string; attributes: Buffer }[] = []; + let attributesArray: { module: string; attributes: Buffer }[] = []; if (includeAttributes) { - attributes = nft.attributesArray; + attributesArray = nft.attributesArray; } this.events.get(TransferCrossChainEvent).log(methodContext, { @@ -163,7 +163,7 @@ export class InternalMethod extends BaseMethod { nftID, senderAddress, recipientAddress, - attributes, + attributesArray, data, }), ); diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index e0fffcd6232..5c005cbfb95 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -532,15 +532,10 @@ describe('CrossChain Transfer Command', () => { methodContext, userStore.getKey(recipientAddress, nftID), ); - const escrowAccountExists = await escrowStore.has( - methodContext, - escrowStore.getKey(sendingChainID, nftID), - ); expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); expect(nftStoreData.owner).toStrictEqual(recipientAddress); 
expect(nftStoreData.attributesArray).toEqual(attributesArray); expect(userAccountExists).toBe(true); - expect(escrowAccountExists).toBe(true); checkEventResult(context.eventQueue, 1, CcmTransferEvent, 0, { senderAddress, recipientAddress, @@ -595,16 +590,11 @@ describe('CrossChain Transfer Command', () => { methodContext, userStore.getKey(senderAddress, nftID), ); - const escrowAccountExists = await escrowStore.has( - methodContext, - escrowStore.getKey(sendingChainID, nftID), - ); expect(feeMethod.payFee).not.toHaveBeenCalled(); expect(nftStoreData.owner).toStrictEqual(senderAddress); expect(nftStoreData.attributesArray).toEqual(attributesArray); expect(userAccountExistsForRecipient).toBe(false); expect(userAccountExistsForSender).toBe(true); - expect(escrowAccountExists).toBe(true); checkEventResult(context.eventQueue, 1, CcmTransferEvent, 0, { senderAddress, recipientAddress: senderAddress, diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 6b2d6a79f6a..270a29d3786 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -211,7 +211,7 @@ describe('InternalMethod', () => { nftID, senderAddress, recipientAddress, - attributes: [], + attributesArray: [], data, }); @@ -288,7 +288,7 @@ describe('InternalMethod', () => { nftID, senderAddress, recipientAddress, - attributes: [], + attributesArray: [], data, }); @@ -366,7 +366,7 @@ describe('InternalMethod', () => { nftID, senderAddress, recipientAddress, - attributes: attributesArray, + attributesArray, data, }); @@ -450,7 +450,7 @@ describe('InternalMethod', () => { nftID, senderAddress, recipientAddress, - attributes: attributesArray, + attributesArray, data, }); From eff844a505cb85fd8407dbbffa8f5912cf976366 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Tue, 6 Jun 2023 15:34:39 +0200 Subject: [PATCH 051/170] Add internal 
function per feedback --- framework/src/modules/nft/cc_commands/cc_transfer.ts | 2 ++ framework/src/modules/nft/internal_method.ts | 9 +++++++++ framework/src/modules/nft/schemas.ts | 2 +- 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index e172160896e..9a2331926d0 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -104,6 +104,8 @@ export class CrossChainTransferCommand extends BaseCCCommand { const storeData = await nftStore.get(getMethodContext(), nftID); if (status === CCM_STATUS_CODE_OK) { storeData.owner = recipientAddress; + // commented line below can be used by custom modules when defining their own logic for getNewAttributes function + // storeData.attributesArray = this._internalMethod.getNewAttributes(nftID, storeData.attributesArray, params.attributesArray); await nftStore.save(getMethodContext(), nftID, storeData); await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index faf6f94f401..2af97f56df5 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -172,4 +172,13 @@ export class InternalMethod extends BaseMethod { public getOwnChainID(): Buffer { return this._config.ownChainID; } + + // template for custom module to be able to define their own logic as described in https://github.com/LiskHQ/lips/blob/main/proposals/lip-0052.md#attributes + public getNewAttributes( + _nftID: Buffer, + storedAttributes: NFTAttributes[], + _receivedAttributes: NFTAttributes[], + ): NFTAttributes[] { + return storedAttributes; + } } diff --git a/framework/src/modules/nft/schemas.ts 
b/framework/src/modules/nft/schemas.ts index 00cadae89a7..4bccf862491 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -92,7 +92,7 @@ export const crossChainNFTTransferMessageParamsSchema = { export interface CCTransferMessageParams { nftID: Buffer; - attributes: { module: string; attributes: Buffer }[]; + attributesArray: { module: string; attributes: Buffer }[]; senderAddress: Buffer; recipientAddress: Buffer; data: string; From 0ae50b12b495ca319d9bca4238d45aa8dfe18d46 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Wed, 7 Jun 2023 12:13:41 +0200 Subject: [PATCH 052/170] transferCrossChainInternal Method for NFT Module (#8520) * :seedling: Adds EscrowStore#getKey * :bug: Fixes schema for DestroyEvent * :recycle: Adds result parameter to DestroyEvent#log * :seedling: Adds NFTMethod.getChainID * :seedling: Adds NFTMethod.destroy * :seedling: Adds InternalMethod.createEscrowEntry * :recycle: test for InternalMethod * :seedling: Adds InternalMethod.transferCrossChainInternal * :recycle: /nft/crossChainNFTTransferMessageParamsSchema * :recycle: specs for NFTMethod * :recycle: NFTMethod.destroy * :recycle: NFTMethod.destroy consumes NFTMethod.getLockingModule * :rewind: NFTMethod.destroy consumes NFTMethod.getLockingModule * :white_check_mark: for NFTMethod.destroy * :recycle: :white_check_mark: for NFTMethod.destroy Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> --------- Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> --- framework/src/modules/nft/constants.ts | 4 +- framework/src/modules/nft/events/destroy.ts | 13 +- framework/src/modules/nft/internal_method.ts | 91 ++++- framework/src/modules/nft/method.ts | 90 ++++- framework/src/modules/nft/schemas.ts | 52 ++- framework/src/modules/nft/stores/escrow.ts | 4 + framework/src/modules/nft/types.ts | 4 +- .../unit/modules/nft/internal_method.spec.ts | 370 +++++++++++++++++- 
.../test/unit/modules/nft/method.spec.ts | 183 ++++++++- .../unit/modules/nft/stores/escrow.spec.ts | 36 ++ 10 files changed, 824 insertions(+), 23 deletions(-) create mode 100644 framework/test/unit/modules/nft/stores/escrow.spec.ts diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index 323e8dfc67f..a63d4095872 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -18,7 +18,9 @@ export const LENGTH_COLLECTION_ID = 4; export const MIN_LENGTH_MODULE_NAME = 1; export const MAX_LENGTH_MODULE_NAME = 32; export const LENGTH_ADDRESS = 20; -export const NFT_NOT_LOCKED = 'nft'; +export const MODULE_NAME_NFT = 'nft'; +export const NFT_NOT_LOCKED = MODULE_NAME_NFT; +export const CROSS_CHAIN_COMMAND_NAME_TRANSFER = 'crossChainTransfer'; export const enum NftEventResult { RESULT_SUCCESSFUL = 0, diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts index 1294f466ba9..3475c03a869 100644 --- a/framework/src/modules/nft/events/destroy.ts +++ b/framework/src/modules/nft/events/destroy.ts @@ -33,7 +33,7 @@ export const createEventSchema = { nftID: { dataType: 'bytes', minLength: LENGTH_NFT_ID, - maxLenght: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, fieldNumber: 2, }, result: { @@ -46,10 +46,11 @@ export const createEventSchema = { export class DestroyEvent extends BaseEvent { public schema = createEventSchema; - public log(ctx: EventQueuer, data: DestroyEventData): void { - this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ - data.address, - data.nftID, - ]); + public log( + ctx: EventQueuer, + data: DestroyEventData, + result: NftEventResult = NftEventResult.RESULT_SUCCESSFUL, + ): void { + this.add(ctx, { ...data, result }, [data.address, data.nftID]); } } diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index 16f21c62cb1..c50a2ac36d8 100644 --- 
a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -12,23 +12,23 @@ * Removal or modification of this copyright notice is prohibited. */ +import { codec } from '@liskhq/lisk-codec'; import { BaseMethod } from '../base_method'; import { NFTStore, NFTAttributes } from './stores/nft'; import { InteroperabilityMethod, ModuleConfig } from './types'; import { MethodContext } from '../../state_machine'; import { TransferEvent } from './events/transfer'; import { UserStore } from './stores/user'; -import { NFT_NOT_LOCKED } from './constants'; +import { CROSS_CHAIN_COMMAND_NAME_TRANSFER, MODULE_NAME_NFT, NFT_NOT_LOCKED } from './constants'; import { NFTMethod } from './method'; +import { EscrowStore } from './stores/escrow'; +import { TransferCrossChainEvent } from './events/transfer_cross_chain'; +import { CCM_STATUS_OK } from '../token/constants'; +import { crossChainNFTTransferMessageParamsSchema } from './schemas'; export class InternalMethod extends BaseMethod { - // @ts-expect-error TODO: unused error. Remove when implementing. private _config!: ModuleConfig; - - // @ts-expect-error TODO: unused error. Remove when implementing. private _method!: NFTMethod; - - // @ts-expect-error TODO: unused error. Remove when implementing. 
private _interoperabilityMethod!: InteroperabilityMethod; public init(config: ModuleConfig): void { @@ -40,6 +40,16 @@ export class InternalMethod extends BaseMethod { this._interoperabilityMethod = interoperabilityMethod; } + public async createEscrowEntry( + methodContext: MethodContext, + receivingChainID: Buffer, + nftID: Buffer, + ): Promise { + const escrowStore = this.stores.get(EscrowStore); + + await escrowStore.set(methodContext, escrowStore.getKey(receivingChainID, nftID), {}); + } + public async createUserEntry( methodContext: MethodContext, address: Buffer, @@ -89,4 +99,73 @@ export class InternalMethod extends BaseMethod { nftID, }); } + + public async transferCrossChainInternal( + methodContext: MethodContext, + senderAddress: Buffer, + recipientAddress: Buffer, + nftID: Buffer, + receivingChainID: Buffer, + messageFee: bigint, + data: string, + includeAttributes: boolean, + ): Promise { + const chainID = this._method.getChainID(nftID); + const nftStore = this.stores.get(NFTStore); + const nft = await nftStore.get(methodContext, nftID); + + if (chainID.equals(this._config.ownChainID)) { + const escrowStore = this.stores.get(EscrowStore); + const userStore = this.stores.get(UserStore); + + nft.owner = receivingChainID; + await nftStore.save(methodContext, nftID, nft); + + await userStore.del(methodContext, userStore.getKey(senderAddress, nftID)); + + const escrowExists = await escrowStore.has( + methodContext, + escrowStore.getKey(receivingChainID, nftID), + ); + + if (!escrowExists) { + await this.createEscrowEntry(methodContext, receivingChainID, nftID); + } + } + + if (chainID.equals(receivingChainID)) { + await this._method.destroy(methodContext, senderAddress, nftID); + } + + let attributesArray: { module: string; attributes: Buffer }[] = []; + + if (includeAttributes) { + attributesArray = nft.attributesArray; + } + + this.events.get(TransferCrossChainEvent).log(methodContext, { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + 
includeAttributes, + }); + + await this._interoperabilityMethod.send( + methodContext, + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray, + data, + }), + ); + } } diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index cccadfa8523..502812ea3ff 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -14,9 +14,10 @@ import { BaseMethod } from '../base_method'; import { InteroperabilityMethod, ModuleConfig } from './types'; import { NFTStore } from './stores/nft'; -import { ImmutableMethodContext } from '../../state_machine'; -import { LENGTH_CHAIN_ID } from './constants'; +import { ImmutableMethodContext, MethodContext } from '../../state_machine'; +import { LENGTH_CHAIN_ID, LENGTH_NFT_ID, NFT_NOT_LOCKED, NftEventResult } from './constants'; import { UserStore } from './stores/user'; +import { DestroyEvent } from './events/destroy'; export class NFTMethod extends BaseMethod { // @ts-expect-error TODO: unused error. Remove when implementing. 
@@ -32,6 +33,14 @@ export class NFTMethod extends BaseMethod { this._interoperabilityMethod = interoperabilityMethod; } + public getChainID(nftID: Buffer): Buffer { + if (nftID.length !== LENGTH_NFT_ID) { + throw new Error(`NFT ID must have length ${LENGTH_NFT_ID}`); + } + + return nftID.slice(0, LENGTH_CHAIN_ID); + } + public async getNFTOwner(methodContext: ImmutableMethodContext, nftID: Buffer): Promise { const nftStore = this.stores.get(NFTStore); @@ -61,4 +70,81 @@ export class NFTMethod extends BaseMethod { return userData.lockingModule; } + + public async destroy( + methodContext: MethodContext, + address: Buffer, + nftID: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + + const nftExists = await nftStore.has(methodContext, nftID); + + if (!nftExists) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + + throw new Error('NFT substore entry does not exist'); + } + + const owner = await this.getNFTOwner(methodContext, nftID); + + if (owner.length === LENGTH_CHAIN_ID) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + + throw new Error('NFT is escrowed to another chain'); + } + + if (!owner.equals(address)) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + + throw new Error('Not initiated by the NFT owner'); + } + + const userStore = this.stores.get(UserStore); + const userKey = userStore.getKey(owner, nftID); + const { lockingModule } = await userStore.get(methodContext, userKey); + + if (lockingModule !== NFT_NOT_LOCKED) { + this.events.get(DestroyEvent).log( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + + throw new Error('Locked NFTs cannot be destroyed'); + } + + await nftStore.del(methodContext, nftID); + + await userStore.del(methodContext, 
userKey); + + this.events.get(DestroyEvent).log(methodContext, { + address, + nftID, + }); + } } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 2c0cee0da4e..100bd5c1e15 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -13,7 +13,7 @@ */ import { MAX_DATA_LENGTH } from '../token/constants'; -import { LENGTH_NFT_ID } from './constants'; +import { LENGTH_NFT_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from './constants'; export const transferParamsSchema = { $id: '/lisk/nftTransferParams', @@ -39,3 +39,53 @@ export const transferParamsSchema = { }, }, }; + +export const crossChainNFTTransferMessageParamsSchema = { + $id: '/lisk/crossChainNFTTransferMessageParamsSchmema', + type: 'object', + required: ['nftID', 'senderAddress', 'recipientAddress', 'attributesArray', 'data'], + properties: { + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 1, + }, + senderAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 2, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 3, + }, + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + data: { + dataType: 'string', + maxLength: MAX_DATA_LENGTH, + fieldNumber: 5, + }, + }, +}; diff --git a/framework/src/modules/nft/stores/escrow.ts b/framework/src/modules/nft/stores/escrow.ts index 719bf0b7fbe..b5d224088bd 100644 --- a/framework/src/modules/nft/stores/escrow.ts +++ b/framework/src/modules/nft/stores/escrow.ts @@ -25,4 +25,8 @@ type EscrowStoreData = Record; export class EscrowStore extends BaseStore { public schema = 
escrowStoreSchema; + + public getKey(receivingChainID: Buffer, nftID: Buffer): Buffer { + return Buffer.concat([receivingChainID, nftID]); + } } diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 40fa051c2f8..74d123c56aa 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -16,7 +16,9 @@ import { MethodContext } from '../../state_machine'; import { CCMsg } from '../interoperability'; // eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface ModuleConfig {} +export interface ModuleConfig { + ownChainID: Buffer; +} export interface InteroperabilityMethod { send( diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 2e46a80553c..270a29d3786 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -20,26 +20,46 @@ import { EventQueue, createMethodContext } from '../../../../src/state_machine'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { + CROSS_CHAIN_COMMAND_NAME_TRANSFER, LENGTH_ADDRESS, + LENGTH_CHAIN_ID, LENGTH_NFT_ID, + MODULE_NAME_NFT, NFT_NOT_LOCKED, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { MethodContext } from '../../../../src/state_machine/method_context'; -import { TransferEvent } from '../../../../src/modules/nft/events/transfer'; +import { TransferEvent, TransferEventData } from '../../../../src/modules/nft/events/transfer'; import { UserStore } from '../../../../src/modules/nft/stores/user'; +import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; +import { NFTMethod } from '../../../../src/modules/nft/method'; +import { InteroperabilityMethod } 
from '../../../../src/modules/nft/types'; +import { + TransferCrossChainEvent, + TransferCrossChainEventData, +} from '../../../../src/modules/nft/events/transfer_cross_chain'; +import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; +import { CCM_STATUS_OK } from '../../../../src/modules/token/constants'; +import { crossChainNFTTransferMessageParamsSchema } from '../../../../src/modules/nft/schemas'; describe('InternalMethod', () => { const module = new NFTModule(); const internalMethod = new InternalMethod(module.stores, module.events); + const method = new NFTMethod(module.stores, module.events); + let interoperabilityMethod!: InteroperabilityMethod; + internalMethod.addDependencies(method, interoperabilityMethod); + + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + internalMethod.init({ ownChainID }); + let methodContext!: MethodContext; - const checkEventResult = ( + const checkEventResult = ( eventQueue: EventQueue, length: number, EventClass: any, index: number, - expectedResult: any, + expectedResult: EventDataType, result: any = 0, ) => { expect(eventQueue.getEvents()).toHaveLength(length); @@ -55,11 +75,12 @@ describe('InternalMethod', () => { const userStore = module.stores.get(UserStore); const nftStore = module.stores.get(NFTStore); + const escrowStore = module.stores.get(EscrowStore); const address = utils.getRandomBytes(LENGTH_ADDRESS); const senderAddress = utils.getRandomBytes(LENGTH_ADDRESS); const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + let nftID = utils.getRandomBytes(LENGTH_NFT_ID); beforeEach(() => { methodContext = createMethodContext({ @@ -69,6 +90,18 @@ describe('InternalMethod', () => { }); }); + describe('createEscrowEntry', () => { + it('should create an entry in EscrowStore', async () => { + const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await internalMethod.createEscrowEntry(methodContext, 
receivingChainID, nftID); + + await expect( + escrowStore.get(methodContext, escrowStore.getKey(receivingChainID, nftID)), + ).resolves.toEqual({}); + }); + }); + describe('createNFTEntry', () => { it('should create an entry in NFStore with attributes sorted by module', async () => { const unsortedAttributesArray = [ @@ -137,7 +170,7 @@ describe('InternalMethod', () => { lockingModule: NFT_NOT_LOCKED, }); - checkEventResult(methodContext.eventQueue, 1, TransferEvent, 0, { + checkEventResult(methodContext.eventQueue, 1, TransferEvent, 0, { senderAddress, recipientAddress, nftID, @@ -150,4 +183,331 @@ describe('InternalMethod', () => { ).rejects.toThrow('does not exist'); }); }); + + describe('transferCrossChainInternal', () => { + let receivingChainID: Buffer; + const messageFee = BigInt(1000); + const data = ''; + + beforeEach(() => { + receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + interoperabilityMethod = { + send: jest.fn().mockResolvedValue(Promise.resolve()), + error: jest.fn().mockResolvedValue(Promise.resolve()), + terminateChain: jest.fn().mockRejectedValue(Promise.resolve()), + }; + + internalMethod.addDependencies(method, interoperabilityMethod); + }); + + describe('if attributes are not included ccm contains empty attributes', () => { + const includeAttributes = false; + + it('should transfer the ownership of the NFT to the receiving chain and escrow it for a native NFT', async () => { + const chainID = ownChainID; + nftID = Buffer.concat([chainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]); + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray: [], + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + 
internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + await expect(nftStore.get(methodContext, nftID)).resolves.toEqual({ + owner: receivingChainID, + attributesArray: [], + }); + + await expect( + userStore.has(methodContext, userStore.getKey(senderAddress, nftID)), + ).resolves.toBeFalse(); + + await expect( + escrowStore.get(methodContext, escrowStore.getKey(receivingChainID, nftID)), + ).resolves.toEqual({}); + + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + + it('should destroy NFT if the chain ID of the NFT is the same as receiving chain', async () => { + nftID = Buffer.concat([ + receivingChainID, + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray: [], + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + checkEventResult(methodContext.eventQueue, 2, DestroyEvent, 0, { + 
address: senderAddress, + nftID, + }); + + checkEventResult( + methodContext.eventQueue, + 2, + TransferCrossChainEvent, + 1, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + }); + + describe('if attributes are included ccm contains attributes of the NFT', () => { + const includeAttributes = true; + + it('should transfer the ownership of the NFT to the receiving chain and escrow it for a native NFT', async () => { + const chainID = ownChainID; + nftID = Buffer.concat([chainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]); + + const attributesArray = [ + { + module: 'pos', + attributes: utils.getRandomBytes(20), + }, + ]; + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray, + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray, + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + await expect(nftStore.get(methodContext, nftID)).resolves.toEqual({ + owner: receivingChainID, + attributesArray, + }); + + await expect( + userStore.has(methodContext, userStore.getKey(senderAddress, nftID)), + ).resolves.toBeFalse(); + + await expect( + escrowStore.get(methodContext, escrowStore.getKey(receivingChainID, nftID)), + 
).resolves.toEqual({}); + + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + + it('should destroy NFT if the chain ID of the NFT is the same as receiving chain', async () => { + nftID = Buffer.concat([ + receivingChainID, + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + + const attributesArray = [ + { + module: 'pos', + attributes: utils.getRandomBytes(20), + }, + ]; + + const ccmParameters = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray, + data, + }); + + await nftStore.save(methodContext, nftID, { + owner: senderAddress, + attributesArray, + }); + + await userStore.set(methodContext, userStore.getKey(senderAddress, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await expect( + internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + + checkEventResult(methodContext.eventQueue, 2, DestroyEvent, 0, { + address: senderAddress, + nftID, + }); + + checkEventResult( + methodContext.eventQueue, + 2, + TransferCrossChainEvent, + 1, + { + senderAddress, + recipientAddress, + nftID, + receivingChainID, + includeAttributes, + }, + ); + + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenCalledOnce(); + expect(internalMethod['_interoperabilityMethod'].send).toHaveBeenNthCalledWith( + 1, + expect.anything(), + senderAddress, + MODULE_NAME_NFT, + 
CROSS_CHAIN_COMMAND_NAME_TRANSFER, + receivingChainID, + messageFee, + CCM_STATUS_OK, + ccmParameters, + ); + }); + }); + }); }); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 346f7473a59..ecd66c39a33 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -12,6 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ +import { codec } from '@liskhq/lisk-codec'; import { utils } from '@liskhq/lisk-cryptography'; import { NFTMethod } from '../../../../src/modules/nft/method'; import { NFTModule } from '../../../../src/modules/nft/module'; @@ -23,11 +24,14 @@ import { LENGTH_ADDRESS, LENGTH_CHAIN_ID, LENGTH_NFT_ID, + NFT_NOT_LOCKED, + NftEventResult, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { UserStore } from '../../../../src/modules/nft/stores/user'; +import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; -describe('NFTMethods', () => { +describe('NFTMethod', () => { const module = new NFTModule(); const method = new NFTMethod(module.stores, module.events); @@ -39,6 +43,25 @@ describe('NFTMethods', () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); let owner: Buffer; + const checkEventResult = ( + eventQueue: EventQueue, + length: number, + EventClass: any, + index: number, + expectedResult: EventDataType, + result: any = 0, + ) => { + expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + + const eventData = codec.decode>( + new EventClass('nft').schema, + eventQueue.getEvents()[index].toObject().data, + ); + + expect(eventData).toEqual({ ...expectedResult, result }); + }; + beforeEach(() => { owner = utils.getRandomBytes(LENGTH_ADDRESS); @@ -49,6 +72,18 @@ describe('NFTMethods', () => { }); }); + 
describe('getChainID', () => { + it('should throw if nftID has invalid length', () => { + expect(() => { + method.getChainID(utils.getRandomBytes(LENGTH_NFT_ID - 1)); + }).toThrow(`NFT ID must have length ${LENGTH_NFT_ID}`); + }); + + it('should return the first bytes of length LENGTH_CHAIN_ID from provided nftID', () => { + expect(method.getChainID(nftID)).toEqual(nftID.slice(0, LENGTH_CHAIN_ID)); + }); + }); + describe('getNFTOwner', () => { it('should fail if NFT does not exist', async () => { await expect(method.getNFTOwner(methodContext, nftID)).rejects.toThrow( @@ -101,4 +136,150 @@ describe('NFTMethods', () => { await expect(method.getLockingModule(methodContext, nftID)).resolves.toEqual(lockingModule); }); }); + + describe('destroy', () => { + let existingNFT: { nftID: any; owner: any }; + let lockedExistingNFT: { nftID: any; owner: any }; + let escrowedNFT: { nftID: any; owner: any }; + + beforeEach(async () => { + existingNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + lockedExistingNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + escrowedNFT = { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + await nftStore.save(methodContext, existingNFT.nftID, { + owner: existingNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await nftStore.save(methodContext, lockedExistingNFT.nftID, { + owner: lockedExistingNFT.owner, + attributesArray: [], + }); + + await userStore.set( + methodContext, + userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), + { + lockingModule: 'token', + }, + ); + + await nftStore.save(methodContext, escrowedNFT.nftID, { + owner: escrowedNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, 
userStore.getKey(escrowedNFT.owner, escrowedNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + }); + + it('should fail and emit Destroy event if NFT does not exist', async () => { + const address = utils.getRandomBytes(LENGTH_ADDRESS); + + await expect(method.destroy(methodContext, address, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should fail and emit Destroy event if NFT is not owned by the provided address', async () => { + const notOwner = utils.getRandomBytes(LENGTH_ADDRESS); + + await expect(method.destroy(methodContext, notOwner, existingNFT.nftID)).rejects.toThrow( + 'Not initiated by the NFT owner', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address: notOwner, + nftID: existingNFT.nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + }); + + it('should fail and emit Destroy event if NFT is escrowed', async () => { + await expect( + method.destroy(methodContext, escrowedNFT.owner, escrowedNFT.nftID), + ).rejects.toThrow('NFT is escrowed to another chain'); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address: escrowedNFT.owner, + nftID: escrowedNFT.nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + }); + + it('should fail and emit Destroy event if NFT is locked', async () => { + await expect( + method.destroy(methodContext, lockedExistingNFT.owner, lockedExistingNFT.nftID), + ).rejects.toThrow('Locked NFTs cannot be destroyed'); + + checkEventResult( + methodContext.eventQueue, + 1, + DestroyEvent, + 0, + { + address: lockedExistingNFT.owner, + nftID: lockedExistingNFT.nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + }); + + it('should delete NFTStore and UserStore entry and emit Destroy event', async () => { + await expect( + method.destroy(methodContext, 
existingNFT.owner, existingNFT.nftID), + ).resolves.toBeUndefined(); + + await expect(nftStore.has(methodContext, existingNFT.nftID)).resolves.toBeFalse(); + await expect( + userStore.has(methodContext, Buffer.concat([existingNFT.owner, escrowedNFT.nftID])), + ).resolves.toBeFalse(); + + checkEventResult(methodContext.eventQueue, 1, DestroyEvent, 0, { + address: existingNFT.owner, + nftID: existingNFT.nftID, + }); + }); + }); }); diff --git a/framework/test/unit/modules/nft/stores/escrow.spec.ts b/framework/test/unit/modules/nft/stores/escrow.spec.ts new file mode 100644 index 00000000000..89d27e973af --- /dev/null +++ b/framework/test/unit/modules/nft/stores/escrow.spec.ts @@ -0,0 +1,36 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { EscrowStore } from '../../../../../src/modules/nft/stores/escrow'; +import { LENGTH_CHAIN_ID, LENGTH_NFT_ID } from '../../../../../src/modules/nft/constants'; + +describe('EscrowStore', () => { + let store: EscrowStore; + + beforeEach(() => { + store = new EscrowStore('NFT', 5); + }); + + describe('getKey', () => { + it('should concatenate the provided receivingChainID and nftID', () => { + const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + expect(store.getKey(receivingChainID, nftID)).toEqual( + Buffer.concat([receivingChainID, nftID]), + ); + }); + }); +}); From 30ebd4480a786d2cd0c0e450968f77c6e90d3550 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 8 Jun 2023 00:17:10 +0200 Subject: [PATCH 053/170] Implement nft methods --- framework/src/modules/nft/events/create.ts | 4 +- framework/src/modules/nft/method.ts | 98 +++++++++- framework/src/modules/nft/module.ts | 4 +- .../nft/cc_comands/cc_transfer.spec.ts | 2 +- .../test/unit/modules/nft/method.spec.ts | 184 ++++++++++++++++++ 5 files changed, 284 insertions(+), 8 deletions(-) diff --git a/framework/src/modules/nft/events/create.ts b/framework/src/modules/nft/events/create.ts index c14b93d1a88..be3f55ae96c 100644 --- a/framework/src/modules/nft/events/create.ts +++ b/framework/src/modules/nft/events/create.ts @@ -34,13 +34,13 @@ export const createEventSchema = { nftID: { dataType: 'bytes', minLength: LENGTH_NFT_ID, - maxLenght: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, fieldNumber: 2, }, collectionID: { dataType: 'bytes', minLength: LENGTH_COLLECTION_ID, - maxLenght: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, fieldNumber: 3, }, result: { diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index a807e9ad674..83c2593982a 100644 --- a/framework/src/modules/nft/method.ts +++ 
b/framework/src/modules/nft/method.ts @@ -12,11 +12,12 @@ * Removal or modification of this copyright notice is prohibited. */ import { BaseMethod } from '../base_method'; -import { InteroperabilityMethod, ModuleConfig } from './types'; -import { NFTStore } from './stores/nft'; +import { FeeMethod, InteroperabilityMethod, ModuleConfig } from './types'; +import { NFTAttributes, NFTStore } from './stores/nft'; import { ImmutableMethodContext, MethodContext } from '../../state_machine'; import { ALL_SUPPORTED_NFTS_KEY, + FEE_CREATE_NFT, LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, LENGTH_NFT_ID, @@ -26,18 +27,21 @@ import { import { UserStore } from './stores/user'; import { DestroyEvent } from './events/destroy'; import { SupportedNFTsStore } from './stores/supported_nfts'; +import { CreateEvent } from './events/create'; export class NFTMethod extends BaseMethod { private _config!: ModuleConfig; // @ts-expect-error TODO: unused error. Remove when implementing. private _interoperabilityMethod!: InteroperabilityMethod; + private _feeMethod!: FeeMethod; public init(config: ModuleConfig): void { this._config = config; } - public addDependencies(interoperabilityMethod: InteroperabilityMethod) { + public addDependencies(interoperabilityMethod: InteroperabilityMethod, feeMethod: FeeMethod) { this._interoperabilityMethod = interoperabilityMethod; + this._feeMethod = feeMethod; } public getChainID(nftID: Buffer): Buffer { @@ -203,4 +207,92 @@ export class NFTMethod extends BaseMethod { return false; } + + public async getAttributesArray( + methodContext: MethodContext, + nftID: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + + const storeData = await nftStore.get(methodContext, nftID); + return storeData.attributesArray; + } + + public async getAttributes( + methodContext: MethodContext, + module: string, + nftID: 
Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + + const storeData = await nftStore.get(methodContext, nftID); + + for (const nftAttributes of storeData.attributesArray) { + if (nftAttributes.module === module) { + return nftAttributes.attributes; + } + } + + throw new Error('Specific module did not set any attributes.'); + } + + public async getNextAvailableIndex( + methodContext: MethodContext, + collectionID: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + const nftStoreData = await nftStore.iterate(methodContext, { + gte: Buffer.alloc(LENGTH_NFT_ID, 0), + lte: Buffer.alloc(LENGTH_NFT_ID, 255), + }); + + let count = 0; + for (const { key } of nftStoreData) { + if (key.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID).equals(collectionID)) { + count += 1; + } + } + + return count; + } + + public async create( + methodContext: MethodContext, + address: Buffer, + collectionID: Buffer, + attributesArray: NFTAttributes[], + ): Promise { + const index = await this.getNextAvailableIndex(methodContext, collectionID); + const nftID = Buffer.concat([ + this._config.ownChainID, + collectionID, + Buffer.from(index.toString()), + ]); + this._feeMethod.payFee(methodContext, BigInt(FEE_CREATE_NFT)); + + const nftStore = this.stores.get(NFTStore); + await nftStore.save(methodContext, nftID, { + owner: address, + attributesArray, + }); + + const userStore = this.stores.get(UserStore); + await userStore.set(methodContext, userStore.getKey(address, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + this.events.get(CreateEvent).log(methodContext, { + address, + nftID, + collectionID, + }); + } } diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 62dd39ab424..5518b54092d 100644 --- a/framework/src/modules/nft/module.ts +++ 
b/framework/src/modules/nft/module.ts @@ -84,9 +84,9 @@ export class NFTModule extends BaseInteroperableModule { this.stores.register(SupportedNFTsStore, new SupportedNFTsStore(this.name, 4)); } - public addDependencies(interoperabilityMethod: InteroperabilityMethod, _feeMethod: FeeMethod) { + public addDependencies(interoperabilityMethod: InteroperabilityMethod, feeMethod: FeeMethod) { this._interoperabilityMethod = interoperabilityMethod; - this.method.addDependencies(interoperabilityMethod); + this.method.addDependencies(interoperabilityMethod, feeMethod); this._internalMethod.addDependencies(this.method, this._interoperabilityMethod); this.crossChainMethod.addDependencies(interoperabilityMethod); } diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 5c005cbfb95..0e09de2bf26 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -127,7 +127,7 @@ describe('CrossChain Transfer Command', () => { beforeEach(async () => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - method.addDependencies(interopMethod); + method.addDependencies(interopMethod, feeMethod); method.init(config); internalMethod.addDependencies(method, interopMethod); internalMethod.init(config); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 8225174ba30..cea60f266f4 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -22,6 +22,7 @@ import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_ import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { ALL_SUPPORTED_NFTS_KEY, + FEE_CREATE_NFT, LENGTH_ADDRESS, LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, @@ -33,6 +34,7 @@ import { NFTStore } from 
'../../../../src/modules/nft/stores/nft'; import { UserStore } from '../../../../src/modules/nft/stores/user'; import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; +import { CreateEvent } from '../../../../src/modules/nft/events/create'; describe('NFTMethod', () => { const module = new NFTModule(); @@ -406,4 +408,186 @@ describe('NFTMethod', () => { expect(isSupported).toBe(false); }); }); + + describe('getAttributesArray', () => { + const expectedAttributesArray = [ + { module: 'customMod1', attributes: Buffer.alloc(5) }, + { module: 'customMod2', attributes: Buffer.alloc(2) }, + ]; + + it('should throw if entry does not exist in the nft substore for the nft id', async () => { + await expect(method.getAttributesArray(methodContext, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + }); + + it('should return attributes array if entry exists in the nft substore for the nft id', async () => { + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: expectedAttributesArray, + }); + const returnedAttributesArray = await method.getAttributesArray(methodContext, nftID); + expect(returnedAttributesArray).toStrictEqual(expectedAttributesArray); + }); + }); + + describe('getAttributes', () => { + const module1 = 'customMod1'; + const module2 = 'customMod2'; + const module3 = 'customMod3'; + const expectedAttributesArray = [ + { module: module1, attributes: Buffer.alloc(5) }, + { module: module2, attributes: Buffer.alloc(2) }, + ]; + + beforeEach(async () => { + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: expectedAttributesArray, + }); + }); + + it('should throw if entry does not exist in the nft substore for the nft id', async () => { + await nftStore.del(methodContext, nftID); + await 
expect(method.getAttributes(methodContext, module1, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + }); + + it('should return attributes if entry exists in the nft substore for the nft id and attributes exists for the requested module', async () => { + const returnedAttributes = await method.getAttributes(methodContext, module1, nftID); + expect(returnedAttributes).toStrictEqual(expectedAttributesArray[0].attributes); + }); + + it('should throw if entry exists in the nft substore for the nft id but no attributes exists for the requested module', async () => { + await expect(method.getAttributes(methodContext, module3, nftID)).rejects.toThrow( + 'Specific module did not set any attributes.', + ); + }); + }); + + describe('getNextAvailableIndex', () => { + const attributesArray1 = [ + { module: 'customMod1', attributes: Buffer.alloc(5) }, + { module: 'customMod2', attributes: Buffer.alloc(2) }, + ]; + const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; + const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + + beforeEach(async () => { + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: attributesArray1, + }); + }); + + it('should return index count 0 if entry does not exist in the nft substore for the nft id', async () => { + await nftStore.del(methodContext, nftID); + const returnedIndex = await method.getNextAvailableIndex( + methodContext, + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ); + expect(returnedIndex).toBe(0); + }); + + it('should return index count 0 if entry exists in the nft substore for the nft id and no key matches the given collection id', async () => { + const returnedIndex = await method.getNextAvailableIndex( + methodContext, + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ); + expect(returnedIndex).toBe(0); + }); + + it('should return index count 1 if entry exists in the nft substore for the 
nft id and a key matches the given collection id', async () => { + const returnedIndex = await method.getNextAvailableIndex(methodContext, collectionID); + expect(returnedIndex).toBe(1); + }); + + it('should return non zero index count if entry exists in the nft substore for the nft id and more than 1 key matches the given collection id', async () => { + const newKey = Buffer.concat([utils.getRandomBytes(LENGTH_CHAIN_ID), collectionID]); + await nftStore.save(methodContext, newKey, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: attributesArray2, + }); + const returnedIndex = await method.getNextAvailableIndex(methodContext, collectionID); + expect(returnedIndex).toBe(2); + }); + }); + + describe('create', () => { + const interopMethod = { + send: jest.fn(), + error: jest.fn(), + terminateChain: jest.fn(), + }; + const feeMethod = { payFee: jest.fn() }; + const attributesArray1 = [ + { module: 'customMod1', attributes: Buffer.alloc(5) }, + { module: 'customMod2', attributes: Buffer.alloc(2) }, + ]; + const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; + const attributesArray3 = [{ module: 'customMod3', attributes: Buffer.alloc(9) }]; + const config = { + ownChainID: Buffer.alloc(LENGTH_CHAIN_ID, 1), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + const address = utils.getRandomBytes(LENGTH_ADDRESS); + + beforeEach(() => { + method.addDependencies(interopMethod, feeMethod); + method.init(config); + jest.spyOn(feeMethod, 'payFee'); + }); + + it('should set data to stores with correct key and emit successfull create event when there is no entry in the nft substore', async () => { + const expectedKey = Buffer.concat([config.ownChainID, collectionID, Buffer.from('0')]); + + await method.create(methodContext, address, collectionID, attributesArray3); + const 
nftStoreData = await nftStore.get(methodContext, expectedKey); + const userStoreData = await userStore.get( + methodContext, + userStore.getKey(address, expectedKey), + ); + expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); + expect(nftStoreData.owner).toStrictEqual(address); + expect(nftStoreData.attributesArray).toEqual(attributesArray3); + expect(userStoreData.lockingModule).toEqual(NFT_NOT_LOCKED); + checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { + address, + nftID: expectedKey, + collectionID, + }); + }); + + it('should set data to stores with correct key and emit successfull create event when there is some entry in the nft substore', async () => { + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: attributesArray1, + }); + const newKey = Buffer.concat([utils.getRandomBytes(LENGTH_CHAIN_ID), collectionID]); + await nftStore.save(methodContext, newKey, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: attributesArray2, + }); + const expectedKey = Buffer.concat([config.ownChainID, collectionID, Buffer.from('2')]); + + await method.create(methodContext, address, collectionID, attributesArray3); + const nftStoreData = await nftStore.get(methodContext, expectedKey); + const userStoreData = await userStore.get( + methodContext, + userStore.getKey(address, expectedKey), + ); + expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); + expect(nftStoreData.owner).toStrictEqual(address); + expect(nftStoreData.attributesArray).toEqual(attributesArray3); + expect(userStoreData.lockingModule).toEqual(NFT_NOT_LOCKED); + checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { + address, + nftID: expectedKey, + collectionID, + }); + }); + }); }); From d7425969d64f2755cd9aade70122d89da7664829 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 9 Jun 2023 11:07:31 
+0200 Subject: [PATCH 054/170] Crosschain NFT Transfer (#8560) * :label: Updates InteroperabilityMethod * :seedling: NFT TransferCrossChainCommand * :recycle: :memo: NFTErrorEventResult :seedling: DestroyEvent.error * :label: Updates InteroperabilityMethod mock defintion * :recycle: Removes unwanted comments --- .../nft/commands/transfer_cross_chain.ts | 136 +++++ framework/src/modules/nft/constants.ts | 6 +- framework/src/modules/nft/events/destroy.ts | 15 +- framework/src/modules/nft/method.ts | 8 +- framework/src/modules/nft/schemas.ts | 61 ++- framework/src/modules/nft/types.ts | 3 +- .../nft/cc_comands/cc_transfer.spec.ts | 1 + .../nft/commands/transfer_cross_chain.spec.ts | 463 ++++++++++++++++++ .../unit/modules/nft/internal_method.spec.ts | 4 + .../test/unit/modules/nft/method.spec.ts | 1 + 10 files changed, 682 insertions(+), 16 deletions(-) create mode 100644 framework/src/modules/nft/commands/transfer_cross_chain.ts create mode 100644 framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts new file mode 100644 index 00000000000..97fd17a8826 --- /dev/null +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -0,0 +1,136 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { validator } from '@liskhq/lisk-validator'; +import { crossChainTransferParamsSchema } from '../schemas'; +import { NFTStore } from '../stores/nft'; +import { NFTMethod } from '../method'; +import { LENGTH_CHAIN_ID, NFT_NOT_LOCKED } from '../constants'; +import { TokenMethod } from '../../token'; +import { InteroperabilityMethod } from '../types'; +import { BaseCommand } from '../../base_command'; +import { + CommandExecuteContext, + CommandVerifyContext, + VerificationResult, + VerifyStatus, +} from '../../../state_machine'; +import { InternalMethod } from '../internal_method'; + +export interface Params { + nftID: Buffer; + receivingChainID: Buffer; + recipientAddress: Buffer; + data: string; + messageFee: bigint; + messageFeeTokenID: Buffer; + includeAttributes: boolean; +} + +export class TransferCrossChainCommand extends BaseCommand { + public schema = crossChainTransferParamsSchema; + + private _nftMethod!: NFTMethod; + private _tokenMethod!: TokenMethod; + private _interoperabilityMethod!: InteroperabilityMethod; + private _internalMethod!: InternalMethod; + + public init(args: { + nftMethod: NFTMethod; + tokenMethod: TokenMethod; + interoperabilityMethod: InteroperabilityMethod; + internalMethod: InternalMethod; + }): void { + this._nftMethod = args.nftMethod; + this._tokenMethod = args.tokenMethod; + this._interoperabilityMethod = args.interoperabilityMethod; + this._internalMethod = args.internalMethod; + } + + public async verify(context: CommandVerifyContext): Promise { + const { params } = context; + + validator.validate(this.schema, params); + + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(context.getMethodContext(), params.nftID); + + if (!nftExists) { + throw new Error('NFT substore entry does not exist'); + } + + const owner = await this._nftMethod.getNFTOwner(context.getMethodContext(), params.nftID); + + if (owner.length === LENGTH_CHAIN_ID) { + throw new Error('NFT is escrowed to another 
chain'); + } + + const nftChainID = this._nftMethod.getChainID(params.nftID); + + if (!nftChainID.equals(context.chainID) && !nftChainID.equals(params.receivingChainID)) { + throw new Error('NFT must be native to either the sending or the receiving chain'); + } + + const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( + context.getMethodContext(), + params.receivingChainID, + ); + + if (!params.messageFeeTokenID.equals(messageFeeTokenID)) { + throw new Error('Mismatching message fee Token ID'); + } + + if (!owner.equals(context.transaction.senderAddress)) { + throw new Error('Transfer not initiated by the NFT owner'); + } + + const lockingModule = await this._nftMethod.getLockingModule( + context.getMethodContext(), + params.nftID, + ); + + if (lockingModule !== NFT_NOT_LOCKED) { + throw new Error('Locked NFTs cannot be transferred'); + } + + const availableBalance = await this._tokenMethod.getAvailableBalance( + context.getMethodContext(), + context.transaction.senderAddress, + params.messageFeeTokenID, + ); + + if (availableBalance < params.messageFee) { + throw new Error('Insufficient balance for the message fee'); + } + + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._internalMethod.transferCrossChainInternal( + context.getMethodContext(), + context.transaction.senderAddress, + params.recipientAddress, + params.nftID, + params.receivingChainID, + params.messageFee, + params.data, + params.includeAttributes, + ); + } +} diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index e732b5f17f2..e14f1ded273 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -25,6 +25,7 @@ export const CCM_STATUS_CODE_OK = 0; export const EMPTY_BYTES = Buffer.alloc(0); export const ALL_SUPPORTED_NFTS_KEY = EMPTY_BYTES; export const FEE_CREATE_NFT = 5000000; 
+export const LENGTH_TOKEN_ID = 8; export const enum NftEventResult { RESULT_SUCCESSFUL = 0, @@ -43,7 +44,4 @@ export const enum NftEventResult { RESULT_DATA_TOO_LONG = 13, } -export type NftErrorEventResult = Exclude< - NftEventResult, - NftEventResult.RESULT_NFT_ESCROWED | NftEventResult.RESULT_SUCCESSFUL ->; +export type NftErrorEventResult = Exclude; diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts index 3475c03a869..15bf0ffb7ad 100644 --- a/framework/src/modules/nft/events/destroy.ts +++ b/framework/src/modules/nft/events/destroy.ts @@ -13,7 +13,7 @@ */ import { BaseEvent, EventQueuer } from '../../base_event'; -import { LENGTH_NFT_ID, NftEventResult } from '../constants'; +import { LENGTH_NFT_ID, NftErrorEventResult, NftEventResult } from '../constants'; export interface DestroyEventData { address: Buffer; @@ -46,11 +46,14 @@ export const createEventSchema = { export class DestroyEvent extends BaseEvent { public schema = createEventSchema; - public log( - ctx: EventQueuer, - data: DestroyEventData, - result: NftEventResult = NftEventResult.RESULT_SUCCESSFUL, - ): void { + public log(ctx: EventQueuer, data: DestroyEventData): void { + this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ + data.address, + data.nftID, + ]); + } + + public error(ctx: EventQueuer, data: DestroyEventData, result: NftErrorEventResult): void { this.add(ctx, { ...data, result }, [data.address, data.nftID]); } } diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 83c2593982a..0d15d1f0305 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -92,7 +92,7 @@ export class NFTMethod extends BaseMethod { const nftExists = await nftStore.has(methodContext, nftID); if (!nftExists) { - this.events.get(DestroyEvent).log( + this.events.get(DestroyEvent).error( methodContext, { address, @@ -107,7 +107,7 @@ export class NFTMethod extends BaseMethod 
{ const owner = await this.getNFTOwner(methodContext, nftID); if (owner.length === LENGTH_CHAIN_ID) { - this.events.get(DestroyEvent).log( + this.events.get(DestroyEvent).error( methodContext, { address, @@ -120,7 +120,7 @@ export class NFTMethod extends BaseMethod { } if (!owner.equals(address)) { - this.events.get(DestroyEvent).log( + this.events.get(DestroyEvent).error( methodContext, { address, @@ -137,7 +137,7 @@ export class NFTMethod extends BaseMethod { const { lockingModule } = await userStore.get(methodContext, userKey); if (lockingModule !== NFT_NOT_LOCKED) { - this.events.get(DestroyEvent).log( + this.events.get(DestroyEvent).error( methodContext, { address, diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 4bccf862491..9b261363e18 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -13,7 +13,13 @@ */ import { MAX_DATA_LENGTH } from '../token/constants'; -import { LENGTH_NFT_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from './constants'; +import { + LENGTH_CHAIN_ID, + LENGTH_NFT_ID, + LENGTH_TOKEN_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, +} from './constants'; export const transferParamsSchema = { $id: '/lisk/nftTransferParams', @@ -97,3 +103,56 @@ export interface CCTransferMessageParams { recipientAddress: Buffer; data: string; } + +export const crossChainTransferParamsSchema = { + $id: '/lisk/crossChainNFTTransferParamsSchema', + type: 'object', + required: [ + 'nftID', + 'receivingChainID', + 'recipientAddress', + 'data', + 'messageFee', + 'messageFeeTokenID', + 'includeAttributes', + ], + properties: { + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 1, + }, + receivingChainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 2, + }, + recipientAddress: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 3, + }, + data: { + 
dataType: 'string', + minLength: 0, + maxLength: MAX_DATA_LENGTH, + fieldNumber: 4, + }, + messageFee: { + dataType: 'uint64', + fieldNumber: 5, + }, + messageFeeTokenID: { + dataType: 'bytes', + minLength: LENGTH_TOKEN_ID, + maxLength: LENGTH_TOKEN_ID, + fieldNumber: 6, + }, + includeAttributes: { + dataType: 'boolean', + fieldNumber: 7, + }, + }, +}; diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 74d123c56aa..d71c76e83a7 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ -import { MethodContext } from '../../state_machine'; +import { ImmutableMethodContext, MethodContext } from '../../state_machine'; import { CCMsg } from '../interoperability'; // eslint-disable-next-line @typescript-eslint/no-empty-interface @@ -34,6 +34,7 @@ export interface InteroperabilityMethod { ): Promise; error(methodContext: MethodContext, ccm: CCMsg, code: number): Promise; terminateChain(methodContext: MethodContext, chainID: Buffer): Promise; + getMessageFeeTokenID(methodContext: ImmutableMethodContext, chainID: Buffer): Promise; } export interface FeeMethod { diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 0e09de2bf26..c84cbee033d 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -87,6 +87,7 @@ describe('CrossChain Transfer Command', () => { send: jest.fn(), error: jest.fn(), terminateChain: jest.fn(), + getMessageFeeTokenID: jest.fn(), }; const defaultHeader = { height: 0, diff --git a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts new file mode 100644 index 00000000000..ba942e60893 --- /dev/null +++ 
b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts @@ -0,0 +1,463 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { Transaction } from '@liskhq/lisk-chain'; +import { codec } from '@liskhq/lisk-codec'; +import { address, utils } from '@liskhq/lisk-cryptography'; +import { NFTModule } from '../../../../../src/modules/nft/module'; +import { + TransferCrossChainCommand, + Params, +} from '../../../../../src/modules/nft/commands/transfer_cross_chain'; +import { crossChainTransferParamsSchema } from '../../../../../src/modules/nft/schemas'; +import { + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + LENGTH_NFT_ID, + LENGTH_TOKEN_ID, + NFT_NOT_LOCKED, +} from '../../../../../src/modules/nft/constants'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing/in_memory_prefixed_state'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { EventQueue, VerifyStatus, createMethodContext } from '../../../../../src/state_machine'; +import { TokenMethod } from '../../../../../src'; +import { MethodContext } from '../../../../../src/state_machine/method_context'; +import { NFTStore } from '../../../../../src/modules/nft/stores/nft'; +import { UserStore } from '../../../../../src/modules/nft/stores/user'; +import * as Token from '../../../../../src/modules/token/stores/user'; +import { NFTMethod } from '../../../../../src/modules/nft/method'; +import { InteroperabilityMethod } from '../../../../../src/modules/nft/types'; +import { createTransactionContext 
} from '../../../../../src/testing'; +import { InternalMethod } from '../../../../../src/modules/nft/internal_method'; +import { + TransferCrossChainEvent, + TransferCrossChainEventData, +} from '../../../../../src/modules/nft/events/transfer_cross_chain'; + +describe('TransferCrossChainComand', () => { + const module = new NFTModule(); + module.stores.register( + Token.UserStore, + new Token.UserStore(module.name, module.stores.keys.length + 1), + ); + + const command = new TransferCrossChainCommand(module.stores, module.events); + const nftMethod = new NFTMethod(module.stores, module.events); + const tokenMethod = new TokenMethod(module.stores, module.events, module.name); + const internalMethod = new InternalMethod(module.stores, module.events); + let interoperabilityMethod!: InteroperabilityMethod; + + const senderPublicKey = utils.getRandomBytes(32); + const owner = address.getAddressFromPublicKey(senderPublicKey); + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const messageFeeTokenID = utils.getRandomBytes(LENGTH_TOKEN_ID); + const availableBalance = BigInt(1000000); + + const nftStore = module.stores.get(NFTStore); + const userStore = module.stores.get(UserStore); + const tokenUserStore = module.stores.get(Token.UserStore); + + let stateStore!: PrefixedStateReadWriter; + let methodContext!: MethodContext; + + let existingNFT: { nftID: any; owner: any }; + let lockedExistingNFT: { nftID: any; owner: any }; + let escrowedNFT: { nftID: any; owner: any }; + + const validParams: Params = { + nftID: Buffer.alloc(LENGTH_NFT_ID), + receivingChainID, + recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS), + data: '', + messageFee: BigInt(100000), + messageFeeTokenID, + includeAttributes: false, + }; + + const checkEventResult = ( + eventQueue: EventQueue, + length: number, + EventClass: any, + index: number, + expectedResult: EventDataType, + result: any = 0, + ) => { + 
expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + + const eventData = codec.decode>( + new EventClass('nft').schema, + eventQueue.getEvents()[index].toObject().data, + ); + + expect(eventData).toEqual({ ...expectedResult, result }); + }; + + const createTransactionContextWithOverridingParams = ( + params: Record, + txParams: Record = {}, + ) => + createTransactionContext({ + chainID: ownChainID, + stateStore, + transaction: new Transaction({ + module: module.name, + command: 'transfer', + fee: BigInt(5000000), + nonce: BigInt(0), + senderPublicKey, + params: codec.encode(crossChainTransferParamsSchema, { + ...validParams, + ...params, + }), + signatures: [utils.getRandomBytes(64)], + ...txParams, + }), + }); + + beforeEach(async () => { + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + + methodContext = createMethodContext({ + stateStore, + eventQueue: new EventQueue(0), + contextStore: new Map(), + }); + + interoperabilityMethod = { + send: jest.fn().mockResolvedValue(Promise.resolve()), + error: jest.fn().mockResolvedValue(Promise.resolve()), + terminateChain: jest.fn().mockResolvedValue(Promise.resolve()), + getMessageFeeTokenID: jest.fn().mockResolvedValue(Promise.resolve(messageFeeTokenID)), + }; + + internalMethod.init({ + ownChainID, + }); + + internalMethod.addDependencies(nftMethod, interoperabilityMethod); + + command.init({ nftMethod, tokenMethod, interoperabilityMethod, internalMethod }); + + existingNFT = { + owner, + nftID: Buffer.concat([ownChainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]), + }; + + lockedExistingNFT = { + owner, + nftID: Buffer.concat([ownChainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]), + }; + + escrowedNFT = { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + await nftStore.save(methodContext, existingNFT.nftID, { + owner: 
existingNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await module.stores.get(NFTStore).save(methodContext, lockedExistingNFT.nftID, { + owner: lockedExistingNFT.owner, + attributesArray: [], + }); + + await userStore.set( + methodContext, + userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), + { + lockingModule: 'token', + }, + ); + + await module.stores.get(NFTStore).save(methodContext, escrowedNFT.nftID, { + owner: escrowedNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(escrowedNFT.owner, escrowedNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await tokenUserStore.set(methodContext, tokenUserStore.getKey(owner, messageFeeTokenID), { + availableBalance, + lockedBalances: [], + }); + }); + + describe('verify', () => { + it('should fail if NFT does not have valid length', async () => { + const nftMinLengthContext = createTransactionContextWithOverridingParams({ + nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1), + }); + + const nftMaxLengthContext = createTransactionContextWithOverridingParams({ + nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), + }); + + await expect( + command.verify( + nftMinLengthContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ), + ).rejects.toThrow("'.nftID' minLength not satisfied"); + + await expect( + command.verify( + nftMaxLengthContext.createCommandExecuteContext(crossChainTransferParamsSchema), + ), + ).rejects.toThrow("'.nftID' maxLength exceeded"); + }); + + it('should fail if receivingChainID does not have valid length', async () => { + const receivingChainIDMinLengthContext = createTransactionContextWithOverridingParams({ + receivingChainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), + }); + + const receivingChainIDMaxLengthContext = createTransactionContextWithOverridingParams({ + receivingChainID: 
utils.getRandomBytes(LENGTH_CHAIN_ID + 1), + }); + + await expect( + command.verify( + receivingChainIDMinLengthContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + ).rejects.toThrow("'.receivingChainID' minLength not satisfied"); + + await expect( + command.verify( + receivingChainIDMaxLengthContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + ).rejects.toThrow("'.receivingChainID' maxLength exceeded"); + }); + + it('should fail if recipientAddress does not have valid length', async () => { + const recipientAddressMinLengthContext = createTransactionContextWithOverridingParams({ + recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS - 1), + }); + + const recipientAddressMaxLenghtContext = createTransactionContextWithOverridingParams({ + recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS + 1), + }); + + await expect( + command.verify( + recipientAddressMinLengthContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + ).rejects.toThrow("'.recipientAddress' address length invalid"); + + await expect( + command.verify( + recipientAddressMaxLenghtContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + ).rejects.toThrow("'.recipientAddress' address length invalid"); + }); + + it('should fail if data has more than 64 characters', async () => { + const dataMaxLengthContext = createTransactionContextWithOverridingParams({ + data: '1'.repeat(65), + }); + + await expect( + command.verify( + dataMaxLengthContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ), + ).rejects.toThrow("'.data' must NOT have more than 64 characters"); + }); + + it('should fail if messageFeeTokenID does not have valid length', async () => { + const messageFeeTokenIDMinLengthContext = createTransactionContextWithOverridingParams({ + messageFeeTokenID: utils.getRandomBytes(LENGTH_TOKEN_ID - 1), + }); + + const messageFeeTokenIDMaxLengthContext = 
createTransactionContextWithOverridingParams({ + messageFeeTokenID: utils.getRandomBytes(LENGTH_TOKEN_ID + 1), + }); + + await expect( + command.verify( + messageFeeTokenIDMinLengthContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + ).rejects.toThrow("'.messageFeeTokenID' minLength not satisfied"); + + await expect( + command.verify( + messageFeeTokenIDMaxLengthContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ), + ).rejects.toThrow("'.messageFeeTokenID' maxLength exceeded"); + }); + + it('should fail if NFT does not exist', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow('NFT substore entry does not exist'); + }); + + it('should fail if NFT is escrowed', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: escrowedNFT.nftID, + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow('NFT is escrowed to another chain'); + }); + + it('should fail if NFT is not native to either the sending or receiving chain', async () => { + const nftID = utils.getRandomBytes(LENGTH_ADDRESS); + + const context = createTransactionContextWithOverridingParams({ + nftID, + }); + + await nftStore.save(methodContext, nftID, { + owner, + attributesArray: [], + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow(''); + }); + + it('should fail if messageFeeTokenID for receiving chain differs from the messageFeeTokenID of parameters', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: existingNFT.nftID, + messageFeeTokenID: utils.getRandomBytes(LENGTH_TOKEN_ID), + }); + + await expect( + 
command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow('Mismatching message fee Token ID'); + }); + + it('should fail if the owner of the NFT is not the sender', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: existingNFT.nftID, + }); + + const nft = await nftStore.get(methodContext, existingNFT.nftID); + nft.owner = utils.getRandomBytes(LENGTH_ADDRESS); + await nftStore.save(methodContext, existingNFT.nftID, nft); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow('Transfer not initiated by the NFT owner'); + }); + + it('should fail if NFT is locked', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: lockedExistingNFT.nftID, + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow('Locked NFTs cannot be transferred'); + }); + + it('should fail if senders has insufficient balance of value messageFee and token messageFeeTokenID', async () => { + const context = createTransactionContextWithOverridingParams({ + messageFeeTokenID, + messageFee: availableBalance + BigInt(1), + nftID: existingNFT.nftID, + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).rejects.toThrow('Insufficient balance for the message fee'); + }); + + it('should verify if NFT is native', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: existingNFT.nftID, + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).resolves.toEqual({ status: VerifyStatus.OK }); + }); + + it('should verify if NFT is native to receiving chain', async () => { + const nftID = Buffer.concat([ + receivingChainID, + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + + await 
nftStore.save(methodContext, nftID, { + owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(owner, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + const context = createTransactionContextWithOverridingParams({ + nftID, + }); + + await expect( + command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), + ).resolves.toEqual({ status: VerifyStatus.OK }); + }); + }); + + describe('execute', () => { + it('should transfer NFT and emit TransferCrossChainEvent', async () => { + const context = createTransactionContextWithOverridingParams({ + nftID: existingNFT.nftID, + }); + + await expect( + command.execute(context.createCommandExecuteContext(crossChainTransferParamsSchema)), + ).resolves.toBeUndefined(); + + checkEventResult( + context.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress: owner, + recipientAddress: validParams.recipientAddress, + receivingChainID: validParams.receivingChainID, + nftID: existingNFT.nftID, + includeAttributes: validParams.includeAttributes, + }, + ); + }); + }); +}); diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 270a29d3786..31dcd1a99c4 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -26,6 +26,7 @@ import { LENGTH_NFT_ID, MODULE_NAME_NFT, NFT_NOT_LOCKED, + LENGTH_TOKEN_ID, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { MethodContext } from '../../../../src/state_machine/method_context'; @@ -195,6 +196,9 @@ describe('InternalMethod', () => { send: jest.fn().mockResolvedValue(Promise.resolve()), error: jest.fn().mockResolvedValue(Promise.resolve()), terminateChain: jest.fn().mockRejectedValue(Promise.resolve()), + getMessageFeeTokenID: jest + .fn() + 
.mockResolvedValue(Promise.resolve(utils.getRandomBytes(LENGTH_TOKEN_ID))), }; internalMethod.addDependencies(method, interoperabilityMethod); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index cea60f266f4..d43fba2e223 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -519,6 +519,7 @@ describe('NFTMethod', () => { send: jest.fn(), error: jest.fn(), terminateChain: jest.fn(), + getMessageFeeTokenID: jest.fn(), }; const feeMethod = { payFee: jest.fn() }; const attributesArray1 = [ From be7e6fbe297a04c27dc199a140df04dc35a67916 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 9 Jun 2023 16:43:21 +0200 Subject: [PATCH 055/170] Lock and Unlock method for NFT (#8561) * :label: Updates NftErrorEventResult * :seedling: LockEvent.error * :seedling: NFTMethod.lock & NFTMethod.unlock * :bug: Fixes NFTMethod.unlock * :bug: Fixes NFTMethod.unlock to not log event if NFT is escrowed --- framework/src/modules/nft/events/lock.ts | 5 + framework/src/modules/nft/method.ts | 125 ++++++++ .../test/unit/modules/nft/method.spec.ts | 275 ++++++++++++++---- 3 files changed, 352 insertions(+), 53 deletions(-) diff --git a/framework/src/modules/nft/events/lock.ts b/framework/src/modules/nft/events/lock.ts index 9820836158f..b52ba2de613 100644 --- a/framework/src/modules/nft/events/lock.ts +++ b/framework/src/modules/nft/events/lock.ts @@ -17,6 +17,7 @@ import { LENGTH_NFT_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME, + NftErrorEventResult, NftEventResult, } from '../constants'; @@ -58,4 +59,8 @@ export class LockEvent extends BaseEvent { + const nftStore = this.stores.get(NFTStore); + + const nftExists = await nftStore.has(methodContext, nftID); + + if (!nftExists) { + this.events.get(LockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + + throw new 
Error('NFT substore entry does not exist'); + } + + const owner = await this.getNFTOwner(methodContext, nftID); + + if (owner.length === LENGTH_CHAIN_ID) { + this.events.get(LockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + + throw new Error('NFT is escrowed to another chain'); + } + + const userStore = this.stores.get(UserStore); + const userKey = userStore.getKey(owner, nftID); + const userData = await userStore.get(methodContext, userKey); + + if (userData.lockingModule !== NFT_NOT_LOCKED) { + this.events.get(LockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + + throw new Error('NFT is already locked'); + } + + userData.lockingModule = module; + + await userStore.set(methodContext, userKey, userData); + + this.events.get(LockEvent).log(methodContext, { + module, + nftID, + }); + } + + public async unlock(methodContext: MethodContext, module: string, nftID: Buffer): Promise { + const nftStore = this.stores.get(NFTStore); + + const nftExists = await nftStore.has(methodContext, nftID); + + if (!nftExists) { + this.events.get(LockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + + throw new Error('NFT substore entry does not exist'); + } + + const nftData = await nftStore.get(methodContext, nftID); + + if (nftData.owner.length === LENGTH_CHAIN_ID) { + throw new Error('NFT is escrowed to another chain'); + } + + const userStore = this.stores.get(UserStore); + const userKey = userStore.getKey(nftData.owner, nftID); + const userData = await userStore.get(methodContext, userKey); + + if (userData.lockingModule === NFT_NOT_LOCKED) { + this.events.get(LockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_NOT_LOCKED, + ); + + throw new Error('NFT is not locked'); + } + + if (userData.lockingModule !== module) { + this.events.get(LockEvent).error( + methodContext, + { 
+ module, + nftID, + }, + NftEventResult.RESULT_UNAUTHORIZED_UNLOCK, + ); + + throw new Error('Unlocking NFT via module that did not lock it'); + } + + userData.lockingModule = NFT_NOT_LOCKED; + + await userStore.set(methodContext, userKey, userData); + + this.events.get(LockEvent).log(methodContext, { + module, + nftID, + }); + } } diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index d43fba2e223..1f14775963f 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -35,6 +35,7 @@ import { UserStore } from '../../../../src/modules/nft/stores/user'; import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; import { CreateEvent } from '../../../../src/modules/nft/events/create'; +import { LockEvent, LockEventData } from '../../../../src/modules/nft/events/lock'; describe('NFTMethod', () => { const module = new NFTModule(); @@ -67,7 +68,11 @@ describe('NFTMethod', () => { expect(eventData).toEqual({ ...expectedResult, result }); }; - beforeEach(() => { + let existingNFT: { nftID: any; owner: any }; + let lockedExistingNFT: { nftID: any; owner: any; lockingModule: string }; + let escrowedNFT: { nftID: any; owner: any }; + + beforeEach(async () => { owner = utils.getRandomBytes(LENGTH_ADDRESS); methodContext = createMethodContext({ @@ -75,6 +80,53 @@ describe('NFTMethod', () => { eventQueue: new EventQueue(0), contextStore: new Map(), }); + + existingNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + lockedExistingNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + lockingModule: 'token', + }; + + escrowedNFT = { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }; + + await 
nftStore.save(methodContext, existingNFT.nftID, { + owner: existingNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + + await nftStore.save(methodContext, lockedExistingNFT.nftID, { + owner: lockedExistingNFT.owner, + attributesArray: [], + }); + + await userStore.set( + methodContext, + userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), + { + lockingModule: lockedExistingNFT.lockingModule, + }, + ); + + await nftStore.save(methodContext, escrowedNFT.nftID, { + owner: escrowedNFT.owner, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(escrowedNFT.owner, escrowedNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); }); describe('getChainID', () => { @@ -143,58 +195,6 @@ describe('NFTMethod', () => { }); describe('destroy', () => { - let existingNFT: { nftID: any; owner: any }; - let lockedExistingNFT: { nftID: any; owner: any }; - let escrowedNFT: { nftID: any; owner: any }; - - beforeEach(async () => { - existingNFT = { - owner: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - }; - - lockedExistingNFT = { - owner: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - }; - - escrowedNFT = { - owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - }; - - await nftStore.save(methodContext, existingNFT.nftID, { - owner: existingNFT.owner, - attributesArray: [], - }); - - await userStore.set(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), { - lockingModule: NFT_NOT_LOCKED, - }); - - await nftStore.save(methodContext, lockedExistingNFT.nftID, { - owner: lockedExistingNFT.owner, - attributesArray: [], - }); - - await userStore.set( - methodContext, - userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), - { - lockingModule: 'token', - }, - ); - - await 
nftStore.save(methodContext, escrowedNFT.nftID, { - owner: escrowedNFT.owner, - attributesArray: [], - }); - - await userStore.set(methodContext, userStore.getKey(escrowedNFT.owner, escrowedNFT.nftID), { - lockingModule: NFT_NOT_LOCKED, - }); - }); - it('should fail and emit Destroy event if NFT does not exist', async () => { const address = utils.getRandomBytes(LENGTH_ADDRESS); @@ -591,4 +591,173 @@ describe('NFTMethod', () => { }); }); }); + + describe('lock', () => { + it('should throw and log LockEvent if NFT does not exist', async () => { + await expect(method.lock(methodContext, module.name, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: module.name, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should throw and log LockEvent if NFT is escrowed', async () => { + await expect(method.lock(methodContext, module.name, escrowedNFT.nftID)).rejects.toThrow( + 'NFT is escrowed to another chain', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: module.name, + nftID: escrowedNFT.nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + }); + + it('should throw and log LockEvent if NFT is locked', async () => { + await expect( + method.lock(methodContext, module.name, lockedExistingNFT.nftID), + ).rejects.toThrow('NFT is already locked'); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: module.name, + nftID: lockedExistingNFT.nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + }); + + it('should update the locking module and log LockEvent', async () => { + const expectedLockingModule = 'lockingModule'; + await expect( + method.lock(methodContext, expectedLockingModule, existingNFT.nftID), + ).resolves.toBeUndefined(); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: expectedLockingModule, + nftID: 
existingNFT.nftID, + }, + NftEventResult.RESULT_SUCCESSFUL, + ); + + const { lockingModule } = await userStore.get( + methodContext, + userStore.getKey(existingNFT.owner, existingNFT.nftID), + ); + + expect(lockingModule).toEqual(expectedLockingModule); + }); + }); + + describe('unlock', () => { + it('should throw and log LockEvent if NFT does not exist', async () => { + await expect(method.unlock(methodContext, module.name, nftID)).rejects.toThrow( + 'NFT substore entry does not exist', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: module.name, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should throw if NFT is escrowed', async () => { + await expect(method.unlock(methodContext, module.name, escrowedNFT.nftID)).rejects.toThrow( + 'NFT is escrowed to another chain', + ); + }); + + it('should throw and log LockEvent if NFT is not locked', async () => { + await expect(method.unlock(methodContext, module.name, existingNFT.nftID)).rejects.toThrow( + 'NFT is not locked', + ); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: module.name, + nftID: existingNFT.nftID, + }, + NftEventResult.RESULT_NFT_NOT_LOCKED, + ); + }); + + it('should throw and log LockEvent if unlocking module is not the locking module', async () => { + await expect( + method.unlock(methodContext, module.name, lockedExistingNFT.nftID), + ).rejects.toThrow('Unlocking NFT via module that did not lock it'); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: module.name, + nftID: lockedExistingNFT.nftID, + }, + NftEventResult.RESULT_UNAUTHORIZED_UNLOCK, + ); + }); + + it('should unlock and log LockEvent', async () => { + await expect( + method.unlock(methodContext, lockedExistingNFT.lockingModule, lockedExistingNFT.nftID), + ).resolves.toBeUndefined(); + + checkEventResult( + methodContext.eventQueue, + 1, + LockEvent, + 0, + { + module: 
lockedExistingNFT.lockingModule, + nftID: lockedExistingNFT.nftID, + }, + NftEventResult.RESULT_SUCCESSFUL, + ); + + const { lockingModule } = await userStore.get( + methodContext, + userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), + ); + + expect(lockingModule).toEqual(NFT_NOT_LOCKED); + }); + }); }); From ec7d0feee9b9e1ce943e895dd1e35d20edadff1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Mon, 12 Jun 2023 03:23:13 +0200 Subject: [PATCH 056/170] Add unit test for Random module bitwiseXOR function (#8569) Add unit tests for bitwiseXOR() --- .../modules/random/bitwise_xor_fixtures.ts | 52 +++++++++++++++++++ .../test/unit/modules/random/utils.spec.ts | 37 +++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 framework/test/unit/modules/random/bitwise_xor_fixtures.ts create mode 100644 framework/test/unit/modules/random/utils.spec.ts diff --git a/framework/test/unit/modules/random/bitwise_xor_fixtures.ts b/framework/test/unit/modules/random/bitwise_xor_fixtures.ts new file mode 100644 index 00000000000..67cb2bafac7 --- /dev/null +++ b/framework/test/unit/modules/random/bitwise_xor_fixtures.ts @@ -0,0 +1,52 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export const bitwiseXORFixtures = [ + { + input: [Buffer.from([0, 0, 0, 0]), Buffer.from([0, 0, 0, 0])], + output: Buffer.from([0, 0, 0, 0]), + }, + { + input: [Buffer.from([1, 1, 1, 1]), Buffer.from([1, 1, 1, 1])], + output: Buffer.from([0, 0, 0, 0]), + }, + { + input: [Buffer.from([0, 1, 0, 0]), Buffer.from([0, 0, 1, 0])], + output: Buffer.from([0, 1, 1, 0]), + }, + { + input: [Buffer.from([0, 0, 1, 1]), Buffer.from([1, 1, 0, 0])], + output: Buffer.from([1, 1, 1, 1]), + }, + { + input: [ + Buffer.from([0, 0, 1, 1]), + Buffer.from([1, 1, 0, 0]), + Buffer.from([1, 1, 1, 0]), + Buffer.from([1, 0, 0, 0]), + ], + output: Buffer.from([1, 0, 0, 1]), + }, + { + input: [ + Buffer.from([1, 0, 1, 1]), + Buffer.from([0, 1, 1, 0]), + Buffer.from([1, 1, 1, 0]), + Buffer.from([0, 0, 0, 0]), + Buffer.from([1, 1, 1, 0]), + Buffer.from([1, 1, 0, 1]), + ], + output: Buffer.from([0, 0, 0, 0]), + }, +]; diff --git a/framework/test/unit/modules/random/utils.spec.ts b/framework/test/unit/modules/random/utils.spec.ts new file mode 100644 index 00000000000..75a638c128f --- /dev/null +++ b/framework/test/unit/modules/random/utils.spec.ts @@ -0,0 +1,37 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { bitwiseXOR } from '../../../../src/modules/random/utils'; +import { bitwiseXORFixtures } from './bitwise_xor_fixtures'; + +describe('Random module utils', () => { + describe('bitwiseXOR', () => { + it('should return the first element if there are no other elements', () => { + const buffer = Buffer.from([0, 1, 1, 1]); + const input = [buffer]; + + expect(bitwiseXOR(input)).toEqual(buffer); + }); + + it.each(bitwiseXORFixtures)('should return correct XOR value', ({ input, output }) => { + expect(bitwiseXOR(input)).toEqual(output); + }); + + it('should throw if input elements have different length', () => { + const input = [Buffer.from([0, 1, 1, 1]), Buffer.from([0, 0, 0, 1, 0])]; + + expect(() => bitwiseXOR(input)).toThrow('All input for XOR should be same size'); + }); + }); +}); From 4a2b9294d03e37433dedb8eeec19dfc41d0ddcdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Tue, 13 Jun 2023 06:09:39 +0200 Subject: [PATCH 057/170] Remove limit check for `numberOfSeeds` in `getRandomSeed()` (#8575) --- framework/src/modules/random/utils.ts | 3 --- framework/test/unit/modules/random/method.spec.ts | 9 --------- 2 files changed, 12 deletions(-) diff --git a/framework/src/modules/random/utils.ts b/framework/src/modules/random/utils.ts index 4d9bae67b41..29dea1193a2 100644 --- a/framework/src/modules/random/utils.ts +++ b/framework/src/modules/random/utils.ts @@ -73,9 +73,6 @@ export const getRandomSeed = ( if (height < 0 || numberOfSeeds < 0) { throw new Error('Height or number of seeds cannot be negative.'); } - if (numberOfSeeds > 1000) { - throw new Error('Number of seeds cannot be greater than 1000.'); - } const initRandomBuffer = utils.intToBuffer(height + numberOfSeeds, 4); let randomSeed = cryptography.utils.hash(initRandomBuffer).slice(0, 16); diff --git a/framework/test/unit/modules/random/method.spec.ts b/framework/test/unit/modules/random/method.spec.ts index 49e8747bcac..dad2560b7f3 100644 --- 
a/framework/test/unit/modules/random/method.spec.ts +++ b/framework/test/unit/modules/random/method.spec.ts @@ -292,15 +292,6 @@ describe('RandomModuleMethod', () => { ); }); - it('should throw error when numberOfSeeds is greater than 1000', async () => { - const height = 11; - const numberOfSeeds = 1001; - - await expect(randomMethod.getRandomBytes(context, height, numberOfSeeds)).rejects.toThrow( - 'Number of seeds cannot be greater than 1000.', - ); - }); - it('should throw error when height is non integer input', async () => { const height = 5.1; const numberOfSeeds = 2; From 13bde5c7677bb0be861e860a7db65b5dbb09f7ff Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Tue, 13 Jun 2023 16:29:42 +0200 Subject: [PATCH 058/170] SupportNFTs Methods (#8577) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :recycle: :white_check_mark: setup code for NFTMethods * :seedling: SupportedNFTsStore.getAll * Updates EventQueue.add to allow empty topics * :seedling: Adds NFTMethod.supportAllNFTs & NFTMethod.removeSupportAllNFTs * :seedling: Adds NFTMethod.supportAllNFTsFromChain & NFTMethod.removeSupportAllNFTsFromChain * :seedling: Adds NFTMethod.supportAllNFTsFromCollection & NFTMethod.removeSupportAllNFTsFromCollection * :pencil2: * :bug: Fixes NFTMethod.removeSupportAllNFTsFromChain to throw AllNFTsFromChainSupportRemovedEvent * :bug: Fixes NFTMethod.supportAllNFTs to return if all are already supported * :recycle: Updates test description * :white_check_mark: Updates tests for NFTMethod.supportAllNFTsFromCollection * :recycle: Updates NFTMethod.removeSupportAllNFTsFromCollection * Updates test description. Co-authored-by: Miroslav Jerković Updates test description. Co-authored-by: Miroslav Jerković Updates test description. 
Co-authored-by: Miroslav Jerković * :art: --- framework/src/modules/nft/method.ts | 212 +++++++ framework/src/modules/nft/module.ts | 5 + .../src/modules/nft/stores/supported_nfts.ts | 13 +- framework/src/state_machine/event_queue.ts | 4 +- .../test/unit/modules/nft/method.spec.ts | 540 ++++++++++++++++-- .../modules/nft/stores/supported_nfts.spec.ts | 23 +- .../unit/state_machine/event_queue.spec.ts | 11 - 7 files changed, 741 insertions(+), 67 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index fe20b86cef4..231032309c1 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -29,6 +29,12 @@ import { DestroyEvent } from './events/destroy'; import { SupportedNFTsStore } from './stores/supported_nfts'; import { CreateEvent } from './events/create'; import { LockEvent } from './events/lock'; +import { AllNFTsSupportedEvent } from './events/all_nfts_supported'; +import { AllNFTsSupportRemovedEvent } from './events/all_nfts_support_removed'; +import { AllNFTsFromChainSupportedEvent } from './events/all_nfts_from_chain_suported'; +import { AllNFTsFromCollectionSupportedEvent } from './events/all_nfts_from_collection_suppported'; +import { AllNFTsFromCollectionSupportRemovedEvent } from './events/all_nfts_from_collection_support_removed'; +import { AllNFTsFromChainSupportRemovedEvent } from './events/all_nfts_from_chain_support_removed'; export class NFTMethod extends BaseMethod { private _config!: ModuleConfig; @@ -420,4 +426,210 @@ export class NFTMethod extends BaseMethod { nftID, }); } + + public async supportAllNFTs(methodContext: MethodContext): Promise { + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const alreadySupported = await supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY); + + if (alreadySupported) { + return; + } + + const allSupportedNFTs = await supportedNFTsStore.getAll(methodContext); + + for (const { key } of allSupportedNFTs) 
{ + await supportedNFTsStore.del(methodContext, key); + } + + await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + this.events.get(AllNFTsSupportedEvent).log(methodContext); + } + + public async removeSupportAllNFTs(methodContext: MethodContext): Promise { + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const allSupportedNFTs = await supportedNFTsStore.getAll(methodContext); + + for (const { key } of allSupportedNFTs) { + await supportedNFTsStore.del(methodContext, key); + } + + this.events.get(AllNFTsSupportRemovedEvent).log(methodContext); + } + + public async supportAllNFTsFromChain( + methodContext: MethodContext, + chainID: Buffer, + ): Promise { + if (chainID.equals(this._config.ownChainID)) { + return; + } + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + const allNFTsSuppported = await supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY); + + if (allNFTsSuppported) { + return; + } + + const chainSupportExists = await supportedNFTsStore.has(methodContext, chainID); + + if (chainSupportExists) { + const supportedCollections = await supportedNFTsStore.get(methodContext, chainID); + + if (supportedCollections.supportedCollectionIDArray.length === 0) { + return; + } + } + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + this.events.get(AllNFTsFromChainSupportedEvent).log(methodContext, chainID); + } + + public async removeSupportAllNFTsFromChain( + methodContext: MethodContext, + chainID: Buffer, + ): Promise { + if (chainID.equals(this._config.ownChainID)) { + throw new Error('Support for native NFTs cannot be removed'); + } + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const allNFTsSupported = await supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY); + + if (allNFTsSupported) { + throw new Error('All NFTs from all chains are supported'); + } + + const 
isChainSupported = await supportedNFTsStore.has(methodContext, chainID); + + if (!isChainSupported) { + return; + } + + await supportedNFTsStore.del(methodContext, chainID); + + this.events.get(AllNFTsFromChainSupportRemovedEvent).log(methodContext, chainID); + } + + public async supportAllNFTsFromCollection( + methodContext: MethodContext, + chainID: Buffer, + collectionID: Buffer, + ): Promise { + if (chainID.equals(this._config.ownChainID)) { + return; + } + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + const allNFTsSupported = await supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY); + + if (allNFTsSupported) { + return; + } + + const isChainSupported = await supportedNFTsStore.has(methodContext, chainID); + + let supportedChainData; + if (isChainSupported) { + supportedChainData = await supportedNFTsStore.get(methodContext, chainID); + + if (supportedChainData.supportedCollectionIDArray.length === 0) { + return; + } + + if ( + supportedChainData.supportedCollectionIDArray.some(collection => + collection.collectionID.equals(collectionID), + ) + ) { + return; + } + + supportedChainData.supportedCollectionIDArray.push({ collectionID }); + + await supportedNFTsStore.save(methodContext, chainID, supportedChainData); + + this.events.get(AllNFTsFromCollectionSupportedEvent).log(methodContext, { + chainID, + collectionID, + }); + + return; + } + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID, + }, + ], + }); + + this.events.get(AllNFTsFromCollectionSupportedEvent).log(methodContext, { + chainID, + collectionID, + }); + } + + public async removeSupportAllNFTsFromCollection( + methodContext: MethodContext, + chainID: Buffer, + collectionID: Buffer, + ): Promise { + if (chainID.equals(this._config.ownChainID)) { + return; + } + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const allNFTsSupported = await supportedNFTsStore.has(methodContext, 
ALL_SUPPORTED_NFTS_KEY); + + if (allNFTsSupported) { + throw new Error('All NFTs from all chains are supported'); + } + + const isChainSupported = await supportedNFTsStore.has(methodContext, chainID); + + if (!isChainSupported) { + return; + } + const supportedChainData = await supportedNFTsStore.get(methodContext, chainID); + + if (supportedChainData.supportedCollectionIDArray.length === 0) { + throw new Error('All NFTs from the specified chain are supported'); + } + + if ( + supportedChainData.supportedCollectionIDArray.some(supportedCollection => + supportedCollection.collectionID.equals(collectionID), + ) + ) { + supportedChainData.supportedCollectionIDArray = + supportedChainData.supportedCollectionIDArray.filter( + supportedCollection => !supportedCollection.collectionID.equals(collectionID), + ); + } + + if (supportedChainData.supportedCollectionIDArray.length === 0) { + await supportedNFTsStore.del(methodContext, chainID); + } else { + await supportedNFTsStore.save(methodContext, chainID, { + ...supportedChainData, + }); + } + + this.events.get(AllNFTsFromCollectionSupportRemovedEvent).log(methodContext, { + chainID, + collectionID, + }); + } } diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 5518b54092d..8d0811f23ec 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -19,6 +19,7 @@ import { InteroperabilityMethod } from '../token/types'; import { NFTInteroperableMethod } from './cc_method'; import { NFTEndpoint } from './endpoint'; import { AllNFTsFromChainSupportedEvent } from './events/all_nfts_from_chain_suported'; +import { AllNFTsFromChainSupportRemovedEvent } from './events/all_nfts_from_chain_support_removed'; import { AllNFTsFromCollectionSupportRemovedEvent } from './events/all_nfts_from_collection_support_removed'; import { AllNFTsFromCollectionSupportedEvent } from './events/all_nfts_from_collection_suppported'; import { AllNFTsSupportRemovedEvent } from 
'./events/all_nfts_support_removed'; @@ -70,6 +71,10 @@ export class NFTModule extends BaseInteroperableModule { AllNFTsFromChainSupportedEvent, new AllNFTsFromChainSupportedEvent(this.name), ); + this.events.register( + AllNFTsFromChainSupportRemovedEvent, + new AllNFTsFromChainSupportRemovedEvent(this.name), + ); this.events.register( AllNFTsFromCollectionSupportedEvent, new AllNFTsFromCollectionSupportedEvent(this.name), diff --git a/framework/src/modules/nft/stores/supported_nfts.ts b/framework/src/modules/nft/stores/supported_nfts.ts index e16dcb0838e..63668534d31 100644 --- a/framework/src/modules/nft/stores/supported_nfts.ts +++ b/framework/src/modules/nft/stores/supported_nfts.ts @@ -12,8 +12,8 @@ * Removal or modification of this copyright notice is prohibited. */ -import { BaseStore, StoreGetter } from '../../base_store'; -import { LENGTH_COLLECTION_ID } from '../constants'; +import { BaseStore, ImmutableStoreGetter, StoreGetter } from '../../base_store'; +import { LENGTH_COLLECTION_ID, LENGTH_CHAIN_ID } from '../constants'; export interface SupportedNFTsStoreData { supportedCollectionIDArray: { @@ -59,4 +59,13 @@ export class SupportedNFTsStore extends BaseStore { await this.set(context, chainID, { supportedCollectionIDArray }); } + + public async getAll( + context: ImmutableStoreGetter, + ): Promise<{ key: Buffer; value: SupportedNFTsStoreData }[]> { + return this.iterate(context, { + gte: Buffer.alloc(LENGTH_CHAIN_ID, 0), + lte: Buffer.alloc(LENGTH_CHAIN_ID, 255), + }); + } } diff --git a/framework/src/state_machine/event_queue.ts b/framework/src/state_machine/event_queue.ts index 25954575558..a498fb07576 100644 --- a/framework/src/state_machine/event_queue.ts +++ b/framework/src/state_machine/event_queue.ts @@ -43,9 +43,7 @@ export class EventQueue { `Max size of event data is ${EVENT_MAX_EVENT_SIZE_BYTES} but received ${data.length}`, ); } - if (!allTopics.length) { - throw new Error('Topics must have at least one element.'); - } + if 
(allTopics.length > EVENT_MAX_TOPICS_PER_EVENT) { throw new Error( `Max topics per event is ${EVENT_MAX_TOPICS_PER_EVENT} but received ${allTopics.length}`, diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 1f14775963f..f80796d12d5 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -36,15 +36,36 @@ import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/even import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; import { CreateEvent } from '../../../../src/modules/nft/events/create'; import { LockEvent, LockEventData } from '../../../../src/modules/nft/events/lock'; +import { AllNFTsSupportedEvent } from '../../../../src/modules/nft/events/all_nfts_supported'; +import { AllNFTsSupportRemovedEvent } from '../../../../src/modules/nft/events/all_nfts_support_removed'; +import { + AllNFTsFromChainSupportedEvent, + AllNFTsFromChainSupportedEventData, +} from '../../../../src/modules/nft/events/all_nfts_from_chain_suported'; +import { + AllNFTsFromCollectionSupportRemovedEvent, + AllNFTsFromCollectionSupportRemovedEventData, +} from '../../../../src/modules/nft/events/all_nfts_from_collection_support_removed'; +import { + AllNFTsFromCollectionSupportedEvent, + AllNFTsFromCollectionSupportedEventData, +} from '../../../../src/modules/nft/events/all_nfts_from_collection_suppported'; +import { + AllNFTsFromChainSupportRemovedEvent, + AllNFTsFromChainSupportRemovedEventData, +} from '../../../../src/modules/nft/events/all_nfts_from_chain_support_removed'; describe('NFTMethod', () => { const module = new NFTModule(); const method = new NFTMethod(module.stores, module.events); + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + method.init({ ownChainID }); let methodContext!: MethodContext; const nftStore = module.stores.get(NFTStore); const userStore = module.stores.get(UserStore); + 
const supportedNFTsStore = module.stores.get(SupportedNFTsStore); const nftID = utils.getRandomBytes(LENGTH_NFT_ID); let owner: Buffer; @@ -65,10 +86,13 @@ describe('NFTMethod', () => { eventQueue.getEvents()[index].toObject().data, ); - expect(eventData).toEqual({ ...expectedResult, result }); + if (result !== null) { + expect(eventData).toEqual({ ...expectedResult, result }); + } }; let existingNFT: { nftID: any; owner: any }; + let existingNativeNFT: { nftID: any; owner: any }; let lockedExistingNFT: { nftID: any; owner: any; lockingModule: string }; let escrowedNFT: { nftID: any; owner: any }; @@ -86,6 +110,11 @@ describe('NFTMethod', () => { nftID: utils.getRandomBytes(LENGTH_NFT_ID), }; + existingNativeNFT = { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: Buffer.concat([ownChainID, Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]), + }; + lockedExistingNFT = { owner: utils.getRandomBytes(LENGTH_ADDRESS), nftID: utils.getRandomBytes(LENGTH_NFT_ID), @@ -102,6 +131,11 @@ describe('NFTMethod', () => { attributesArray: [], }); + await nftStore.save(methodContext, existingNativeNFT.nftID, { + owner: existingNativeNFT.owner, + attributesArray: [], + }); + await userStore.set(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), { lockingModule: NFT_NOT_LOCKED, }); @@ -322,27 +356,11 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id equals own chain id', async () => { - const ownChainID = nftID.slice(0, LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - - const isSupported = await method.isNFTSupported(methodContext, nftID); + const isSupported = await method.isNFTSupported(methodContext, existingNativeNFT.nftID); expect(isSupported).toBe(true); }); it('should return true if nft chain id does not equal own chain id but all nft keys are supported', async () => { - const ownChainID = 
utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { supportedCollectionIDArray: [], }); @@ -352,14 +370,6 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array is empty', async () => { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [], }); @@ -369,14 +379,6 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array includes collection id for nft id', async () => { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ { collectionID: nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID) }, @@ -389,14 +391,6 @@ describe('NFTMethod', () => { }); it('should return false if nft chain id does not equal own chain id and nft chain id is supported but corresponding supported collection id array does not include collection id for nft id', async () 
=> { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, @@ -528,22 +522,17 @@ describe('NFTMethod', () => { ]; const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; const attributesArray3 = [{ module: 'customMod3', attributes: Buffer.alloc(9) }]; - const config = { - ownChainID: Buffer.alloc(LENGTH_CHAIN_ID, 1), - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; + const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); const address = utils.getRandomBytes(LENGTH_ADDRESS); beforeEach(() => { method.addDependencies(interopMethod, feeMethod); - method.init(config); jest.spyOn(feeMethod, 'payFee'); }); it('should set data to stores with correct key and emit successfull create event when there is no entry in the nft substore', async () => { - const expectedKey = Buffer.concat([config.ownChainID, collectionID, Buffer.from('0')]); + const expectedKey = Buffer.concat([ownChainID, collectionID, Buffer.from('0')]); await method.create(methodContext, address, collectionID, attributesArray3); const nftStoreData = await nftStore.get(methodContext, expectedKey); @@ -572,7 +561,7 @@ describe('NFTMethod', () => { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: attributesArray2, }); - const expectedKey = Buffer.concat([config.ownChainID, collectionID, Buffer.from('2')]); + const expectedKey = Buffer.concat([ownChainID, collectionID, Buffer.from('2')]); await method.create(methodContext, address, collectionID, attributesArray3); const nftStoreData 
= await nftStore.get(methodContext, expectedKey); @@ -760,4 +749,455 @@ describe('NFTMethod', () => { expect(lockingModule).toEqual(NFT_NOT_LOCKED); }); }); + + describe('supportAllNFTs', () => { + it('should remove all existing entries, add ALL_SUPPORTED_NFTS_KEY entry and log AllNFTsSupportedEvent', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + await expect(method.supportAllNFTs(methodContext)).resolves.toBeUndefined(); + await expect( + supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY), + ).resolves.toBeTrue(); + + await expect(supportedNFTsStore.has(methodContext, chainID)).resolves.toBeFalse(); + + checkEventResult(methodContext.eventQueue, 1, AllNFTsSupportedEvent, 0, {}, null); + }); + + it('should not update SupportedNFTsStore if ALL_SUPPORTED_NFTS_KEY entry already exists', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + await expect(method.supportAllNFTs(methodContext)).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + }); + + describe('removeSupportAllNFTs', () => { + it('should remove all existing entries and log AllNFTsSupportRemovedEvent', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, utils.getRandomBytes(LENGTH_CHAIN_ID), { + supportedCollectionIDArray: [], + }); + + await expect(method.removeSupportAllNFTs(methodContext)).resolves.toBeUndefined(); + + await expect(supportedNFTsStore.has(methodContext, chainID)).resolves.toBeFalse(); + + checkEventResult(methodContext.eventQueue, 1, AllNFTsSupportRemovedEvent, 0, {}, null); + }); + }); + + describe('supportAllNFTsFromChain', () => { + it('should not update SupportedNFTsStore if provided chainID is equal to ownChainID', async () => { + await expect( + 
method.supportAllNFTsFromChain(methodContext, ownChainID), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should not update SupportedNFTsStore if ALL_SUPPORTED_NFTS_KEY entry exists', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + await expect( + method.supportAllNFTsFromChain(methodContext, utils.getRandomBytes(LENGTH_CHAIN_ID)), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should not update SupportedNFTStore if all collections of provided chainID are already supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + await expect(method.supportAllNFTsFromChain(methodContext, chainID)).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should update SupportedNFTStore if provided chainID does not exist', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await expect(method.supportAllNFTsFromChain(methodContext, chainID)).resolves.toBeUndefined(); + + await expect(supportedNFTsStore.get(methodContext, chainID)).resolves.toEqual({ + supportedCollectionIDArray: [], + }); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromChainSupportedEvent, + 0, + { + chainID, + }, + null, + ); + }); + + it('should update SupportedNFTStore if provided chainID has supported collections', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + ], + }); + + await expect(method.supportAllNFTsFromChain(methodContext, chainID)).resolves.toBeUndefined(); + + await 
expect(supportedNFTsStore.get(methodContext, chainID)).resolves.toEqual({ + supportedCollectionIDArray: [], + }); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromChainSupportedEvent, + 0, + { + chainID, + }, + null, + ); + }); + }); + + describe('removeSupportAllNFTsFromChain', () => { + it('should throw if provided chainID is equal to ownChainID', async () => { + await expect(method.removeSupportAllNFTsFromChain(methodContext, ownChainID)).rejects.toThrow( + 'Support for native NFTs cannot be removed', + ); + }); + + it('should throw if all NFTs are supported', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + await expect( + method.removeSupportAllNFTsFromChain(methodContext, utils.getRandomBytes(LENGTH_CHAIN_ID)), + ).rejects.toThrow('All NFTs from all chains are supported'); + }); + + it('should not update Supported NFTs store if provided chain does not exist', async () => { + await expect( + method.removeSupportAllNFTsFromChain(methodContext, utils.getRandomBytes(LENGTH_CHAIN_ID)), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should remove support for the provided chain and log AllNFTsFromChainSupportedEvent event', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + await expect( + method.removeSupportAllNFTsFromChain(methodContext, chainID), + ).resolves.toBeUndefined(); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromChainSupportRemovedEvent, + 0, + { + chainID, + }, + null, + ); + + await expect(supportedNFTsStore.has(methodContext, chainID)).resolves.toBeFalse(); + }); + }); + + describe('supportAllNFTsFromCollection', () => { + it('should not update SupportedNFTsStore if provided chainID is equal to ownChainID', async () => { + await expect( + 
method.supportAllNFTsFromCollection( + methodContext, + ownChainID, + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should not update SupportedNFTsStore if all NFTs are supported', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + await expect( + method.supportAllNFTsFromCollection( + methodContext, + utils.getRandomBytes(LENGTH_CHAIN_ID), + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should not update SupportedNFTsStore if all collections of the provided chain are supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + await expect( + method.supportAllNFTsFromCollection( + methodContext, + chainID, + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should not update SupportedNFTsStore if the provided collection is already supported for the provided chain', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = utils.getRandomBytes(LENGTH_COLLECTION_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID, + }, + ], + }); + + await expect( + method.supportAllNFTsFromCollection(methodContext, chainID, collectionID), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should add the collection to supported collections of the already supported chain lexicographically', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = 
Buffer.alloc(LENGTH_COLLECTION_ID, 0); + const alreadySupportedCollection = Buffer.alloc(LENGTH_COLLECTION_ID, 1); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID: alreadySupportedCollection, + }, + ], + }); + + await expect( + method.supportAllNFTsFromCollection(methodContext, chainID, collectionID), + ).resolves.toBeUndefined(); + + const expectedSupportedCollectionIDArray = [ + { + collectionID, + }, + { + collectionID: alreadySupportedCollection, + }, + ]; + + await expect(supportedNFTsStore.get(methodContext, chainID)).resolves.toEqual({ + supportedCollectionIDArray: expectedSupportedCollectionIDArray, + }); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromCollectionSupportedEvent, + 0, + { + chainID, + collectionID, + }, + null, + ); + }); + + it('should support the provided collection for the provided chain', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = utils.getRandomBytes(LENGTH_COLLECTION_ID); + + await expect( + method.supportAllNFTsFromCollection(methodContext, chainID, collectionID), + ).resolves.toBeUndefined(); + + await expect(supportedNFTsStore.get(methodContext, chainID)).resolves.toEqual({ + supportedCollectionIDArray: [{ collectionID }], + }); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromCollectionSupportedEvent, + 0, + { + chainID, + collectionID, + }, + null, + ); + }); + }); + + describe('removeSupportAllNFTsFromCollection', () => { + it('should not update SupportedNFTsStore if provided chainID is equal to ownChainID', async () => { + await expect( + method.removeSupportAllNFTsFromCollection( + methodContext, + ownChainID, + utils.getRandomBytes(LENGTH_CHAIN_ID), + ), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should throw if all NFTs are supported', async () => { + await supportedNFTsStore.save(methodContext, 
ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + await expect( + method.removeSupportAllNFTsFromCollection( + methodContext, + utils.getRandomBytes(LENGTH_CHAIN_ID), + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ), + ).rejects.toThrow('All NFTs from all chains are supported'); + }); + + it('should throw if all NFTs for the specified chain are supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + await expect( + method.removeSupportAllNFTsFromCollection( + methodContext, + chainID, + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ), + ).rejects.toThrow('All NFTs from the specified chain are supported'); + }); + + it('should not update SupportedNFTsStore if collection is not already supported', async () => { + await expect( + method.removeSupportAllNFTsFromCollection( + methodContext, + utils.getRandomBytes(LENGTH_CHAIN_ID), + utils.getRandomBytes(LENGTH_COLLECTION_ID), + ), + ).resolves.toBeUndefined(); + + expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + }); + + it('should remove the support for provided collection and save the remaning supported collections lexicographically', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = Buffer.alloc(LENGTH_CHAIN_ID, 5); + + const supportedCollectionIDArray = [ + { + collectionID: Buffer.alloc(LENGTH_CHAIN_ID, 3), + }, + { + collectionID, + }, + { + collectionID: Buffer.alloc(LENGTH_CHAIN_ID, 7), + }, + ]; + + const expectedSupportedCollectionIDArray = [ + { + collectionID: Buffer.alloc(LENGTH_CHAIN_ID, 3), + }, + { + collectionID: Buffer.alloc(LENGTH_CHAIN_ID, 7), + }, + ]; + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray, + }); + + await expect( + method.removeSupportAllNFTsFromCollection(methodContext, chainID, collectionID), + ).resolves.toBeUndefined(); + + await 
expect(supportedNFTsStore.get(methodContext, chainID)).resolves.toEqual({ + supportedCollectionIDArray: expectedSupportedCollectionIDArray, + }); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromCollectionSupportRemovedEvent, + 0, + { + collectionID, + chainID, + }, + null, + ); + }); + + it('should remove the entry for provided collection if the only supported collection is removed', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID, + }, + ], + }); + + await expect( + method.removeSupportAllNFTsFromCollection(methodContext, chainID, collectionID), + ).resolves.toBeUndefined(); + + await expect(supportedNFTsStore.has(methodContext, chainID)).resolves.toBeFalse(); + + checkEventResult( + methodContext.eventQueue, + 1, + AllNFTsFromCollectionSupportRemovedEvent, + 0, + { + collectionID, + chainID, + }, + null, + ); + }); + }); }); diff --git a/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts b/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts index 968cfa3bb42..054ad566eaa 100644 --- a/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts +++ b/framework/test/unit/modules/nft/stores/supported_nfts.spec.ts @@ -12,11 +12,12 @@ * Removal or modification of this copyright notice is prohibited. 
*/ +import { utils } from '@liskhq/lisk-cryptography'; import { SupportedNFTsStore } from '../../../../../src/modules/nft/stores/supported_nfts'; import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; import { createStoreGetter } from '../../../../../src/testing/utils'; -import { LENGTH_COLLECTION_ID } from '../../../../../src/modules/nft/constants'; +import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID } from '../../../../../src/modules/nft/constants'; import { CHAIN_ID_LENGTH, StoreGetter } from '../../../../../src'; describe('NFTStore', () => { @@ -62,4 +63,24 @@ describe('NFTStore', () => { }); }); }); + + describe('getAll', () => { + it('should retrieve all NFTs with key between 0 and maximum value for Buffer of length LENGTH_CHAIN_ID', async () => { + await store.save(context, Buffer.alloc(LENGTH_CHAIN_ID, 0), { + supportedCollectionIDArray: [], + }); + + await store.save(context, Buffer.alloc(LENGTH_CHAIN_ID, 1), { + supportedCollectionIDArray: [], + }); + + await store.save(context, utils.getRandomBytes(LENGTH_CHAIN_ID), { + supportedCollectionIDArray: [], + }); + + const allSupportedNFTs = await store.getAll(context); + + expect([...allSupportedNFTs.keys()]).toHaveLength(3); + }); + }); }); diff --git a/framework/test/unit/state_machine/event_queue.spec.ts b/framework/test/unit/state_machine/event_queue.spec.ts index 0f83bcc61a4..7608c5faf9d 100644 --- a/framework/test/unit/state_machine/event_queue.spec.ts +++ b/framework/test/unit/state_machine/event_queue.spec.ts @@ -70,17 +70,6 @@ describe('EventQueue', () => { ).toThrow('Max size of event data is'); }); - it('should throw error if topics is empty', () => { - expect(() => - eventQueue.add( - 'token', - 'Token Event Name', - utils.getRandomBytes(EVENT_MAX_EVENT_SIZE_BYTES), - [], - ), - ).toThrow('Topics must have at least one element'); - }); - it('should throw error if topics length exceeds 
maxumum allowed', () => { expect(() => eventQueue.add( From a2005c8db8888c6bb66321cc2997034f2b2b6737 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Tue, 13 Jun 2023 22:59:21 +0200 Subject: [PATCH 059/170] Implement methods --- framework/src/modules/nft/constants.ts | 3 + framework/src/modules/nft/events/recover.ts | 6 +- .../src/modules/nft/events/set_attributes.ts | 6 +- .../nft/events/transfer_cross_chain.ts | 15 +- framework/src/modules/nft/internal_method.ts | 3 +- framework/src/modules/nft/method.ts | 211 +++++++++- framework/src/modules/nft/module.ts | 25 +- framework/src/modules/nft/schemas.ts | 8 +- framework/src/modules/nft/types.ts | 13 + .../nft/cc_comands/cc_transfer.spec.ts | 5 +- .../test/unit/modules/nft/method.spec.ts | 374 +++++++++++++++--- 11 files changed, 596 insertions(+), 73 deletions(-) diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index e14f1ded273..475de2ac82b 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -26,6 +26,7 @@ export const EMPTY_BYTES = Buffer.alloc(0); export const ALL_SUPPORTED_NFTS_KEY = EMPTY_BYTES; export const FEE_CREATE_NFT = 5000000; export const LENGTH_TOKEN_ID = 8; +export const MAX_LENGTH_DATA = 64; export const enum NftEventResult { RESULT_SUCCESSFUL = 0, @@ -42,6 +43,8 @@ export const enum NftEventResult { RESULT_RECOVER_FAIL_INVALID_INPUTS = 11, RESULT_INSUFFICIENT_BALANCE = 12, RESULT_DATA_TOO_LONG = 13, + INVALID_RECEIVING_CHAIN = 14, + RESULT_INVALID_ACCOUNT = 15, } export type NftErrorEventResult = Exclude; diff --git a/framework/src/modules/nft/events/recover.ts b/framework/src/modules/nft/events/recover.ts index 589e0585f12..3997f19e7ea 100644 --- a/framework/src/modules/nft/events/recover.ts +++ b/framework/src/modules/nft/events/recover.ts @@ -13,7 +13,7 @@ */ import { BaseEvent, EventQueuer } from '../../base_event'; -import { LENGTH_NFT_ID, LENGTH_CHAIN_ID, 
NftEventResult } from '../constants'; +import { LENGTH_NFT_ID, LENGTH_CHAIN_ID, NftEventResult, NftErrorEventResult } from '../constants'; export interface RecoverEventData { terminatedChainID: Buffer; @@ -50,4 +50,8 @@ export class RecoverEvent extends BaseEvent { + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + this.events.get(TransferEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + throw new Error('NFT substore entry does not exist'); + } + + const owner = await this.getNFTOwner(methodContext, nftID); + if (owner.length === LENGTH_CHAIN_ID) { + this.events.get(TransferEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + throw new Error('NFT is escrowed to another chain'); + } + + if (!owner.equals(senderAddress)) { + this.events.get(TransferEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + throw new Error('Transfer not initiated by the NFT owner'); + } + + const userStore = this.stores.get(UserStore); + const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); + if (userData.lockingModule !== NFT_NOT_LOCKED) { + this.events.get(TransferEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + throw new Error('Locked NFTs cannot be transferred'); + } + + await this._internalMethod.transferInternal(methodContext, recipientAddress, nftID); + } + + public async transferCrossChain( + methodContext: MethodContext, + senderAddress: Buffer, + recipientAddress: Buffer, + nftID: Buffer, + receivingChainID: Buffer, + messageFee: bigint, + data: string, + includeAttributes: boolean, + ): Promise { + if (data.length > MAX_LENGTH_DATA) { + 
this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_DATA_TOO_LONG, + ); + throw new Error('Data field is too long'); + } + + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + throw new Error('NFT substore entry does not exist'); + } + + const owner = await this.getNFTOwner(methodContext, nftID); + if (owner.length === LENGTH_CHAIN_ID) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + throw new Error('NFT is escrowed to another chain'); + } + + if (!owner.equals(senderAddress)) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + throw new Error('Transfer not initiated by the NFT owner'); + } + + const userStore = this.stores.get(UserStore); + const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); + if (userData.lockingModule !== NFT_NOT_LOCKED) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + throw new Error('Locked NFTs cannot be transferred'); + } + + const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( + methodContext, + receivingChainID, + ); + const availableBalance = await 
this._tokenMethod.getAvailableBalance( + methodContext, + senderAddress, + messageFeeTokenID, + ); + if (availableBalance < messageFee) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_INSUFFICIENT_BALANCE, + ); + throw new Error('Insufficient balance for the message fee'); + } + + await this._internalMethod.transferCrossChainInternal( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ); + } } diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 5518b54092d..abf30662b51 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -38,18 +38,24 @@ import { EscrowStore } from './stores/escrow'; import { NFTStore } from './stores/nft'; import { SupportedNFTsStore } from './stores/supported_nfts'; import { UserStore } from './stores/user'; -import { FeeMethod } from './types'; +import { FeeMethod, TokenMethod } from './types'; +import { CrossChainTransferCommand as CrossChainTransferMessageCommand } from './cc_commands/cc_transfer'; +import { TransferCrossChainCommand } from './commands/transfer_cross_chain'; +import { TransferCommand } from './commands/transfer'; export class NFTModule extends BaseInteroperableModule { public method = new NFTMethod(this.stores, this.events); public endpoint = new NFTEndpoint(this.stores, this.offchainStores); public crossChainMethod = new NFTInteroperableMethod(this.stores, this.events); + public crossChainTransferCommand = new CrossChainTransferMessageCommand(this.stores, this.events); + public crossChainCommand = [this.crossChainTransferCommand]; + private readonly _transferCommand = new TransferCommand(this.stores, this.events); + private readonly _ccTransferCommand = new TransferCrossChainCommand(this.stores, this.events); private readonly 
_internalMethod = new InternalMethod(this.stores, this.events); - private _interoperabilityMethod!: InteroperabilityMethod; - public commands = []; + public commands = [this._transferCommand, this._ccTransferCommand]; // eslint-disable-next-line no-useless-constructor public constructor() { @@ -84,9 +90,18 @@ export class NFTModule extends BaseInteroperableModule { this.stores.register(SupportedNFTsStore, new SupportedNFTsStore(this.name, 4)); } - public addDependencies(interoperabilityMethod: InteroperabilityMethod, feeMethod: FeeMethod) { + public addDependencies( + interoperabilityMethod: InteroperabilityMethod, + feeMethod: FeeMethod, + tokenMethod: TokenMethod, + ) { this._interoperabilityMethod = interoperabilityMethod; - this.method.addDependencies(interoperabilityMethod, feeMethod); + this.method.addDependencies( + interoperabilityMethod, + this._internalMethod, + feeMethod, + tokenMethod, + ); this._internalMethod.addDependencies(this.method, this._interoperabilityMethod); this.crossChainMethod.addDependencies(interoperabilityMethod); } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 9b261363e18..506b5216d11 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -12,13 +12,13 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { MAX_DATA_LENGTH } from '../token/constants'; import { LENGTH_CHAIN_ID, LENGTH_NFT_ID, LENGTH_TOKEN_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME, + MAX_LENGTH_DATA, } from './constants'; export const transferParamsSchema = { @@ -40,7 +40,7 @@ export const transferParamsSchema = { data: { dataType: 'string', minLength: 0, - maxLength: MAX_DATA_LENGTH, + maxLength: MAX_LENGTH_DATA, fieldNumber: 3, }, }, @@ -90,7 +90,7 @@ export const crossChainNFTTransferMessageParamsSchema = { }, data: { dataType: 'string', - maxLength: MAX_DATA_LENGTH, + maxLength: MAX_LENGTH_DATA, fieldNumber: 5, }, }, @@ -137,7 +137,7 @@ export const crossChainTransferParamsSchema = { data: { dataType: 'string', minLength: 0, - maxLength: MAX_DATA_LENGTH, + maxLength: MAX_LENGTH_DATA, fieldNumber: 4, }, messageFee: { diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index d71c76e83a7..c6b63a9d44f 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -40,3 +40,16 @@ export interface InteroperabilityMethod { export interface FeeMethod { payFee(methodContext: MethodContext, amount: bigint): void; } + +export interface TokenMethod { + getAvailableBalance( + methodContext: MethodContext, + address: Buffer, + tokenID: Buffer, + ): Promise; +} + +export interface NFTMethod { + getChainID(nftID: Buffer): Buffer; + destroy(methodContext: MethodContext, address: Buffer, nftID: Buffer): Promise; +} diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index c84cbee033d..62f7e01c207 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -47,6 +47,9 @@ describe('CrossChain Transfer Command', () => { const method = new NFTMethod(module.stores, module.events); const internalMethod = new InternalMethod(module.stores, module.events); const 
feeMethod = { payFee: jest.fn() }; + const tokenMethod = { + getAvailableBalance: jest.fn(), + }; const checkEventResult = ( eventQueue: EventQueue, length: number, @@ -128,7 +131,7 @@ describe('CrossChain Transfer Command', () => { beforeEach(async () => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - method.addDependencies(interopMethod, feeMethod); + method.addDependencies(interopMethod, internalMethod, feeMethod, tokenMethod); method.init(config); internalMethod.addDependencies(method, interopMethod); internalMethod.init(config); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 1f14775963f..d4b5a78438e 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -14,6 +14,7 @@ import { codec } from '@liskhq/lisk-codec'; import { utils } from '@liskhq/lisk-cryptography'; +import { when } from 'jest-when'; import { NFTMethod } from '../../../../src/modules/nft/method'; import { NFTModule } from '../../../../src/modules/nft/module'; import { EventQueue } from '../../../../src/state_machine'; @@ -27,6 +28,7 @@ import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, LENGTH_NFT_ID, + LENGTH_TOKEN_ID, NFT_NOT_LOCKED, NftEventResult, } from '../../../../src/modules/nft/constants'; @@ -36,10 +38,33 @@ import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/even import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; import { CreateEvent } from '../../../../src/modules/nft/events/create'; import { LockEvent, LockEventData } from '../../../../src/modules/nft/events/lock'; +import { InternalMethod } from '../../../../src/modules/nft/internal_method'; +import { TransferEvent, TransferEventData } from '../../../../src/modules/nft/events/transfer'; +import { + TransferCrossChainEvent, + TransferCrossChainEventData, +} from '../../../../src/modules/nft/events/transfer_cross_chain'; 
describe('NFTMethod', () => { const module = new NFTModule(); const method = new NFTMethod(module.stores, module.events); + const internalMethod = new InternalMethod(module.stores, module.events); + const messageFeeTokenID = utils.getRandomBytes(LENGTH_TOKEN_ID); + const interopMethod = { + send: jest.fn(), + error: jest.fn(), + terminateChain: jest.fn(), + getMessageFeeTokenID: jest.fn().mockResolvedValue(Promise.resolve(messageFeeTokenID)), + }; + const feeMethod = { payFee: jest.fn() }; + const tokenMethod = { + getAvailableBalance: jest.fn(), + }; + const config = { + ownChainID: Buffer.alloc(LENGTH_CHAIN_ID, 1), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; let methodContext!: MethodContext; @@ -73,6 +98,10 @@ describe('NFTMethod', () => { let escrowedNFT: { nftID: any; owner: any }; beforeEach(async () => { + method.addDependencies(interopMethod, internalMethod, feeMethod, tokenMethod); + method.init(config); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(config); owner = utils.getRandomBytes(LENGTH_ADDRESS); methodContext = createMethodContext({ @@ -307,6 +336,7 @@ describe('NFTMethod', () => { }); describe('isNFTSupported', () => { + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); beforeEach(async () => { await nftStore.save(methodContext, nftID, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), @@ -322,27 +352,16 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id equals own chain id', async () => { - const ownChainID = nftID.slice(0, LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - - const isSupported = await method.isNFTSupported(methodContext, nftID); + const newNftID = Buffer.alloc(LENGTH_NFT_ID, 1); + await nftStore.save(methodContext, newNftID, { + owner: 
utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }); + const isSupported = await method.isNFTSupported(methodContext, newNftID); expect(isSupported).toBe(true); }); it('should return true if nft chain id does not equal own chain id but all nft keys are supported', async () => { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { supportedCollectionIDArray: [], }); @@ -352,14 +371,6 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array is empty', async () => { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [], }); @@ -369,14 +380,6 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array includes collection id for nft id', async () => { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ { collectionID: 
nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID) }, @@ -389,14 +392,6 @@ describe('NFTMethod', () => { }); it('should return false if nft chain id does not equal own chain id and nft chain id is supported but corresponding supported collection id array does not include collection id for nft id', async () => { - const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const config = { - ownChainID, - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; - method.init(config); - const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, @@ -515,30 +510,16 @@ describe('NFTMethod', () => { }); describe('create', () => { - const interopMethod = { - send: jest.fn(), - error: jest.fn(), - terminateChain: jest.fn(), - getMessageFeeTokenID: jest.fn(), - }; - const feeMethod = { payFee: jest.fn() }; const attributesArray1 = [ { module: 'customMod1', attributes: Buffer.alloc(5) }, { module: 'customMod2', attributes: Buffer.alloc(2) }, ]; const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; const attributesArray3 = [{ module: 'customMod3', attributes: Buffer.alloc(9) }]; - const config = { - ownChainID: Buffer.alloc(LENGTH_CHAIN_ID, 1), - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), - }; const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); const address = utils.getRandomBytes(LENGTH_ADDRESS); beforeEach(() => { - method.addDependencies(interopMethod, feeMethod); - method.init(config); jest.spyOn(feeMethod, 'payFee'); }); @@ -760,4 +741,287 @@ describe('NFTMethod', () => { expect(lockingModule).toEqual(NFT_NOT_LOCKED); }); }); + + describe('transfer', () => { + const senderAddress = 
utils.getRandomBytes(LENGTH_ADDRESS); + const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); + + it('should throw and emit error transfer event if nft does not exist', async () => { + await expect( + method.transfer(methodContext, senderAddress, recipientAddress, nftID), + ).rejects.toThrow('NFT substore entry does not exist'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferEvent, + 0, + { + senderAddress, + recipientAddress, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should throw and emit error transfer event if nft is escrowed', async () => { + await expect( + method.transfer(methodContext, senderAddress, recipientAddress, escrowedNFT.nftID), + ).rejects.toThrow('NFT is escrowed to another chain'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferEvent, + 0, + { + senderAddress, + recipientAddress, + nftID: escrowedNFT.nftID, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + }); + + it('should throw and emit error transfer event if transfer is not initiated by the nft owner', async () => { + await expect( + method.transfer(methodContext, senderAddress, recipientAddress, existingNFT.nftID), + ).rejects.toThrow('Transfer not initiated by the NFT owner'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferEvent, + 0, + { + senderAddress, + recipientAddress, + nftID: existingNFT.nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + }); + + it('should throw and emit error transfer event if nft is locked', async () => { + await expect( + method.transfer( + methodContext, + lockedExistingNFT.owner, + recipientAddress, + lockedExistingNFT.nftID, + ), + ).rejects.toThrow('Locked NFTs cannot be transferred'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferEvent, + 0, + { + senderAddress: lockedExistingNFT.owner, + recipientAddress, + nftID: lockedExistingNFT.nftID, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + }); + + it('should resolve if all params are 
valid', async () => { + jest.spyOn(internalMethod, 'transferInternal'); + + await expect( + method.transfer(methodContext, existingNFT.owner, recipientAddress, existingNFT.nftID), + ).resolves.toBeUndefined(); + expect(internalMethod['transferInternal']).toHaveBeenCalledWith( + methodContext, + recipientAddress, + existingNFT.nftID, + ); + }); + }); + + describe('transferCrossChain', () => { + const senderAddress = utils.getRandomBytes(LENGTH_ADDRESS); + const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); + const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const messageFee = BigInt(1000); + const data = ''; + const includeAttributes = false; + + it('should throw and emit error transfer cross chain event if nft does not exist', async () => { + await expect( + method.transferCrossChain( + methodContext, + senderAddress, + recipientAddress, + nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('NFT substore entry does not exist'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should throw and emit error transfer cross chain event if nft is escrowed', async () => { + await expect( + method.transferCrossChain( + methodContext, + senderAddress, + recipientAddress, + escrowedNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('NFT is escrowed to another chain'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID: escrowedNFT.nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_ESCROWED, + ); + }); + + it('should throw and emit error transfer cross chain event if transfer is not initiated by the nft owner', async () => { + await expect( + 
method.transferCrossChain( + methodContext, + senderAddress, + recipientAddress, + existingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('Transfer not initiated by the NFT owner'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID: existingNFT.nftID, + includeAttributes, + }, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + }); + + it('should throw and emit error transfer cross chain event if nft is locked', async () => { + await expect( + method.transferCrossChain( + methodContext, + lockedExistingNFT.owner, + recipientAddress, + lockedExistingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('Locked NFTs cannot be transferred'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress: lockedExistingNFT.owner, + recipientAddress, + receivingChainID, + nftID: lockedExistingNFT.nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_LOCKED, + ); + }); + + it('should throw and emit error transfer cross chain event if balance is less than message fee', async () => { + when(tokenMethod.getAvailableBalance) + .calledWith(methodContext, existingNFT.owner, messageFeeTokenID) + .mockResolvedValue(messageFee - BigInt(10)); + + await expect( + method.transferCrossChain( + methodContext, + existingNFT.owner, + recipientAddress, + existingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('Insufficient balance for the message fee'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress: existingNFT.owner, + recipientAddress, + receivingChainID, + nftID: existingNFT.nftID, + includeAttributes, + }, + NftEventResult.RESULT_INSUFFICIENT_BALANCE, + ); + }); + + it('should resolve if all params are valid', async () => { + 
jest.spyOn(internalMethod, 'transferCrossChainInternal'); + when(tokenMethod.getAvailableBalance) + .calledWith(methodContext, existingNFT.owner, messageFeeTokenID) + .mockResolvedValue(messageFee + BigInt(10)); + + await expect( + method.transferCrossChain( + methodContext, + existingNFT.owner, + recipientAddress, + existingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).resolves.toBeUndefined(); + expect(internalMethod['transferCrossChainInternal']).toHaveBeenCalledWith( + methodContext, + existingNFT.owner, + recipientAddress, + existingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ); + }); + }); }); From 478b5c71debbd0e63d477091897bf6e3b99245e0 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 15 Jun 2023 00:41:59 +0200 Subject: [PATCH 060/170] Update verification per feedback --- framework/src/modules/nft/method.ts | 17 +++++++++ .../test/unit/modules/nft/method.spec.ts | 38 ++++++++++++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 6026ebf20bb..7e77f8a95f0 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -558,6 +558,23 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT is escrowed to another chain'); } + const nftChainID = this.getChainID(nftID); + const ownChainID = this._internalMethod.getOwnChainID(); + if (![ownChainID, receivingChainID].some(allowedChainID => nftChainID.equals(allowedChainID))) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.RESULT_NFT_NOT_NATIVE, + ); + throw new Error('NFT must be native either to the sending chain or the receiving chain'); + } + if (!owner.equals(senderAddress)) { this.events.get(TransferCrossChainEvent).error( 
methodContext, diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index d4b5a78438e..301c8151382 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -840,12 +840,17 @@ describe('NFTMethod', () => { describe('transferCrossChain', () => { const senderAddress = utils.getRandomBytes(LENGTH_ADDRESS); const recipientAddress = utils.getRandomBytes(LENGTH_ADDRESS); - const receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); const messageFee = BigInt(1000); const data = ''; const includeAttributes = false; + let receivingChainID: Buffer; + + beforeEach(() => { + receivingChainID = existingNFT.nftID.slice(0, LENGTH_CHAIN_ID); + }); it('should throw and emit error transfer cross chain event if nft does not exist', async () => { + receivingChainID = nftID.slice(0, LENGTH_CHAIN_ID); await expect( method.transferCrossChain( methodContext, @@ -875,6 +880,7 @@ describe('NFTMethod', () => { }); it('should throw and emit error transfer cross chain event if nft is escrowed', async () => { + receivingChainID = escrowedNFT.nftID.slice(0, LENGTH_CHAIN_ID); await expect( method.transferCrossChain( methodContext, @@ -903,6 +909,35 @@ describe('NFTMethod', () => { ); }); + it('should throw and emit error transfer cross chain event if nft chain id is equal to neither own chain id or receiving chain id', async () => { + await expect( + method.transferCrossChain( + methodContext, + lockedExistingNFT.owner, + recipientAddress, + lockedExistingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('NFT must be native either to the sending chain or the receiving chain'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress: lockedExistingNFT.owner, + recipientAddress, + receivingChainID, + nftID: lockedExistingNFT.nftID, + includeAttributes, + }, + 
NftEventResult.RESULT_NFT_NOT_NATIVE, + ); + }); + it('should throw and emit error transfer cross chain event if transfer is not initiated by the nft owner', async () => { await expect( method.transferCrossChain( @@ -933,6 +968,7 @@ describe('NFTMethod', () => { }); it('should throw and emit error transfer cross chain event if nft is locked', async () => { + receivingChainID = lockedExistingNFT.nftID.slice(0, LENGTH_CHAIN_ID); await expect( method.transferCrossChain( methodContext, From 3ec3f89f2e291ff28c19fdc17c8af7bc69dc19d9 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 15 Jun 2023 11:58:26 +0800 Subject: [PATCH 061/170] :white_check_mark: Fix merge conflict --- .../pos/commands/register_validator.spec.ts | 31 ------------------- 1 file changed, 31 deletions(-) diff --git a/framework/test/unit/modules/pos/commands/register_validator.spec.ts b/framework/test/unit/modules/pos/commands/register_validator.spec.ts index 44243c3148a..06b32ab68e6 100644 --- a/framework/test/unit/modules/pos/commands/register_validator.spec.ts +++ b/framework/test/unit/modules/pos/commands/register_validator.spec.ts @@ -296,37 +296,6 @@ describe('Validator registration command', () => { expect(result.status).toBe(VerifyStatus.FAIL); expect(result.error?.message).toInclude('Insufficient transaction fee.'); }); - - it('should throw error if name is empty', async () => { - const invalidParams = codec.encode(validatorRegistrationCommandParamsSchema, { - ...transactionParams, - name: '', - }); - - const invalidTransaction = new Transaction({ - module: 'pos', - command: 'registerValidator', - senderPublicKey: publicKey, - nonce: BigInt(0), - fee: BigInt(100000000), - params: invalidParams, - signatures: [publicKey], - }); - - const context = testing - .createTransactionContext({ - transaction: invalidTransaction, - chainID, - }) - .createCommandVerifyContext( - validatorRegistrationCommandParamsSchema, - ); - - const result = await validatorRegistrationCommand.verify(context); - - 
expect(result.status).toBe(VerifyStatus.FAIL); - expect(result.error?.message).toInclude("'.name' must NOT have fewer than 1 characters"); - }); }); describe('execute', () => { From 5c65e808e1305e05adc27656dd401f95c09bbcec Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 15 Jun 2023 17:55:07 +0200 Subject: [PATCH 062/170] Implement nft methods --- framework/src/modules/nft/method.ts | 128 +++++++- .../test/unit/modules/nft/method.spec.ts | 273 +++++++++++++++++- 2 files changed, 399 insertions(+), 2 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 8f4f2513674..01a7bfe2a3c 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -12,13 +12,15 @@ * Removal or modification of this copyright notice is prohibited. */ +import { codec } from '@liskhq/lisk-codec'; import { BaseMethod } from '../base_method'; import { FeeMethod, InteroperabilityMethod, ModuleConfig, TokenMethod } from './types'; -import { NFTAttributes, NFTStore } from './stores/nft'; +import { NFTAttributes, NFTStore, NFTStoreData, nftStoreSchema } from './stores/nft'; import { ImmutableMethodContext, MethodContext } from '../../state_machine'; import { ALL_SUPPORTED_NFTS_KEY, FEE_CREATE_NFT, + LENGTH_ADDRESS, LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, LENGTH_NFT_ID, @@ -40,6 +42,9 @@ import { AllNFTsFromChainSupportedEvent } from './events/all_nfts_from_chain_sup import { AllNFTsFromCollectionSupportedEvent } from './events/all_nfts_from_collection_suppported'; import { AllNFTsFromCollectionSupportRemovedEvent } from './events/all_nfts_from_collection_support_removed'; import { AllNFTsFromChainSupportRemovedEvent } from './events/all_nfts_from_chain_support_removed'; +import { RecoverEvent } from './events/recover'; +import { EscrowStore } from './stores/escrow'; +import { SetAttributesEvent } from './events/set_attributes'; export class NFTMethod extends BaseMethod { 
private _config!: ModuleConfig; @@ -854,4 +859,125 @@ export class NFTMethod extends BaseMethod { collectionID, }); } + + public async recover( + methodContext: MethodContext, + terminatedChainID: Buffer, + substorePrefix: Buffer, + storeKey: Buffer, + storeValue: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + const nftID = storeKey; + let isDecodable = true; + let decodedValue: NFTStoreData; + try { + decodedValue = codec.decode(nftStoreSchema, storeValue); + } catch (error) { + isDecodable = false; + } + + if ( + !substorePrefix.equals(nftStore.subStorePrefix) || + storeKey.length !== LENGTH_NFT_ID || + !isDecodable + ) { + this.events.get(RecoverEvent).error( + methodContext, + { + terminatedChainID, + nftID, + }, + NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, + ); + throw new Error('Invalid inputs'); + } + + const nftChainID = this.getChainID(nftID); + const ownChainID = this._internalMethod.getOwnChainID(); + if (!nftChainID.equals(ownChainID)) { + this.events.get(RecoverEvent).error( + methodContext, + { + terminatedChainID, + nftID, + }, + NftEventResult.RESULT_INITIATED_BY_NONNATIVE_CHAIN, + ); + throw new Error('Recovery called by a foreign chain'); + } + + const nftData = await nftStore.get(methodContext, nftID); + if (!nftData.owner.equals(terminatedChainID)) { + this.events.get(RecoverEvent).error( + methodContext, + { + terminatedChainID, + nftID, + }, + NftEventResult.RESULT_NFT_NOT_ESCROWED, + ); + throw new Error('NFT was not escrowed to terminated chain'); + } + + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const storeValueOwner = decodedValue!.owner; + if (storeValueOwner.length !== LENGTH_ADDRESS) { + this.events.get(RecoverEvent).error( + methodContext, + { + terminatedChainID, + nftID, + }, + NftEventResult.RESULT_INVALID_ACCOUNT, + ); + throw new Error('Invalid account information'); + } + + const escrowStore = this.stores.get(EscrowStore); + nftData.owner = storeValueOwner; + await 
nftStore.save(methodContext, nftID, nftData); + await this._internalMethod.createUserEntry(methodContext, nftData.owner, nftID); + await escrowStore.del(methodContext, escrowStore.getKey(terminatedChainID, nftID)); + + this.events.get(RecoverEvent).log(methodContext, { + terminatedChainID, + nftID, + }); + } + + public async setAttributes( + methodContext: MethodContext, + module: string, + nftID: Buffer, + attributes: Buffer, + ): Promise { + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(methodContext, nftID); + if (!nftExists) { + this.events.get(SetAttributesEvent).error( + methodContext, + { + nftID, + attributes, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + throw new Error('NFT substore entry does not exist'); + } + + const nftData = await nftStore.get(methodContext, nftID); + const index = nftData.attributesArray.findIndex(attr => attr.module === module); + if (index > -1) { + nftData.attributesArray[index] = { module, attributes }; + } else { + nftData.attributesArray.push({ module, attributes }); + } + await nftStore.save(methodContext, nftID, nftData); + + this.events.get(SetAttributesEvent).log(methodContext, { + nftID, + attributes, + }); + } } diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 345ddce4d60..6455cedefe3 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -32,7 +32,7 @@ import { NFT_NOT_LOCKED, NftEventResult, } from '../../../../src/modules/nft/constants'; -import { NFTStore } from '../../../../src/modules/nft/stores/nft'; +import { NFTStore, nftStoreSchema } from '../../../../src/modules/nft/stores/nft'; import { UserStore } from '../../../../src/modules/nft/stores/user'; import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; @@ -62,6 +62,12 
@@ import { AllNFTsFromChainSupportRemovedEvent, AllNFTsFromChainSupportRemovedEventData, } from '../../../../src/modules/nft/events/all_nfts_from_chain_support_removed'; +import { RecoverEvent, RecoverEventData } from '../../../../src/modules/nft/events/recover'; +import { + SetAttributesEvent, + SetAttributesEventData, +} from '../../../../src/modules/nft/events/set_attributes'; +import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; describe('NFTMethod', () => { const module = new NFTModule(); @@ -1538,4 +1544,269 @@ describe('NFTMethod', () => { ); }); }); + + describe('recover', () => { + const terminatedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const substorePrefix = Buffer.from('8000', 'hex'); + const storeKey = utils.getRandomBytes(LENGTH_NFT_ID); + const storeValue = codec.encode(nftStoreSchema, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }); + + it('should throw and emit error recover event if substore prefix is not valid', async () => { + await expect( + method.recover(methodContext, terminatedChainID, Buffer.alloc(2, 2), storeKey, storeValue), + ).rejects.toThrow('Invalid inputs'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: storeKey, + }, + NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, + ); + }); + + it('should throw and emit error recover event if store key length is not valid', async () => { + const newStoreKey = utils.getRandomBytes(LENGTH_NFT_ID + 1); + + await expect( + method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), + ).rejects.toThrow('Invalid inputs'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: newStoreKey, + }, + NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, + ); + }); + + it('should throw and emit error recover event if store value is not valid', async () => { + await expect( + method.recover( + 
methodContext, + terminatedChainID, + substorePrefix, + storeKey, + Buffer.from('asfas'), + ), + ).rejects.toThrow('Invalid inputs'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: storeKey, + }, + NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, + ); + }); + + it('should throw and emit error recover event if nft chain id is not same as own chain id', async () => { + await expect( + method.recover(methodContext, terminatedChainID, substorePrefix, storeKey, storeValue), + ).rejects.toThrow('Recovery called by a foreign chain'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: storeKey, + }, + NftEventResult.RESULT_INITIATED_BY_NONNATIVE_CHAIN, + ); + }); + + it('should throw and emit error recover event if nft is not escrowed to terminated chain', async () => { + const newStoreKey = Buffer.alloc(LENGTH_NFT_ID, 1); + await nftStore.save(methodContext, newStoreKey, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }); + + await expect( + method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), + ).rejects.toThrow('NFT was not escrowed to terminated chain'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: newStoreKey, + }, + NftEventResult.RESULT_NFT_NOT_ESCROWED, + ); + }); + + it('should throw and emit error recover event if store value owner length is invalid', async () => { + const newStoreKey = Buffer.alloc(LENGTH_NFT_ID, 1); + await nftStore.save(methodContext, newStoreKey, { + owner: terminatedChainID, + attributesArray: [], + }); + + await expect( + method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), + ).rejects.toThrow('Invalid account information'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: newStoreKey, + }, + 
NftEventResult.RESULT_INVALID_ACCOUNT, + ); + }); + + it('should set appropriate values to stores and resolve with emitting success recover event if params are valid', async () => { + const newStoreKey = Buffer.alloc(LENGTH_NFT_ID, 1); + const storeValueOwner = utils.getRandomBytes(LENGTH_ADDRESS); + const newStoreValue = codec.encode(nftStoreSchema, { + owner: storeValueOwner, + attributesArray: [], + }); + await nftStore.save(methodContext, newStoreKey, { + owner: terminatedChainID, + attributesArray: [], + }); + jest.spyOn(internalMethod, 'createUserEntry'); + + await expect( + method.recover( + methodContext, + terminatedChainID, + substorePrefix, + newStoreKey, + newStoreValue, + ), + ).resolves.toBeUndefined(); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: newStoreKey, + }, + NftEventResult.RESULT_SUCCESSFUL, + ); + const nftStoreData = await nftStore.get(methodContext, newStoreKey); + const escrowStore = module.stores.get(EscrowStore); + const escrowAccountExists = await escrowStore.has( + methodContext, + escrowStore.getKey(terminatedChainID, newStoreKey), + ); + expect(nftStoreData.owner).toStrictEqual(storeValueOwner); + expect(nftStoreData.attributesArray).toEqual([]); + expect(internalMethod['createUserEntry']).toHaveBeenCalledWith( + methodContext, + storeValueOwner, + newStoreKey, + ); + expect(escrowAccountExists).toBe(false); + }); + }); + + describe('setAttributes', () => { + it('should throw and log LockEvent if NFT does not exist', async () => { + const attributes = Buffer.alloc(9); + + await expect( + method.setAttributes(methodContext, module.name, nftID, attributes), + ).rejects.toThrow('NFT substore entry does not exist'); + checkEventResult( + methodContext.eventQueue, + 1, + SetAttributesEvent, + 0, + { + nftID, + attributes, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + + it('should set attributes if NFT exists and no entry exists for the given module', async () 
=> { + const attributes = Buffer.alloc(7); + + await expect( + method.setAttributes(methodContext, module.name, existingNFT.nftID, attributes), + ).resolves.toBeUndefined(); + checkEventResult( + methodContext.eventQueue, + 1, + SetAttributesEvent, + 0, + { + nftID: existingNFT.nftID, + attributes, + }, + NftEventResult.RESULT_SUCCESSFUL, + ); + const storedAttributes = await method.getAttributes( + methodContext, + module.name, + existingNFT.nftID, + ); + expect(storedAttributes).toStrictEqual(attributes); + }); + + it('should update attributes if NFT exists and an entry already exists for the given module', async () => { + const newAttributes = Buffer.alloc(12); + const attributesArray1 = [ + { module: 'customMod1', attributes: Buffer.alloc(5) }, + { module: 'customMod2', attributes: Buffer.alloc(2) }, + ]; + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: attributesArray1, + }); + + await expect( + method.setAttributes( + methodContext, + attributesArray1[0].module, + existingNFT.nftID, + newAttributes, + ), + ).resolves.toBeUndefined(); + checkEventResult( + methodContext.eventQueue, + 1, + SetAttributesEvent, + 0, + { + nftID: existingNFT.nftID, + attributes: newAttributes, + }, + NftEventResult.RESULT_SUCCESSFUL, + ); + const storedAttributes = await method.getAttributes( + methodContext, + attributesArray1[0].module, + existingNFT.nftID, + ); + expect(storedAttributes).toStrictEqual(newAttributes); + }); + }); }); From 128433b3effcaa91f8f1f8ccd67fcc72cca2e4d0 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Mon, 19 Jun 2023 05:00:03 +0200 Subject: [PATCH 063/170] Update framework/test/unit/modules/nft/method.spec.ts MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Miroslav Jerković --- framework/test/unit/modules/nft/method.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 6455cedefe3..2dd3d81568a 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1727,7 +1727,7 @@ describe('NFTMethod', () => { }); describe('setAttributes', () => { - it('should throw and log LockEvent if NFT does not exist', async () => { + it('should throw and log SetAttributesEvent if NFT does not exist', async () => { const attributes = Buffer.alloc(9); await expect( From fd20e6d5d438dfcc321f7336f99d270ec81549c7 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Mon, 19 Jun 2023 05:48:19 +0200 Subject: [PATCH 064/170] Update to use redundant function per feedback --- framework/src/modules/nft/cc_commands/cc_transfer.ts | 8 ++++++-- framework/src/modules/nft/method.ts | 8 ++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index 9a2331926d0..af4045cd2a8 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -104,8 +104,12 @@ export class CrossChainTransferCommand extends BaseCCCommand { const storeData = await nftStore.get(getMethodContext(), nftID); if (status === CCM_STATUS_CODE_OK) { storeData.owner = recipientAddress; - // commented line below can be used by custom modules when defining their own logic for getNewAttributes function - // storeData.attributesArray = this._internalMethod.getNewAttributes(nftID, storeData.attributesArray, params.attributesArray); + const storedAttributes = storeData.attributesArray; + storeData.attributesArray = this._internalMethod.getNewAttributes( + nftID, + storedAttributes, + receivedAttributes, + ); await nftStore.save(getMethodContext(), nftID, storeData); await this._internalMethod.createUserEntry(getMethodContext(), 
recipientAddress, nftID); await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 01a7bfe2a3c..bee060c6eee 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -936,6 +936,14 @@ export class NFTMethod extends BaseMethod { const escrowStore = this.stores.get(EscrowStore); nftData.owner = storeValueOwner; + const storedAttributes = nftData.attributesArray; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const receivedAttributes = decodedValue!.attributesArray; + nftData.attributesArray = this._internalMethod.getNewAttributes( + nftID, + storedAttributes, + receivedAttributes, + ); await nftStore.save(methodContext, nftID, nftData); await this._internalMethod.createUserEntry(methodContext, nftData.owner, nftID); await escrowStore.del(methodContext, escrowStore.getKey(terminatedChainID, nftID)); From 267deac1507a249ee5b1189d66e4ce20cbdd4003 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Mon, 19 Jun 2023 22:49:49 +0200 Subject: [PATCH 065/170] NFTEndpoint (#8591) * :recycle: Updates signature of NFTMethod.getCollectionID & NFTMethod.isNFTSupported * :seedling: NFTEndpoint * :bug: :white_check_mark: for NFTEndpoint.getNFT * :pencil2: * :bug: Fixes getNFTsResponseSchema defintion * :white_check_mark: Adds schema validation logic for NFTEndpoints --- framework/src/modules/nft/endpoint.ts | 221 ++++- framework/src/modules/nft/method.ts | 10 +- framework/src/modules/nft/module.ts | 54 +- framework/src/modules/nft/schemas.ts | 231 ++++++ framework/src/modules/nft/types.ts | 11 + .../test/unit/modules/nft/endpoint.spec.ts | 757 ++++++++++++++++++ 6 files changed, 1276 insertions(+), 8 deletions(-) create mode 100644 framework/test/unit/modules/nft/endpoint.spec.ts diff --git a/framework/src/modules/nft/endpoint.ts 
b/framework/src/modules/nft/endpoint.ts index aa2637fa295..1999d881077 100644 --- a/framework/src/modules/nft/endpoint.ts +++ b/framework/src/modules/nft/endpoint.ts @@ -12,14 +12,225 @@ * Removal or modification of this copyright notice is prohibited. */ -import { ModuleConfig } from './types'; +import * as cryptography from '@liskhq/lisk-cryptography'; +import { validator } from '@liskhq/lisk-validator'; import { BaseEndpoint } from '../base_endpoint'; +import { JSONObject, ModuleEndpointContext } from '../../types'; +import { + collectionExistsRequestSchema, + getCollectionIDsRequestSchema, + getEscrowedNFTIDsRequestSchema, + getNFTRequestSchema, + getNFTsRequestSchema, + hasNFTRequestSchema, + isNFTSupportedRequestSchema, +} from './schemas'; +import { NFTStore } from './stores/nft'; +import { LENGTH_NFT_ID } from './constants'; +import { UserStore } from './stores/user'; +import { NFT } from './types'; +import { SupportedNFTsStore } from './stores/supported_nfts'; +import { NFTMethod } from './method'; export class NFTEndpoint extends BaseEndpoint { - // @ts-expect-error TODO: unused error. Remove when implementing. 
- private _moduleConfig!: ModuleConfig; + private _nftMethod!: NFTMethod; - public init(moduleConfig: ModuleConfig) { - this._moduleConfig = moduleConfig; + public addDependencies(nftMethod: NFTMethod) { + this._nftMethod = nftMethod; + } + + public async getNFTs( + context: ModuleEndpointContext, + ): Promise<{ nfts: JSONObject & { id: string }>[] }> { + validator.validate<{ address: string }>(getNFTsRequestSchema, context.params); + + const nftStore = this.stores.get(NFTStore); + + const owner = cryptography.address.getAddressFromLisk32Address(context.params.address); + + const allNFTs = await nftStore.iterate(context.getImmutableMethodContext(), { + gte: Buffer.alloc(LENGTH_NFT_ID, 0), + lte: Buffer.alloc(LENGTH_NFT_ID, 255), + }); + + const ownedNFTs = allNFTs.filter(nft => nft.value.owner.equals(owner)); + + const userStore = this.stores.get(UserStore); + + const nfts = []; + + for (const ownedNFT of ownedNFTs) { + const ownedNFTUserData = await userStore.get( + context.getImmutableMethodContext(), + userStore.getKey(owner, ownedNFT.key), + ); + + nfts.push({ + id: ownedNFT.key.toString('hex'), + attributesArray: ownedNFT.value.attributesArray.map(attribute => ({ + module: attribute.module, + attributes: attribute.attributes.toString('hex'), + })), + lockingModule: ownedNFTUserData.lockingModule, + }); + } + + return { nfts }; + } + + public async hasNFT(context: ModuleEndpointContext): Promise<{ hasNFT: boolean }> { + const { params } = context; + validator.validate<{ address: string; id: string }>(hasNFTRequestSchema, params); + + const nftID = Buffer.from(params.id, 'hex'); + const owner = cryptography.address.getAddressFromLisk32Address(params.address); + + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(context.getImmutableMethodContext(), nftID); + + if (!nftExists) { + return { hasNFT: nftExists }; + } + + const nftData = await nftStore.get(context.getImmutableMethodContext(), nftID); + + return { hasNFT: 
nftData.owner.equals(owner) }; + } + + public async getNFT(context: ModuleEndpointContext): Promise> { + const { params } = context; + validator.validate<{ id: string }>(getNFTRequestSchema, params); + + const nftID = Buffer.from(params.id, 'hex'); + const nftStore = this.stores.get(NFTStore); + const nftExists = await nftStore.has(context.getImmutableMethodContext(), nftID); + + if (!nftExists) { + throw new Error('NFT does not exist'); + } + + const userStore = this.stores.get(UserStore); + const nftData = await nftStore.get(context.getImmutableMethodContext(), nftID); + const userData = await userStore.get( + context.getImmutableMethodContext(), + userStore.getKey(nftData.owner, nftID), + ); + + return { + owner: nftData.owner.toString('hex'), + attributesArray: nftData.attributesArray.map(attribute => ({ + module: attribute.module, + attributes: attribute.attributes.toString('hex'), + })), + lockingModule: userData.lockingModule, + }; + } + + public async getCollectionIDs( + context: ModuleEndpointContext, + ): Promise<{ collectionIDs: string[] }> { + const { params } = context; + + validator.validate<{ chainID: string }>(getCollectionIDsRequestSchema, params); + + const chainID = Buffer.from(params.chainID, 'hex'); + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const chainExists = await supportedNFTsStore.has(context.getImmutableMethodContext(), chainID); + + if (!chainExists) { + return { collectionIDs: [] }; + } + + const supportedNFTsData = await supportedNFTsStore.get( + context.getImmutableMethodContext(), + chainID, + ); + + return { + collectionIDs: supportedNFTsData.supportedCollectionIDArray.map(collection => + collection.collectionID.toString('hex'), + ), + }; + } + + public async collectionExists( + context: ModuleEndpointContext, + ): Promise<{ collectionExists: boolean }> { + const { params } = context; + + validator.validate<{ chainID: string; collectionID: string }>( + collectionExistsRequestSchema, + params, + ); + + 
const chainID = Buffer.from(params.chainID, 'hex'); + + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const chainExists = await supportedNFTsStore.has(context.getImmutableMethodContext(), chainID); + + if (!chainExists) { + return { collectionExists: false }; + } + + const collectionID = Buffer.from(params.collectionID, 'hex'); + + const supportedNFTsData = await supportedNFTsStore.get( + context.getImmutableMethodContext(), + chainID, + ); + + return { + collectionExists: supportedNFTsData.supportedCollectionIDArray.some(supportedCollection => + supportedCollection.collectionID.equals(collectionID), + ), + }; + } + + public async getEscrowedNFTIDs( + context: ModuleEndpointContext, + ): Promise<{ escrowedNFTIDs: string[] }> { + const { params } = context; + + validator.validate<{ chainID: string }>(getEscrowedNFTIDsRequestSchema, params); + + const chainD = Buffer.from(params.chainID, 'hex'); + + const nftStore = this.stores.get(NFTStore); + + const allNFTs = await nftStore.iterate(context.getImmutableMethodContext(), { + gte: Buffer.alloc(LENGTH_NFT_ID, 0), + lte: Buffer.alloc(LENGTH_NFT_ID, 255), + }); + + return { + escrowedNFTIDs: allNFTs + .filter(nft => nft.value.owner.equals(chainD)) + .map(nft => nft.key.toString('hex')), + }; + } + + public async isNFTSupported( + context: ModuleEndpointContext, + ): Promise<{ isNFTSupported: boolean }> { + const { params } = context; + + validator.validate<{ id: string }>(isNFTSupportedRequestSchema, params); + + const nftID = Buffer.from(params.id, 'hex'); + let isNFTSupported = false; + + try { + isNFTSupported = await this._nftMethod.isNFTSupported( + context.getImmutableMethodContext(), + nftID, + ); + } catch (err) { + return { isNFTSupported }; + } + + return { isNFTSupported }; } } diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 8f4f2513674..8bafcd26ad9 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ 
-179,7 +179,10 @@ export class NFTMethod extends BaseMethod { }); } - public async getCollectionID(methodContext: MethodContext, nftID: Buffer): Promise { + public async getCollectionID( + methodContext: ImmutableMethodContext, + nftID: Buffer, + ): Promise { const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(methodContext, nftID); if (!nftExists) { @@ -188,7 +191,10 @@ export class NFTMethod extends BaseMethod { return nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); } - public async isNFTSupported(methodContext: MethodContext, nftID: Buffer): Promise { + public async isNFTSupported( + methodContext: ImmutableMethodContext, + nftID: Buffer, + ): Promise { const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(methodContext, nftID); if (!nftExists) { diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index a2a8fab8dce..9dce479a4ec 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -35,6 +35,22 @@ import { TransferCrossChainEvent } from './events/transfer_cross_chain'; import { UnlockEvent } from './events/unlock'; import { InternalMethod } from './internal_method'; import { NFTMethod } from './method'; +import { + collectionExistsRequestSchema, + collectionExistsResponseSchema, + getCollectionIDsRequestSchema, + getCollectionIDsResponseSchema, + getEscrowedNFTIDsRequestSchema, + getEscrowedNFTIDsResponseSchema, + getNFTRequestSchema, + getNFTResponseSchema, + getNFTsRequestSchema, + getNFTsResponseSchema, + hasNFTRequestSchema, + hasNFTResponseSchema, + isNFTSupportedRequestSchema, + isNFTSupportedResponseSchema, +} from './schemas'; import { EscrowStore } from './stores/escrow'; import { NFTStore } from './stores/nft'; import { SupportedNFTsStore } from './stores/supported_nfts'; @@ -114,7 +130,43 @@ export class NFTModule extends BaseInteroperableModule { public metadata(): ModuleMetadata { return { 
...this.baseMetadata(), - endpoints: [], + endpoints: [ + { + name: this.endpoint.collectionExists.name, + request: collectionExistsRequestSchema, + response: collectionExistsResponseSchema, + }, + { + name: this.endpoint.getCollectionIDs.name, + request: getCollectionIDsRequestSchema, + response: getCollectionIDsResponseSchema, + }, + { + name: this.endpoint.getEscrowedNFTIDs.name, + request: getEscrowedNFTIDsRequestSchema, + response: getEscrowedNFTIDsResponseSchema, + }, + { + name: this.endpoint.getNFT.name, + request: getNFTRequestSchema, + response: getNFTResponseSchema, + }, + { + name: this.endpoint.getNFTs.name, + request: getNFTsRequestSchema, + response: getNFTsResponseSchema, + }, + { + name: this.endpoint.hasNFT.name, + request: hasNFTRequestSchema, + response: hasNFTResponseSchema, + }, + { + name: this.endpoint.isNFTSupported.name, + request: isNFTSupportedRequestSchema, + response: isNFTSupportedResponseSchema, + }, + ], assets: [], }; } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 506b5216d11..da8f393bc6c 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -14,6 +14,7 @@ import { LENGTH_CHAIN_ID, + LENGTH_COLLECTION_ID, LENGTH_NFT_ID, LENGTH_TOKEN_ID, MAX_LENGTH_MODULE_NAME, @@ -156,3 +157,233 @@ export const crossChainTransferParamsSchema = { }, }, }; + +export const getNFTsRequestSchema = { + $id: '/nft/endpoint/getNFTsRequest', + type: 'object', + properties: { + address: { + type: 'string', + format: 'lisk32', + }, + }, + required: ['address'], +}; + +export const getNFTsResponseSchema = { + $id: '/nft/endpoint/getNFTsResponse', + type: 'object', + properties: { + nfts: { + type: 'array', + items: { + type: 'object', + properties: { + id: { + type: 'string', + format: 'hex', + }, + attributesArray: { + type: 'array', + items: { + type: 'object', + properties: { + module: { + type: 'string', + }, + attributes: { + type: 'string', + format: 'hex', + }, + 
}, + }, + }, + lockingModule: { + type: 'string', + }, + }, + }, + }, + }, +}; + +export const hasNFTRequestSchema = { + $id: '/nft/endpoint/hasNFTRequest', + type: 'object', + properties: { + address: { + type: 'string', + format: 'lisk32', + }, + id: { + type: 'string', + format: 'hex', + minLength: LENGTH_NFT_ID * 2, + maxLength: LENGTH_NFT_ID * 2, + }, + }, + required: ['address', 'id'], +}; + +export const hasNFTResponseSchema = { + $id: '/nft/endpoint/hasNFTResponse', + type: 'object', + properties: { + hasNFT: { + type: 'boolean', + }, + }, +}; + +export const getNFTRequestSchema = { + $id: '/nft/endpoint/getNFTRequest', + type: 'object', + properties: { + id: { + type: 'string', + format: 'hex', + minLength: LENGTH_NFT_ID * 2, + maxLength: LENGTH_NFT_ID * 2, + }, + }, + required: ['id'], +}; + +export const getNFTResponseSchema = { + $id: '/nft/endpoint/getNFTResponse', + type: 'object', + properties: { + owner: { + type: 'string', + format: 'hex', + }, + attributesArray: { + type: 'array', + items: { + type: 'object', + properties: { + module: { + type: 'string', + }, + attributes: { + type: 'string', + format: 'hex', + }, + }, + }, + }, + lockingModule: { + type: 'string', + }, + }, +}; + +export const getCollectionIDsRequestSchema = { + $id: '/nft/endpoint/getCollectionIDsRequest', + type: 'object', + properties: { + chainID: { + type: 'string', + format: 'hex', + minLength: LENGTH_CHAIN_ID * 2, + maxLength: LENGTH_CHAIN_ID * 2, + }, + }, + required: ['chainID'], +}; + +export const getCollectionIDsResponseSchema = { + $id: '/nft/endpoint/getCollectionIDsRespone', + type: 'object', + properties: { + collectionIDs: { + type: 'array', + items: { + type: 'string', + format: 'hex', + }, + }, + }, +}; + +export const collectionExistsRequestSchema = { + $id: '/nft/endpoint/collectionExistsRequest', + type: 'object', + properties: { + chainID: { + type: 'string', + format: 'hex', + minLength: LENGTH_CHAIN_ID * 2, + maxLength: LENGTH_CHAIN_ID * 2, + }, + 
collectionID: { + type: 'string', + format: 'hex', + minLength: LENGTH_COLLECTION_ID * 2, + maxLength: LENGTH_COLLECTION_ID * 2, + }, + }, + required: ['chainID', 'collectionID'], +}; + +export const collectionExistsResponseSchema = { + $id: '/nft/endpoint/collectionExistsResponse', + type: 'object', + properties: { + collectionExists: { + type: 'boolean', + }, + }, +}; + +export const getEscrowedNFTIDsRequestSchema = { + $id: '/nft/endpoint/getEscrowedNFTIDsRequest', + type: 'object', + properties: { + chainID: { + type: 'string', + format: 'hex', + minLength: LENGTH_CHAIN_ID * 2, + maxLength: LENGTH_CHAIN_ID * 2, + }, + }, + required: ['chainID'], +}; + +export const getEscrowedNFTIDsResponseSchema = { + $id: '/nft/endpoint/getEscrowedNFTIDsResponse', + type: 'object', + properties: { + escrowedNFTIDs: { + type: 'array', + items: { + type: 'string', + format: 'hex', + }, + }, + }, +}; + +export const isNFTSupportedRequestSchema = { + $id: '/nft/endpoint/isNFTSupportedRequest', + type: 'object', + properties: { + id: { + type: 'string', + format: 'hex', + minLength: LENGTH_NFT_ID * 2, + maxLength: LENGTH_NFT_ID * 2, + }, + }, + required: ['id'], +}; + +export const isNFTSupportedResponseSchema = { + $id: '/nft/endpoint/isNFTSupportedResponse', + type: 'object', + properties: { + isNFTSupported: { + type: 'boolean', + }, + }, +}; diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index c6b63a9d44f..bfa88ef2189 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -53,3 +53,14 @@ export interface NFTMethod { getChainID(nftID: Buffer): Buffer; destroy(methodContext: MethodContext, address: Buffer, nftID: Buffer): Promise; } + +export interface NFTAttributes { + module: string; + attributes: Buffer; +} + +export interface NFT { + owner: string; + attributesArray: NFTAttributes[]; + lockingModule: string; +} diff --git a/framework/test/unit/modules/nft/endpoint.spec.ts 
b/framework/test/unit/modules/nft/endpoint.spec.ts new file mode 100644 index 00000000000..d179db7dbeb --- /dev/null +++ b/framework/test/unit/modules/nft/endpoint.spec.ts @@ -0,0 +1,757 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { validator } from '@liskhq/lisk-validator'; +import { address, utils } from '@liskhq/lisk-cryptography'; +import { NFTEndpoint } from '../../../../src/modules/nft/endpoint'; +import { NFTMethod } from '../../../../src/modules/nft/method'; +import { NFTModule } from '../../../../src/modules/nft/module'; +import { MethodContext } from '../../../../src/state_machine'; +import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; +import { + InMemoryPrefixedStateDB, + createTransientMethodContext, + createTransientModuleEndpointContext, +} from '../../../../src/testing'; +import { NFTStore } from '../../../../src/modules/nft/stores/nft'; +import { UserStore } from '../../../../src/modules/nft/stores/user'; +import { + ALL_SUPPORTED_NFTS_KEY, + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, + NFT_NOT_LOCKED, +} from '../../../../src/modules/nft/constants'; +import { NFT } from '../../../../src/modules/nft/types'; +import { JSONObject } from '../../../../src'; +import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; +import { + collectionExistsResponseSchema, + getCollectionIDsResponseSchema, + getEscrowedNFTIDsResponseSchema, + getNFTResponseSchema, + getNFTsResponseSchema, + 
hasNFTResponseSchema, + isNFTSupportedResponseSchema, +} from '../../../../src/modules/nft/schemas'; + +type NFTofOwner = Omit & { id: Buffer }; + +describe('NFTEndpoint', () => { + const module = new NFTModule(); + const method = new NFTMethod(module.stores, module.events); + const endpoint = new NFTEndpoint(module.stores, module.events); + const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + method.init({ ownChainID }); + + endpoint.addDependencies(method); + + const nftStore = module.stores.get(NFTStore); + const userStore = module.stores.get(UserStore); + const supportedNFTsStore = module.stores.get(SupportedNFTsStore); + + let stateStore: PrefixedStateReadWriter; + let methodContext: MethodContext; + + const owner = utils.getRandomBytes(LENGTH_ADDRESS); + const ownerAddress = address.getLisk32AddressFromAddress(owner); + + const nfts: NFTofOwner[] = [ + { + id: utils.getRandomBytes(LENGTH_NFT_ID), + attributesArray: [ + { + module: 'pos', + attributes: Buffer.alloc(10, 0), + }, + ], + lockingModule: NFT_NOT_LOCKED, + }, + { + id: utils.getRandomBytes(LENGTH_NFT_ID), + attributesArray: [], + lockingModule: 'pos', + }, + ]; + + beforeEach(() => { + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + methodContext = createTransientMethodContext({ stateStore }); + }); + + describe('getNFTs', () => { + beforeEach(async () => { + for (const nft of nfts) { + await nftStore.save(methodContext, nft.id, { + owner, + attributesArray: nft.attributesArray, + }); + + await userStore.set(methodContext, userStore.getKey(owner, nft.id), { + lockingModule: nft.lockingModule, + }); + } + + await nftStore.save(methodContext, utils.getRandomBytes(LENGTH_NFT_ID), { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + }); + + it('should fail if address does not have valid length', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: 'incorrect', + }, + }); + + await 
expect(endpoint.getNFTs(context)).rejects.toThrow( + `'.address' must match format "lisk32"`, + ); + }); + + it('should return empty NFTs collection if owner has no NFTs', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: address.getLisk32AddressFromAddress(utils.getRandomBytes(LENGTH_ADDRESS)), + }, + }); + + await expect(endpoint.getNFTs(context)).resolves.toEqual({ nfts: [] }); + + validator.validate(getNFTsResponseSchema, { nfts: [] }); + }); + + it('should return NFTs for the provided owner lexicograhpically per id', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: ownerAddress, + }, + }); + + const expectedNFTs = { + nfts: nfts + .sort((a, b) => a.id.compare(b.id)) + .map(nft => ({ + id: nft.id.toString('hex'), + attributesArray: nft.attributesArray.map(attribute => ({ + module: attribute.module, + attributes: attribute.attributes.toString('hex'), + })), + lockingModule: nft.lockingModule, + })), + }; + + await expect(endpoint.getNFTs(context)).resolves.toEqual(expectedNFTs); + + validator.validate(getNFTsResponseSchema, expectedNFTs); + }); + }); + + describe('hasNFT', () => { + it('should fail if address is not valid', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: 'incorrect', + id: utils.getRandomBytes(LENGTH_NFT_ID).toString('hex'), + }, + }); + + await expect(endpoint.hasNFT(context)).rejects.toThrow( + `'.address' must match format "lisk32"`, + ); + }); + + it('should fail if id does not have valid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + address: ownerAddress, + id: utils.getRandomBytes(LENGTH_NFT_ID - 1).toString('hex'), + }, + }); + + const maxLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + address: ownerAddress, + id: utils.getRandomBytes(LENGTH_NFT_ID + 
1).toString('hex'), + }, + }); + + await expect(endpoint.hasNFT(minLengthContext)).rejects.toThrow( + `'.id' must NOT have fewer than 32 characters`, + ); + + await expect(endpoint.hasNFT(maxLengthContext)).rejects.toThrow( + `'.id' must NOT have more than 32 characters`, + ); + }); + + it('should return false if provided NFT does not exist', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: ownerAddress, + id: utils.getRandomBytes(LENGTH_NFT_ID).toString('hex'), + }, + }); + + await expect(endpoint.hasNFT(context)).resolves.toEqual({ hasNFT: false }); + + validator.validate(hasNFTResponseSchema, { hasNFT: false }); + }); + + it('should return false if provided NFT is not owned by the provided address', async () => { + await nftStore.save(methodContext, nfts[0].id, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: ownerAddress, + id: nfts[0].id.toString('hex'), + }, + }); + + await expect(endpoint.hasNFT(context)).resolves.toEqual({ hasNFT: false }); + + validator.validate(hasNFTResponseSchema, { hasNFT: false }); + }); + + it('should return true if provided is owned by the provided address', async () => { + await nftStore.save(methodContext, nfts[0].id, { + owner, + attributesArray: [], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + address: ownerAddress, + id: nfts[0].id.toString('hex'), + }, + }); + + await expect(endpoint.hasNFT(context)).resolves.toEqual({ hasNFT: true }); + + validator.validate(hasNFTResponseSchema, { hasNFT: true }); + }); + }); + + describe('getNFT', () => { + it('should fail if id does not have valid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + id: utils.getRandomBytes(LENGTH_NFT_ID - 1).toString('hex'), + }, + }); + + const maxLengthContext = 
createTransientModuleEndpointContext({ + stateStore, + params: { + id: utils.getRandomBytes(LENGTH_NFT_ID + 1).toString('hex'), + }, + }); + + await expect(endpoint.getNFT(minLengthContext)).rejects.toThrow( + `'.id' must NOT have fewer than 32 characters`, + ); + + await expect(endpoint.getNFT(maxLengthContext)).rejects.toThrow( + `'.id' must NOT have more than 32 characters`, + ); + }); + + it('should fail if NFT does not exist', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nfts[0].id.toString('hex'), + }, + }); + + await expect(endpoint.getNFT(context)).rejects.toThrow('NFT does not exist'); + }); + + it('should return NFT details', async () => { + const attributesArray = [ + { + module: 'pos', + attributes: utils.getRandomBytes(10), + }, + ]; + await nftStore.save(methodContext, nfts[0].id, { + owner, + attributesArray, + }); + + await userStore.set(methodContext, userStore.getKey(owner, nfts[0].id), { + lockingModule: NFT_NOT_LOCKED, + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nfts[0].id.toString('hex'), + }, + }); + + const expectedNFT: JSONObject = { + owner: owner.toString('hex'), + attributesArray: attributesArray.map(attribute => ({ + module: attribute.module, + attributes: attribute.attributes.toString('hex'), + })), + lockingModule: NFT_NOT_LOCKED, + }; + + await expect(endpoint.getNFT(context)).resolves.toEqual(expectedNFT); + + validator.validate(getNFTResponseSchema, expectedNFT); + }); + }); + + describe('getCollectionIDs', () => { + it('should fail if provided chainID has invalid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1).toString('hex'), + }, + }); + + const maxLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1).toString('hex'), + }, + 
}); + + await expect(endpoint.getCollectionIDs(minLengthContext)).rejects.toThrow( + `'.chainID' must NOT have fewer than 8 characters`, + ); + + await expect(endpoint.getCollectionIDs(maxLengthContext)).rejects.toThrow( + `'.chainID' must NOT have more than 8 characters`, + ); + }); + + it('should return empty list if provided chainID does not exist', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), + }, + }); + + await expect(endpoint.getCollectionIDs(context)).resolves.toEqual({ collectionIDs: [] }); + }); + + it('should return supported collections of the provided chain', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + const supportedCollections = [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + ]; + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: supportedCollections, + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: chainID.toString('hex'), + }, + }); + + const expectedSupportedCollection = { + collectionIDs: supportedCollections.map(collection => + collection.collectionID.toString('hex'), + ), + }; + + await expect(endpoint.getCollectionIDs(context)).resolves.toEqual( + expectedSupportedCollection, + ); + + validator.validate(getCollectionIDsResponseSchema, expectedSupportedCollection); + }); + }); + + describe('collectionExists', () => { + it('should fail if provided chainID has invalid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + const maxLengthContext = createTransientModuleEndpointContext({ + stateStore, + 
params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + await expect(endpoint.collectionExists(minLengthContext)).rejects.toThrow( + `'.chainID' must NOT have fewer than 8 characters`, + ); + + await expect(endpoint.collectionExists(maxLengthContext)).rejects.toThrow( + `'.chainID' must NOT have more than 8 characters`, + ); + }); + + it('should fail if provided collectionID has invalid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID - 1).toString('hex'), + }, + }); + + const maxLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID + 1).toString('hex'), + }, + }); + + await expect(endpoint.collectionExists(minLengthContext)).rejects.toThrow( + `'.collectionID' must NOT have fewer than 8 characters`, + ); + + await expect(endpoint.collectionExists(maxLengthContext)).rejects.toThrow( + `'.collectionID' must NOT have more than 8 characters`, + ); + }); + + it('should return false if provided chainID does not exist', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + await expect(endpoint.collectionExists(context)).resolves.toEqual({ + collectionExists: false, + }); + + validator.validate(collectionExistsResponseSchema, { collectionExists: false }); + }); + + it('should return false if provided collectionID does not exist for the provided chainID', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + 
await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + ], + }); + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: chainID.toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + await expect(endpoint.collectionExists(context)).resolves.toEqual({ + collectionExists: false, + }); + }); + + it('should return true if provided collectionID exists for the provided chainID', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = utils.getRandomBytes(LENGTH_COLLECTION_ID); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID, + }, + ], + }); + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: chainID.toString('hex'), + collectionID: collectionID.toString('hex'), + }, + }); + + await expect(endpoint.collectionExists(context)).resolves.toEqual({ collectionExists: true }); + + validator.validate(collectionExistsResponseSchema, { collectionExists: true }); + }); + }); + + describe('getEscrowedNFTIDs', () => { + it('should fail if provided chainID has invalid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + const maxLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + await expect(endpoint.getEscrowedNFTIDs(minLengthContext)).rejects.toThrow( + `'.chainID' must NOT have fewer than 8 characters`, + ); + + await 
expect(endpoint.getEscrowedNFTIDs(maxLengthContext)).rejects.toThrow( + `'.chainID' must NOT have more than 8 characters`, + ); + }); + + it('should return empty list if provided chain has no NFTs escrowed to it', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), + }, + }); + + await expect(endpoint.getEscrowedNFTIDs(context)).resolves.toEqual({ escrowedNFTIDs: [] }); + + validator.validate(getEscrowedNFTIDsResponseSchema, { escrowedNFTIDs: [] }); + }); + + it('should return list of escrowed NFTs for the chainID', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const nftIDs = [Buffer.alloc(LENGTH_NFT_ID, 0), Buffer.alloc(LENGTH_NFT_ID, 255)]; + + for (const nftID of nftIDs) { + await nftStore.save(methodContext, nftID, { + owner: chainID, + attributesArray: [], + }); + } + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: chainID.toString('hex'), + }, + }); + + const expectedNFTIDs = { escrowedNFTIDs: nftIDs.map(nftID => nftID.toString('hex')) }; + + await expect(endpoint.getEscrowedNFTIDs(context)).resolves.toEqual(expectedNFTIDs); + + validator.validate(getEscrowedNFTIDsResponseSchema, expectedNFTIDs); + }); + }); + + describe('isNFTSupported', () => { + it('should fail if id does not have valid length', async () => { + const minLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + id: utils.getRandomBytes(LENGTH_NFT_ID - 1).toString('hex'), + }, + }); + + const maxLengthContext = createTransientModuleEndpointContext({ + stateStore, + params: { + id: utils.getRandomBytes(LENGTH_NFT_ID + 1).toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(minLengthContext)).rejects.toThrow( + `'.id' must NOT have fewer than 32 characters`, + ); + + await expect(endpoint.isNFTSupported(maxLengthContext)).rejects.toThrow( + `'.id' must NOT have more than 
32 characters`, + ); + }); + + it('should return false if NFT does not exist', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: utils.getRandomBytes(LENGTH_NFT_ID).toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: false }); + + validator.validate(isNFTSupportedResponseSchema, { isNFTSupported: false }); + }); + + it('should return true if chainID of NFT is equal to ownChainID', async () => { + const nftID = Buffer.concat([ownChainID, Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]); + + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nftID.toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: true }); + + validator.validate(isNFTSupportedResponseSchema, { isNFTSupported: true }); + }); + + it('should return true if all NFTs are supported', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nftID.toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: true }); + }); + + it('should return true if all collections of the chain are supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const nftID = Buffer.concat([chainID, Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID)]); + + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + await 
supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nftID.toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: true }); + }); + + it('should return true if collection of the chain is supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = utils.getRandomBytes(LENGTH_COLLECTION_ID); + const nftID = Buffer.concat([ + chainID, + collectionID, + Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID), + ]); + + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID, + }, + ], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nftID.toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: true }); + }); + + it('should return false if collection of the chain is not supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const collectionID = utils.getRandomBytes(LENGTH_COLLECTION_ID); + const nftID = Buffer.concat([ + chainID, + collectionID, + Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID), + ]); + + await nftStore.save(methodContext, nftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + ], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nftID.toString('hex'), + }, + }); + + await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: false 
}); + }); + }); +}); From 993411e9975fac192186030d5d9b4a4885d73f0e Mon Sep 17 00:00:00 2001 From: has5aan Date: Mon, 19 Jun 2023 23:13:24 +0200 Subject: [PATCH 066/170] :seedling: NFTModule.initGenesisState --- framework/src/modules/nft/module.ts | 154 +++- framework/src/modules/nft/schemas.ts | 132 ++++ framework/src/modules/nft/types.ts | 26 + .../nft/init_genesis_state_fixtures.ts | 381 +++++++++ .../test/unit/modules/nft/module.spec.ts | 740 +++++++++++++++++- 5 files changed, 1428 insertions(+), 5 deletions(-) create mode 100644 framework/test/unit/modules/nft/init_genesis_state_fixtures.ts diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 9dce479a4ec..ca3fc41e111 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -12,6 +12,9 @@ * Removal or modification of this copyright notice is prohibited. */ +import { dataStructures } from '@liskhq/lisk-utils'; +import { codec } from '@liskhq/lisk-codec'; +import { validator } from '@liskhq/lisk-validator'; import { GenesisBlockExecuteContext } from '../../state_machine'; import { ModuleInitArgs, ModuleMetadata } from '../base_module'; import { BaseInteroperableModule } from '../interoperability'; @@ -50,15 +53,17 @@ import { hasNFTResponseSchema, isNFTSupportedRequestSchema, isNFTSupportedResponseSchema, + genesisNFTStoreSchema, } from './schemas'; import { EscrowStore } from './stores/escrow'; import { NFTStore } from './stores/nft'; import { SupportedNFTsStore } from './stores/supported_nfts'; import { UserStore } from './stores/user'; -import { FeeMethod, TokenMethod } from './types'; +import { FeeMethod, GenesisNFTStore, TokenMethod } from './types'; import { CrossChainTransferCommand as CrossChainTransferMessageCommand } from './cc_commands/cc_transfer'; import { TransferCrossChainCommand } from './commands/transfer_cross_chain'; import { TransferCommand } from './commands/transfer'; +import { ALL_SUPPORTED_NFTS_KEY, 
LENGTH_ADDRESS, LENGTH_CHAIN_ID } from './constants'; export class NFTModule extends BaseInteroperableModule { public method = new NFTMethod(this.stores, this.events); @@ -174,6 +179,149 @@ export class NFTModule extends BaseInteroperableModule { // eslint-disable-next-line @typescript-eslint/no-empty-function public async init(_args: ModuleInitArgs) {} - // eslint-disable-next-line @typescript-eslint/no-empty-function - public async initGenesisState(_context: GenesisBlockExecuteContext): Promise {} + public async initGenesisState(context: GenesisBlockExecuteContext): Promise { + const assetBytes = context.assets.getAsset(this.name); + + if (!assetBytes) { + return; + } + + const genesisStore = codec.decode(genesisNFTStoreSchema, assetBytes); + validator.validate(genesisNFTStoreSchema, genesisStore); + + const nftIDKeySet = new dataStructures.BufferSet(); + + for (const nft of genesisStore.nftSubstore) { + if (![LENGTH_CHAIN_ID, LENGTH_ADDRESS].includes(nft.owner.length)) { + throw new Error(`nftID ${nft.nftID.toString('hex')} has invalid owner`); + } + if (nftIDKeySet.has(nft.nftID)) { + throw new Error(`nftID ${nft.nftID.toString('hex')} duplicated`); + } + + nftIDKeySet.add(nft.nftID); + } + + for (const nft of genesisStore.nftSubstore) { + const ownerUsers = genesisStore.userSubstore.filter(userEntry => + userEntry.nftID.equals(nft.nftID), + ); + const ownerChains = genesisStore.escrowSubstore.filter(escrowEntry => + escrowEntry.nftID.equals(nft.nftID), + ); + + if (ownerUsers.length === 0 && ownerChains.length === 0) { + throw new Error( + `nftID ${nft.nftID.toString( + 'hex', + )} has no corresponding entry for UserSubstore or EscrowSubstore`, + ); + } + + if (ownerUsers.length > 0 && ownerChains.length > 0) { + throw new Error( + `nftID ${nft.nftID.toString( + 'hex', + )} has an entry for both UserSubstore and EscrowSubstore`, + ); + } + + if (ownerUsers.length > 1) { + throw new Error(`nftID ${nft.nftID.toString('hex')} has multiple entries for 
UserSubstore`); + } + + if (ownerChains.length > 1) { + throw new Error( + `nftID ${nft.nftID.toString('hex')} has multiple entries for EscrowSubstore`, + ); + } + + if (nft.owner.length === LENGTH_CHAIN_ID && ownerChains.length !== 1) { + throw new Error( + `nftID ${nft.nftID.toString( + 'hex', + )} should have a corresponding entry for EscrowSubstore only`, + ); + } + + const attributeSet: Record = {}; + + for (const attribute of nft.attributesArray) { + attributeSet[attribute.module] = (attributeSet[attribute.module] ?? 0) + 1; + + if (attributeSet[attribute.module] > 1) { + throw new Error( + `nftID ${nft.nftID.toString('hex')} has a duplicate attribute for ${ + attribute.module + } module`, + ); + } + } + } + + if (genesisStore.supportedNFTsSubstore.length === 0) { + return; + } + + const allNFTsSupported = genesisStore.supportedNFTsSubstore.some(supportedNFTs => + supportedNFTs.chainID.equals(ALL_SUPPORTED_NFTS_KEY), + ); + + if (genesisStore.supportedNFTsSubstore.length > 1 && allNFTsSupported) { + throw new Error( + 'SupportedNFTsSubstore should contain only one entry if all NFTs are supported', + ); + } + + if ( + allNFTsSupported && + genesisStore.supportedNFTsSubstore[0].supportedCollectionIDArray.length !== 0 + ) { + throw new Error('supportedCollectionIDArray must be empty if all NFTs are supported'); + } + + const supportedChainsKeySet = new dataStructures.BufferSet(); + for (const supportedNFT of genesisStore.supportedNFTsSubstore) { + if (supportedChainsKeySet.has(supportedNFT.chainID)) { + throw new Error(`chainID ${supportedNFT.chainID.toString('hex')} duplicated`); + } + + supportedChainsKeySet.add(supportedNFT.chainID); + } + + const nftStore = this.stores.get(NFTStore); + for (const nft of genesisStore.nftSubstore) { + const { nftID, owner, attributesArray } = nft; + + await nftStore.save(context, nftID, { + owner, + attributesArray, + }); + } + + const userStore = this.stores.get(UserStore); + for (const user of genesisStore.userSubstore) { + 
const { address, nftID, lockingModule } = user; + + await userStore.set(context, userStore.getKey(address, nftID), { + lockingModule, + }); + } + + const escrowStore = this.stores.get(EscrowStore); + for (const escrow of genesisStore.escrowSubstore) { + const { escrowedChainID, nftID } = escrow; + + await escrowStore.set(context, escrowStore.getKey(escrowedChainID, nftID), {}); + } + + for (const supportedNFT of genesisStore.supportedNFTsSubstore) { + const { chainID, supportedCollectionIDArray } = supportedNFT; + const supportedNFTsSubstore = this.stores.get(SupportedNFTsStore); + + await supportedNFTsSubstore.save(context, chainID, { + supportedCollectionIDArray, + }); + } + } } diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index da8f393bc6c..f35b837f389 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -20,6 +20,7 @@ import { MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME, MAX_LENGTH_DATA, + LENGTH_ADDRESS, } from './constants'; export const transferParamsSchema = { @@ -387,3 +388,134 @@ export const isNFTSupportedResponseSchema = { }, }, }; + +export const genesisNFTStoreSchema = { + $id: '/nft/module/genesis', + type: 'object', + required: ['nftSubstore', 'userSubstore', 'escrowSubstore', 'supportedNFTsSubstore'], + properties: { + nftSubstore: { + type: 'array', + fieldNumber: 1, + items: { + type: 'object', + required: ['nftID', 'owner', 'attributesArray'], + properties: { + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 1, + }, + owner: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_ADDRESS, + fieldNumber: 2, + }, + attributesArray: { + type: 'array', + fieldNumber: 3, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + 
fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, + }, + }, + userSubstore: { + type: 'array', + fieldNumber: 2, + items: { + type: 'object', + required: ['address', 'nftID', 'lockingModule'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + lockingModule: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 3, + }, + }, + }, + }, + escrowSubstore: { + type: 'array', + fieldNumber: 3, + items: { + type: 'object', + required: ['escrowedChainID', 'nftID'], + properties: { + escrowedChainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + }, + }, + }, + supportedNFTsSubstore: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['chainID', 'supportedCollectionIDArray'], + properties: { + chainID: { + dataType: 'bytes', + fieldNumber: 1, + }, + supportedCollectionIDArray: { + type: 'array', + fieldNumber: 2, + items: { + type: 'object', + required: ['collectionID'], + properties: { + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 1, + }, + }, + }, + }, + }, + }, + }, + }, +}; diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index bfa88ef2189..1438bc4daef 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -64,3 +64,29 @@ export interface NFT { attributesArray: NFTAttributes[]; lockingModule: string; } + +export interface GenesisNFTStore { + nftSubstore: { + nftID: Buffer; + owner: Buffer; + attributesArray: { + module: string; + 
attributes: Buffer; + }[]; + }[]; + userSubstore: { + address: Buffer; + nftID: Buffer; + lockingModule: string; + }[]; + escrowSubstore: { + escrowedChainID: Buffer; + nftID: Buffer; + }[]; + supportedNFTsSubstore: { + chainID: Buffer; + supportedCollectionIDArray: { + collectionID: Buffer; + }[]; + }[]; +} diff --git a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts new file mode 100644 index 00000000000..eadffcd2bdf --- /dev/null +++ b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts @@ -0,0 +1,381 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { GenesisNFTStore } from '../../../../src/modules/nft/types'; +import { + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, +} from '../../../../src/modules/nft/constants'; + +const nftID1 = utils.getRandomBytes(LENGTH_NFT_ID); +const nftID2 = utils.getRandomBytes(LENGTH_NFT_ID); +const owner = utils.getRandomBytes(LENGTH_ADDRESS); +const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + +export const validData: GenesisNFTStore = { + nftSubstore: [ + { + nftID: nftID1, + owner, + attributesArray: [ + { + module: 'pos', + attributes: utils.getRandomBytes(10), + }, + { + module: 'token', + attributes: utils.getRandomBytes(10), + }, + ], + }, + { + nftID: nftID2, + owner, + attributesArray: [ + { + module: 'pos', + attributes: utils.getRandomBytes(10), + }, + { + module: 'token', + attributes: utils.getRandomBytes(10), + }, + ], + }, + ], + userSubstore: [ + { + address: owner, + nftID: nftID1, + lockingModule: 'pos', + }, + { + address: owner, + nftID: nftID2, + lockingModule: 'token', + }, + ], + escrowSubstore: [ + { + escrowedChainID, + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + }, + ], + supportedNFTsSubstore: [ + { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + supportedCollectionIDArray: [], + }, + ], +}; + +export const validGenesisAssets = [['Valid genesis asset', validData]]; + +export const invalidSchemaNFTSubstoreGenesisAssets = [ + [ + 'Invalid nftID - minimum length not satisfied', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1), + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + }, + `nftID' minLength not satisfied`, + ], + [ + 'Invalid nftID - maximum length exceeded', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + }, + `nftID' maxLength exceeded`, 
+ ], + [ + 'Invalid owner - minimum length not satisfied', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + owner: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), + attributesArray: [], + }, + ], + }, + `owner' minLength not satisfied`, + ], + [ + 'Invalid owner - maximum length exceeded', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + owner: utils.getRandomBytes(LENGTH_ADDRESS + 1), + attributesArray: [], + }, + ], + }, + `owner' maxLength exceeded`, + ], + [ + 'Invalid attributesArray.module - minimum length not satisfied', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [ + { + module: '', + attributes: utils.getRandomBytes(10), + }, + ], + }, + ], + }, + `module' must NOT have fewer than 1 characters`, + ], + [ + 'Invalid attributesArray.module - maximum length exceeded', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [ + { + module: '1'.repeat(33), + attributes: utils.getRandomBytes(10), + }, + ], + }, + ], + }, + `module' must NOT have more than 32 characters`, + ], + [ + 'Invalid attributesArray.module - must match pattern "^[a-zA-Z0-9]*$"', + { + ...validData, + nftSubstore: [ + { + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [ + { + module: '#$a1!', + attributes: utils.getRandomBytes(10), + }, + ], + }, + ], + }, + 'must match pattern "^[a-zA-Z0-9]*$"', + ], +]; + +export const invalidSchemaUserSubstoreGenesisAssests = [ + [ + 'Invalid owner address', + { + ...validData, + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS - 1), + nftID: nftID1, + lockingModule: 'pos', + }, + ], + }, + `address' address length invalid`, + ], + [ + 'Invalid nftID - minimum length not satisified', + { + 
...validData, + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1), + lockingModule: 'pos', + }, + ], + }, + `nftID' minLength not satisfied`, + ], + [ + 'Invalid nftID - maximum length exceeded', + { + ...validData, + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), + lockingModule: 'pos', + }, + ], + }, + `nftID' maxLength exceeded`, + ], + [ + 'Invalid lockingModule - minimum length not satisfied', + { + ...validData, + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + lockingModule: '', + }, + ], + }, + `lockingModule' must NOT have fewer than 1 characters`, + ], + [ + 'Invalid lockingModule - maximum length exceeded', + { + ...validData, + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + lockingModule: 'pos'.repeat(33), + }, + ], + }, + `lockingModule' must NOT have more than 32 characters`, + ], + [ + 'lockingModule must match pattern - "^[a-zA-Z0-9]*$"', + { + ...validData, + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: utils.getRandomBytes(LENGTH_NFT_ID), + lockingModule: '$#pos"', + }, + ], + }, + `must match pattern "^[a-zA-Z0-9]*$"`, + ], +]; + +export const invalidSchemaEscrowSubstoreGenesisAssets = [ + [ + 'Invalid escrowedChainID - minimum length not satisfied', + { + ...validData, + escrowSubstore: [ + { + escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), + nftID: nftID1, + }, + ], + }, + `escrowedChainID' minLength not satisfied`, + ], + [ + 'Invalid escrowedChainID - maximum length exceeded', + { + ...validData, + escrowSubstore: [ + { + escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1), + nftID: nftID1, + }, + ], + }, + `escrowedChainID' maxLength exceeded`, + ], + [ + 'Invalid nftID - minimum length not satisfied', + { + 
...validData, + escrowSubstore: [ + { + escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), + }, + ], + }, + `nftID' minLength not satisfied`, + ], + [ + 'Invalid nftID - maximum length exceeded', + { + ...validData, + escrowSubstore: [ + { + escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), + }, + ], + }, + `nftID' maxLength exceeded`, + ], +]; + +export const invalidSchemaSupportedNFTsSubstoreGenesisAssets = [ + [ + 'Invalid collectionID - minimum length not satisfied', + { + ...validData, + supportedNFTsSubstore: [ + { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + supportedCollectionIDArray: [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID - 1), + }, + ], + }, + ], + }, + `collectionID' minLength not satisfied`, + ], + [ + 'Invalid collectionID - maximum length exceeded', + { + ...validData, + supportedNFTsSubstore: [ + { + chainID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + supportedCollectionIDArray: [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID + 1), + }, + ], + }, + ], + }, + `collectionID' maxLength exceeded`, + ], +]; diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts index ccbc53346ba..51827723523 100644 --- a/framework/test/unit/modules/nft/module.spec.ts +++ b/framework/test/unit/modules/nft/module.spec.ts @@ -11,8 +11,744 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ + +import { utils } from '@liskhq/lisk-cryptography'; +import { codec } from '@liskhq/lisk-codec'; +import { BlockAssets } from '@liskhq/lisk-chain'; +import { NFTModule } from '../../../../src/modules/nft/module'; +import { createGenesisBlockContext } from '../../../../src/testing'; +import { + invalidSchemaEscrowSubstoreGenesisAssets, + invalidSchemaNFTSubstoreGenesisAssets, + invalidSchemaSupportedNFTsSubstoreGenesisAssets, + invalidSchemaUserSubstoreGenesisAssests, + validData, +} from './init_genesis_state_fixtures'; +import { genesisNFTStoreSchema } from '../../../../src/modules/nft/schemas'; +import { + ALL_SUPPORTED_NFTS_KEY, + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, +} from '../../../../src/modules/nft/constants'; +import { NFTStore } from '../../../../src/modules/nft/stores/nft'; +import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; +import { UserStore } from '../../../../src/modules/nft/stores/user'; +import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; + describe('nft module', () => { - it('should be implemented', () => { - expect(true).toBeTrue(); + const module = new NFTModule(); + + const nftStore = module.stores.get(NFTStore); + const userStore = module.stores.get(UserStore); + const supportedNFTsSubstore = module.stores.get(SupportedNFTsStore); + const escrowStore = module.stores.get(EscrowStore); + + const createGenesisBlockContextFromGenesisAssets = (genesisAssets: object) => { + const encodedAsset = codec.encode(genesisNFTStoreSchema, genesisAssets); + + const context = createGenesisBlockContext({ + assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), + }).createInitGenesisStateContext(); + + return context; + }; + + describe('iniGenesisState', () => { + describe('validate nftSubstore schema', () => { + it.each(invalidSchemaNFTSubstoreGenesisAssets)('%s', async (_, input, err) => { + if (typeof input === 'string') { + return; + } + + 
const encodedAsset = codec.encode(genesisNFTStoreSchema, input); + + const context = createGenesisBlockContext({ + assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), + }).createInitGenesisStateContext(); + + await expect(module.initGenesisState(context)).rejects.toThrow(err as string); + }); + }); + + describe('validate userSubstore schema', () => { + it.each(invalidSchemaUserSubstoreGenesisAssests)('%s', async (_, input, err) => { + if (typeof input === 'string') { + return; + } + + const encodedAsset = codec.encode(genesisNFTStoreSchema, input); + + const context = createGenesisBlockContext({ + assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), + }).createInitGenesisStateContext(); + + await expect(module.initGenesisState(context)).rejects.toThrow(err as string); + }); + }); + + describe('validate escrowSubstore schema', () => { + it.each(invalidSchemaEscrowSubstoreGenesisAssets)('%s', async (_, input, err) => { + if (typeof input === 'string') { + return; + } + + const encodedAsset = codec.encode(genesisNFTStoreSchema, input); + + const context = createGenesisBlockContext({ + assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), + }).createInitGenesisStateContext(); + + await expect(module.initGenesisState(context)).rejects.toThrow(err as string); + }); + }); + + describe('validate supportedNFTsSubstore schema', () => { + it.each(invalidSchemaSupportedNFTsSubstoreGenesisAssets)('%s', async (_, input, err) => { + if (typeof input === 'string') { + return; + } + + const encodedAsset = codec.encode(genesisNFTStoreSchema, input); + + const context = createGenesisBlockContext({ + assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), + }).createInitGenesisStateContext(); + + await expect(module.initGenesisState(context)).rejects.toThrow(err as string); + }); + }); + + it('should throw if owner of the NFT is not a valid address', async () => { + const nftID = 
utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS - 1), + attributesArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} has invalid owner`, + ); + }); + + it('should throw if owner of the NFT is not a valid chain', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_CHAIN_ID + 1), + attributesArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} has invalid owner`, + ); + }); + + it('should throw if nftID is duplicated', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} duplicated`, + ); + }); + + it('should throw if NFT does not have a corresponding entry for user or escrow store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString( + 'hex', + )} has no 
corresponding entry for UserSubstore or EscrowSubstore`, + ); + }); + + it('should throw if NFT has an entry for both user and escrow store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'pos', + }, + ], + escrowSubstore: [ + { + escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID, + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} has an entry for both UserSubstore and EscrowSubstore`, + ); + }); + + it('should throw if NFT has multiple entries for user store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'pos', + }, + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'token', + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} has multiple entries for UserSubstore`, + ); + }); + + it('should throw if NFT has multiple entries for escrow store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + escrowSubstore: [ + { + escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID, + }, + { + escrowedChainID: 
utils.getRandomBytes(LENGTH_CHAIN_ID), + nftID, + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} has multiple entries for EscrowSubstore`, + ); + }); + + it('should throw if escrowed NFT has no corresponding entry for escrow store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'pos', + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} should have a corresponding entry for EscrowSubstore only`, + ); + }); + + it('should throw if NFT has duplicate attribute for an array', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + const moduleName = 'pos'; + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [ + { + module: moduleName, + attributes: Buffer.alloc(10), + }, + { + module: moduleName, + attributes: Buffer.alloc(0), + }, + ], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'pos', + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${nftID.toString('hex')} has a duplicate attribute for pos module`, + ); + }); + + it('should throw if all NFTs are supported and SupportedNFTsSubstore contains more than one entry', async () => { + const genesisAssets = { + ...validData, + supportedNFTsSubstore: [ + { + chainID: Buffer.alloc(0), + 
supportedCollectionIDArray: [], + }, + { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + supportedCollectionIDArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + 'SupportedNFTsSubstore should contain only one entry if all NFTs are supported', + ); + }); + + it('should throw if all NFTs are supported and supportedCollectionIDArray is not empty', async () => { + const genesisAssets = { + ...validData, + supportedNFTsSubstore: [ + { + chainID: ALL_SUPPORTED_NFTS_KEY, + supportedCollectionIDArray: [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + ], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + 'supportedCollectionIDArray must be empty if all NFTs are supported', + ); + }); + + it('should throw if supported chain is duplicated', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + const genesisAssets = { + ...validData, + supportedNFTsSubstore: [ + { + chainID, + supportedCollectionIDArray: [], + }, + { + chainID, + supportedCollectionIDArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `chainID ${chainID.toString('hex')} duplicated`, + ); + }); + + it('should create entries for all NFTs lexicographically', async () => { + const nftID1 = Buffer.alloc(LENGTH_NFT_ID, 1); + const nftID2 = Buffer.alloc(LENGTH_NFT_ID, 0); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID: nftID1, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + { + nftID: nftID2, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: nftID1, + 
lockingModule: 'pos', + }, + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: nftID2, + lockingModule: 'pos', + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + const allNFTs = await nftStore.iterate(context.getMethodContext(), { + gte: Buffer.alloc(LENGTH_NFT_ID, 0), + lte: Buffer.alloc(LENGTH_NFT_ID, 255), + }); + + const expectedKeys = [nftID2, nftID1]; + + expect(expectedKeys).toEqual(allNFTs.map(nft => nft.key)); + }); + + it('should create entry for an NFT with attributesArray sorted lexicographically on module', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [ + { + module: 'token', + attributes: utils.getRandomBytes(10), + }, + { + module: 'pos', + attributes: utils.getRandomBytes(10), + }, + ], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'pos', + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + const nft = await nftStore.get(context.getMethodContext(), nftID); + + expect(nft.attributesArray.map(attribute => attribute.module)).toEqual(['pos', 'token']); + }); + + it('should remove entries in attributes array with empty attributes', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [ + { + module: 'token', + attributes: Buffer.alloc(0), + }, + ], + }, + ], + userSubstore: [ + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID, + lockingModule: 'token', + }, + ], + }; + + const context = 
createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + const nft = await nftStore.get(context.getMethodContext(), nftID); + + expect(nft.attributesArray).toHaveLength(0); + }); + + it('should create an entry for ALL_SUPPORTED_NFTS_KEY with empty supportedCollectionIDArray if all NFTs are supported', async () => { + const genesisAssets = { + ...validData, + supportedNFTsSubstore: [ + { + chainID: ALL_SUPPORTED_NFTS_KEY, + supportedCollectionIDArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + const supportedNFTs = await supportedNFTsSubstore.get( + context.getMethodContext(), + ALL_SUPPORTED_NFTS_KEY, + ); + + expect(supportedNFTs.supportedCollectionIDArray).toHaveLength(0); + }); + + it('should create entries for supported chains lexicographically', async () => { + const chainID1 = Buffer.alloc(LENGTH_CHAIN_ID, 1); + const chainID2 = Buffer.alloc(LENGTH_CHAIN_ID, 0); + + const genesisAssets = { + ...validData, + supportedNFTsSubstore: [ + { + chainID: chainID1, + supportedCollectionIDArray: [], + }, + { + chainID: chainID2, + supportedCollectionIDArray: [], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + const allSupportedNFTs = await supportedNFTsSubstore.getAll(context.getMethodContext()); + + const expectedKeys = [chainID2, chainID1]; + + expect(expectedKeys).toEqual(allSupportedNFTs.map(supportedNFTs => supportedNFTs.key)); + }); + + it('should create entries for user and escrow store', async () => { + const nftID1 = utils.getRandomBytes(LENGTH_NFT_ID); + const nftID2 = utils.getRandomBytes(LENGTH_NFT_ID); + const nftID3 = utils.getRandomBytes(LENGTH_NFT_ID); + + const escrowedNFTID1 = 
utils.getRandomBytes(LENGTH_NFT_ID); + const escrowedNFTID2 = utils.getRandomBytes(LENGTH_NFT_ID); + + const owner1 = utils.getRandomBytes(LENGTH_ADDRESS); + const owner2 = utils.getRandomBytes(LENGTH_ADDRESS); + + const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID: nftID1, + owner: owner1, + attributesArray: [], + }, + { + nftID: nftID2, + owner: owner1, + attributesArray: [], + }, + { + nftID: nftID3, + owner: owner2, + attributesArray: [], + }, + { + nftID: escrowedNFTID1, + owner: escrowedChainID, + attributesArray: [], + }, + ], + userSubstore: [ + { + address: owner1, + nftID: nftID1, + lockingModule: 'pos', + }, + { + address: owner1, + nftID: nftID2, + lockingModule: 'token', + }, + { + address: owner2, + nftID: nftID3, + lockingModule: 'auth', + }, + ], + escrowSubstore: [ + { + escrowedChainID, + nftID: escrowedNFTID1, + }, + { + escrowedChainID, + nftID: escrowedNFTID2, + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + await expect( + userStore.get(context.getMethodContext(), userStore.getKey(owner1, nftID1)), + ).resolves.toEqual({ lockingModule: 'pos' }); + + await expect( + userStore.get(context.getMethodContext(), userStore.getKey(owner1, nftID2)), + ).resolves.toEqual({ lockingModule: 'token' }); + + await expect( + userStore.get(context.getMethodContext(), userStore.getKey(owner2, nftID3)), + ).resolves.toEqual({ lockingModule: 'auth' }); + + await expect( + escrowStore.get( + context.getMethodContext(), + escrowStore.getKey(escrowedChainID, escrowedNFTID1), + ), + ).resolves.toEqual({}); + + await expect( + escrowStore.get( + context.getMethodContext(), + escrowStore.getKey(escrowedChainID, escrowedNFTID2), + ), + ).resolves.toEqual({}); + }); + + it('should create an entry for supported chains with supportedCollectionIDArray sorted 
lexicographically', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + const collectionID1 = Buffer.alloc(LENGTH_COLLECTION_ID, 1); + const collectionID2 = Buffer.alloc(LENGTH_COLLECTION_ID, 0); + + const genesisAssets = { + ...validData, + supportedNFTsSubstore: [ + { + chainID, + supportedCollectionIDArray: [ + { + collectionID: collectionID1, + }, + { + collectionID: collectionID2, + }, + ], + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + const supportedNFT = await supportedNFTsSubstore.get(context.getMethodContext(), chainID); + + expect(supportedNFT.supportedCollectionIDArray).toEqual([ + { + collectionID: collectionID2, + }, + { + collectionID: collectionID1, + }, + ]); + }); }); }); From fbb7c2bab474b14aa9f3feb99961053a24612439 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Tue, 20 Jun 2023 03:59:49 +0200 Subject: [PATCH 067/170] Improve `bitwiseXOR()` and `getRandomSeed()` functions (#8572) * Improve performance of bitwiseXOR() * Improve `getRandomSeed()` function and `bitwiseXOR()` tests * Add check if argument contains no bytes to XOR * Update bitwiseXOR error message * Resolve lint formatting issue --- framework/src/modules/random/utils.ts | 36 +++++++++---------- .../test/unit/modules/random/method.spec.ts | 12 ++----- .../test/unit/modules/random/utils.spec.ts | 6 ++++ 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/framework/src/modules/random/utils.ts b/framework/src/modules/random/utils.ts index 207eae595c8..764ed7e4144 100644 --- a/framework/src/modules/random/utils.ts +++ b/framework/src/modules/random/utils.ts @@ -69,16 +69,16 @@ export const getRandomSeed = ( if (height < 0 || numberOfSeeds < 0) { throw new Error('Height or number of seeds cannot be negative.'); } - const initRandomBuffer = utils.intToBuffer(height + numberOfSeeds, 4); - let randomSeed 
= utils.hash(initRandomBuffer).slice(0, SEED_LENGTH); + const initRandomBuffer = utils.intToBuffer(height + numberOfSeeds, 4); + const currentSeeds = [utils.hash(initRandomBuffer).slice(0, 16)]; let isInFuture = true; - const currentSeeds = []; + for (const validatorReveal of validatorsReveal) { if (validatorReveal.height >= height) { isInFuture = false; - if (validatorReveal.height < height + numberOfSeeds) { - currentSeeds.push(validatorReveal); + if (validatorReveal.height < height + numberOfSeeds && validatorReveal.valid) { + currentSeeds.push(validatorReveal.seedReveal); } } } @@ -87,28 +87,28 @@ export const getRandomSeed = ( throw new Error('Height is in the future.'); } - for (const seedObject of currentSeeds) { - if (seedObject.valid) { - randomSeed = bitwiseXOR([randomSeed, seedObject.seedReveal]); - } - } - - return randomSeed; + return bitwiseXOR(currentSeeds); }; export const bitwiseXOR = (bufferArray: Buffer[]): Buffer => { + if (bufferArray.length === 0) { + throw new Error('bitwiseXOR requires at least one buffer for the input.'); + } + if (bufferArray.length === 1) { return bufferArray[0]; } - const bufferSizes = new Set(bufferArray.map(buffer => buffer.length)); - if (bufferSizes.size > 1) { - throw new Error('All input for XOR should be same size'); + const size = bufferArray[0].length; + for (let i = 1; i < bufferArray.length; i += 1) { + if (bufferArray[i].length !== size) { + throw new Error('All input for XOR should be same size'); + } } - const outputSize = [...bufferSizes][0]; - const result = Buffer.alloc(outputSize, 0); - for (let i = 0; i < outputSize; i += 1) { + const result = Buffer.alloc(size); + + for (let i = 0; i < size; i += 1) { // eslint-disable-next-line no-bitwise result[i] = bufferArray.map(b => b[i]).reduce((a, b) => a ^ b, 0); } diff --git a/framework/test/unit/modules/random/method.spec.ts b/framework/test/unit/modules/random/method.spec.ts index 8987d9ad666..af96ebd349d 100644 --- 
a/framework/test/unit/modules/random/method.spec.ts +++ b/framework/test/unit/modules/random/method.spec.ts @@ -320,10 +320,7 @@ describe('RandomModuleMethod', () => { Buffer.from(genesisValidators.validators[0].hashOnion.hashes[2], 'hex'), ]; // Do XOR of randomSeed with hashes of seed reveal with height >= randomStoreValidator.height >= height + numberOfSeeds - const xorExpected = bitwiseXOR([ - bitwiseXOR([randomSeed, hashesExpected[0]]), - hashesExpected[1], - ]); + const xorExpected = bitwiseXOR([randomSeed, ...hashesExpected]); expect(xorExpected).toHaveLength(16); await expect(randomMethod.getRandomBytes(context, height, numberOfSeeds)).resolves.toEqual( @@ -343,10 +340,7 @@ describe('RandomModuleMethod', () => { Buffer.from(genesisValidators.validators[1].hashOnion.hashes[1], 'hex'), ]; // Do XOR of randomSeed with hashes of seed reveal with height >= randomStoreValidator.height >= height + numberOfSeeds - const xorExpected = bitwiseXOR([ - bitwiseXOR([bitwiseXOR([randomSeed, hashesExpected[0]]), hashesExpected[1]]), - hashesExpected[2], - ]); + const xorExpected = bitwiseXOR([randomSeed, ...hashesExpected]); await expect(randomMethod.getRandomBytes(context, height, numberOfSeeds)).resolves.toEqual( xorExpected, @@ -385,7 +379,7 @@ describe('RandomModuleMethod', () => { Buffer.from(genesisValidators.validators[0].hashOnion.hashes[1], 'hex'), ]; // Do XOR of randomSeed with hashes of seed reveal with height >= randomStoreValidator.height >= height + numberOfSeeds - const xorExpected = bitwiseXOR([randomSeed, hashesExpected[0]]); + const xorExpected = bitwiseXOR([randomSeed, ...hashesExpected]); await expect(randomMethod.getRandomBytes(context, height, numberOfSeeds)).resolves.toEqual( xorExpected, diff --git a/framework/test/unit/modules/random/utils.spec.ts b/framework/test/unit/modules/random/utils.spec.ts index 75a638c128f..fa34bee8abe 100644 --- a/framework/test/unit/modules/random/utils.spec.ts +++ b/framework/test/unit/modules/random/utils.spec.ts @@ 
-17,6 +17,12 @@ import { bitwiseXORFixtures } from './bitwise_xor_fixtures'; describe('Random module utils', () => { describe('bitwiseXOR', () => { + it('should throw if an empty array is provided as an argument', () => { + expect(() => bitwiseXOR([])).toThrow( + 'bitwiseXOR requires at least one buffer for the input.', + ); + }); + it('should return the first element if there are no other elements', () => { const buffer = Buffer.from([0, 1, 1, 1]); const input = [buffer]; From 13e3c271bcc048fb2dfec663f5c4f47b29360c79 Mon Sep 17 00:00:00 2001 From: has5aan Date: Tue, 20 Jun 2023 13:08:31 +0200 Subject: [PATCH 068/170] :recycle: Removes redundant verification for NFTs owner --- framework/src/modules/nft/schemas.ts | 3 -- .../nft/init_genesis_state_fixtures.ts | 28 ------------------- 2 files changed, 31 deletions(-) diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index f35b837f389..dbe020974a6 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -20,7 +20,6 @@ import { MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME, MAX_LENGTH_DATA, - LENGTH_ADDRESS, } from './constants'; export const transferParamsSchema = { @@ -409,8 +408,6 @@ export const genesisNFTStoreSchema = { }, owner: { dataType: 'bytes', - minLength: LENGTH_CHAIN_ID, - maxLength: LENGTH_ADDRESS, fieldNumber: 2, }, attributesArray: { diff --git a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts index eadffcd2bdf..1ae16bd1efc 100644 --- a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts +++ b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts @@ -114,34 +114,6 @@ export const invalidSchemaNFTSubstoreGenesisAssets = [ }, `nftID' maxLength exceeded`, ], - [ - 'Invalid owner - minimum length not satisfied', - { - ...validData, - nftSubstore: [ - { - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - owner: 
utils.getRandomBytes(LENGTH_CHAIN_ID - 1), - attributesArray: [], - }, - ], - }, - `owner' minLength not satisfied`, - ], - [ - 'Invalid owner - maximum length exceeded', - { - ...validData, - nftSubstore: [ - { - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - owner: utils.getRandomBytes(LENGTH_ADDRESS + 1), - attributesArray: [], - }, - ], - }, - `owner' maxLength exceeded`, - ], [ 'Invalid attributesArray.module - minimum length not satisfied', { From ef4b00d02c03006caf0a4432eee8cd327c0d8d56 Mon Sep 17 00:00:00 2001 From: has5aan Date: Tue, 20 Jun 2023 23:19:42 +0200 Subject: [PATCH 069/170] :bug: :white_check_mark: Fixes setup code --- .../test/unit/modules/nft/init_genesis_state_fixtures.ts | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts index 1ae16bd1efc..b8722c12a83 100644 --- a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts +++ b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts @@ -23,6 +23,7 @@ import { const nftID1 = utils.getRandomBytes(LENGTH_NFT_ID); const nftID2 = utils.getRandomBytes(LENGTH_NFT_ID); +const nftID3 = utils.getRandomBytes(LENGTH_NFT_ID); const owner = utils.getRandomBytes(LENGTH_ADDRESS); const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); @@ -56,6 +57,11 @@ export const validData: GenesisNFTStore = { }, ], }, + { + nftID: nftID3, + owner: escrowedChainID, + attributesArray: [], + }, ], userSubstore: [ { @@ -72,7 +78,7 @@ export const validData: GenesisNFTStore = { escrowSubstore: [ { escrowedChainID, - nftID: utils.getRandomBytes(LENGTH_NFT_ID), + nftID: nftID3, }, ], supportedNFTsSubstore: [ From 978fde0cee07cac3fb756630700d0044633737e6 Mon Sep 17 00:00:00 2001 From: has5aan Date: Tue, 20 Jun 2023 23:25:04 +0200 Subject: [PATCH 070/170] :pencil2: --- .../test/unit/modules/nft/init_genesis_state_fixtures.ts | 2 +- 
framework/test/unit/modules/nft/module.spec.ts | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts index b8722c12a83..6684b639c65 100644 --- a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts +++ b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts @@ -179,7 +179,7 @@ export const invalidSchemaNFTSubstoreGenesisAssets = [ ], ]; -export const invalidSchemaUserSubstoreGenesisAssests = [ +export const invalidSchemaUserSubstoreGenesisAssets = [ [ 'Invalid owner address', { diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts index 51827723523..0ed79df983b 100644 --- a/framework/test/unit/modules/nft/module.spec.ts +++ b/framework/test/unit/modules/nft/module.spec.ts @@ -21,7 +21,7 @@ import { invalidSchemaEscrowSubstoreGenesisAssets, invalidSchemaNFTSubstoreGenesisAssets, invalidSchemaSupportedNFTsSubstoreGenesisAssets, - invalidSchemaUserSubstoreGenesisAssests, + invalidSchemaUserSubstoreGenesisAssets, validData, } from './init_genesis_state_fixtures'; import { genesisNFTStoreSchema } from '../../../../src/modules/nft/schemas'; @@ -55,7 +55,7 @@ describe('nft module', () => { return context; }; - describe('iniGenesisState', () => { + describe('initGenesisState', () => { describe('validate nftSubstore schema', () => { it.each(invalidSchemaNFTSubstoreGenesisAssets)('%s', async (_, input, err) => { if (typeof input === 'string') { @@ -73,7 +73,7 @@ describe('nft module', () => { }); describe('validate userSubstore schema', () => { - it.each(invalidSchemaUserSubstoreGenesisAssests)('%s', async (_, input, err) => { + it.each(invalidSchemaUserSubstoreGenesisAssets)('%s', async (_, input, err) => { if (typeof input === 'string') { return; } From ff248939b4b127230cc70a756e19d9f0eb56af35 Mon Sep 17 00:00:00 2001 From: has5aan Date: Wed, 21 Jun 2023 
00:30:33 +0200 Subject: [PATCH 071/170] :bug: Removes check to return if supportedNFTsSubstore is empty --- framework/src/modules/nft/module.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index ca3fc41e111..b6288628f85 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -259,10 +259,6 @@ export class NFTModule extends BaseInteroperableModule { } } - if (genesisStore.supportedNFTsSubstore.length === 0) { - return; - } - const allNFTsSupported = genesisStore.supportedNFTsSubstore.some(supportedNFTs => supportedNFTs.chainID.equals(ALL_SUPPORTED_NFTS_KEY), ); From 87befde50b78dc4d56c3f15c8d3de350f2e14a67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Wed, 21 Jun 2023 13:31:10 +0200 Subject: [PATCH 072/170] Improve seed reveal check (#8625) --- framework/src/modules/random/endpoint.ts | 5 +- framework/src/modules/random/method.ts | 4 +- framework/src/modules/random/utils.ts | 32 ++-------- .../test/unit/modules/random/utils.spec.ts | 64 ++++++++++++++++++- 4 files changed, 74 insertions(+), 31 deletions(-) diff --git a/framework/src/modules/random/endpoint.ts b/framework/src/modules/random/endpoint.ts index 23a23b4fe2f..fa9f9c1d914 100644 --- a/framework/src/modules/random/endpoint.ts +++ b/framework/src/modules/random/endpoint.ts @@ -32,7 +32,7 @@ import { setHashOnionUsageRequest, } from './schemas'; import { ValidatorRevealsStore } from './stores/validator_reveals'; -import { getSeedRevealValidity } from './utils'; +import { isSeedValidInput } from './utils'; import { HashOnionStore } from './stores/hash_onion'; import { UsedHashOnionStoreObject, UsedHashOnionsStore } from './stores/used_hash_onions'; @@ -48,10 +48,11 @@ export class RandomEndpoint extends BaseEndpoint { const { validatorReveals } = await randomDataStore.get(ctx, EMPTY_KEY); return { - valid: getSeedRevealValidity( + valid: isSeedValidInput( 
cryptography.address.getAddressFromLisk32Address(generatorAddress), Buffer.from(seedReveal, 'hex'), validatorReveals, + false, ), }; } diff --git a/framework/src/modules/random/method.ts b/framework/src/modules/random/method.ts index 6b5cc7171d8..9e96f0d7945 100644 --- a/framework/src/modules/random/method.ts +++ b/framework/src/modules/random/method.ts @@ -20,7 +20,7 @@ import { EMPTY_KEY } from '../validators/constants'; import { blockHeaderAssetRandomModule } from './schemas'; import { ValidatorRevealsStore } from './stores/validator_reveals'; import { BlockHeaderAssetRandomModule } from './types'; -import { getSeedRevealValidity, getRandomSeed } from './utils'; +import { isSeedValidInput, getRandomSeed } from './utils'; export class RandomMethod extends BaseMethod { private readonly _moduleName: string; @@ -47,7 +47,7 @@ export class RandomMethod extends BaseMethod { asset, ); - return getSeedRevealValidity(generatorAddress, seedReveal, validatorReveals); + return isSeedValidInput(generatorAddress, seedReveal, validatorReveals, false); } public async getRandomBytes( diff --git a/framework/src/modules/random/utils.ts b/framework/src/modules/random/utils.ts index 764ed7e4144..279c5f790f5 100644 --- a/framework/src/modules/random/utils.ts +++ b/framework/src/modules/random/utils.ts @@ -19,45 +19,25 @@ import { ValidatorSeedReveal } from './stores/validator_reveals'; export const isSeedValidInput = ( generatorAddress: Buffer, seedReveal: Buffer, - validatorsReveal: ValidatorSeedReveal[], + validatorReveals: ValidatorSeedReveal[], + previousSeedRequired = true, ) => { let lastSeed: ValidatorSeedReveal | undefined; // by construction, validatorsReveal is order by height asc. Therefore, looping from end will give highest value. 
- for (let i = validatorsReveal.length - 1; i >= 0; i -= 1) { - const validatorReveal = validatorsReveal[i]; + for (let i = validatorReveals.length - 1; i >= 0; i -= 1) { + const validatorReveal = validatorReveals[i]; if (validatorReveal.generatorAddress.equals(generatorAddress)) { lastSeed = validatorReveal; break; } } - // if the last seed is does not exist, seed reveal is invalid for use + if (!lastSeed) { - return false; + return !previousSeedRequired; } return lastSeed.seedReveal.equals(utils.hash(seedReveal).slice(0, SEED_LENGTH)); }; -export const getSeedRevealValidity = ( - generatorAddress: Buffer, - seedReveal: Buffer, - validatorsReveal: ValidatorSeedReveal[], -) => { - let lastSeed: ValidatorSeedReveal | undefined; - let maxheight = 0; - for (const validatorReveal of validatorsReveal) { - if ( - validatorReveal.generatorAddress.equals(generatorAddress) && - validatorReveal.height > maxheight - ) { - maxheight = validatorReveal.height; - - lastSeed = validatorReveal; - } - } - - return !lastSeed || lastSeed.seedReveal.equals(utils.hash(seedReveal).slice(0, SEED_LENGTH)); -}; - export const getRandomSeed = ( height: number, numberOfSeeds: number, diff --git a/framework/test/unit/modules/random/utils.spec.ts b/framework/test/unit/modules/random/utils.spec.ts index fa34bee8abe..02ac79ac1d5 100644 --- a/framework/test/unit/modules/random/utils.spec.ts +++ b/framework/test/unit/modules/random/utils.spec.ts @@ -12,8 +12,11 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { bitwiseXOR } from '../../../../src/modules/random/utils'; +import { utils } from '@liskhq/lisk-cryptography'; +import { bitwiseXOR, isSeedValidInput } from '../../../../src/modules/random/utils'; import { bitwiseXORFixtures } from './bitwise_xor_fixtures'; +import { ValidatorSeedReveal } from '../../../../src/modules/random/stores/validator_reveals'; +import { SEED_LENGTH, ADDRESS_LENGTH } from '../../../../src/modules/random/constants'; describe('Random module utils', () => { describe('bitwiseXOR', () => { @@ -40,4 +43,63 @@ describe('Random module utils', () => { expect(() => bitwiseXOR(input)).toThrow('All input for XOR should be same size'); }); }); + + describe('isSeedValidInput', () => { + const generatorAddress = utils.getRandomBytes(ADDRESS_LENGTH); + const seed = utils.getRandomBytes(SEED_LENGTH); + const previousSeed = utils.hash(seed).slice(0, SEED_LENGTH); + let validatorSeedReveals: ValidatorSeedReveal[]; + + beforeEach(() => { + let height = 100; + validatorSeedReveals = Array(103) + .fill(0) + .map(() => { + height += 1; + return { + generatorAddress: utils.getRandomBytes(ADDRESS_LENGTH), + seedReveal: utils.getRandomBytes(SEED_LENGTH), + height, + valid: true, + }; + }); + }); + + it('should return true when a matching seed is provided corresponding to the highest seed from the generator', () => { + validatorSeedReveals[88].generatorAddress = generatorAddress; + validatorSeedReveals[88].seedReveal = previousSeed; + + expect(isSeedValidInput(generatorAddress, seed, validatorSeedReveals)).toBe(true); + }); + + it('should return false when a matching seed is provided, but not corresponding to the highest seed from the generator', () => { + validatorSeedReveals[88].generatorAddress = generatorAddress; + validatorSeedReveals[88].seedReveal = previousSeed; + + validatorSeedReveals[99].generatorAddress = generatorAddress; + + expect(isSeedValidInput(generatorAddress, seed, validatorSeedReveals)).toBe(false); + }); + + it('should return false 
when previous seed exists, but the provided seed does not match', () => { + validatorSeedReveals[88].generatorAddress = generatorAddress; + + expect(isSeedValidInput(generatorAddress, seed, validatorSeedReveals)).toBe(false); + }); + + it('should return false when previous seed is missing and previous seed is required', () => { + expect(isSeedValidInput(generatorAddress, seed, validatorSeedReveals)).toBe(false); + }); + + it('should return true for any provided seed when previous seed is missing, but it is not required', () => { + expect( + isSeedValidInput( + generatorAddress, + utils.getRandomBytes(SEED_LENGTH), + validatorSeedReveals, + false, + ), + ).toBe(true); + }); + }); }); From e6b43b3b4f710828f059155d0e7dab7a1d2b3600 Mon Sep 17 00:00:00 2001 From: has5aan Date: Wed, 21 Jun 2023 13:34:03 +0200 Subject: [PATCH 073/170] :bug: Adds NFT owner check to verify duplicate entries for an NFT in UserSubstore and EscrowSubstore and adds checks to throw if UserSubstore and EscrowSubstore has additional entries for an NFT not contained in NFTSubstore --- framework/src/modules/nft/module.ts | 40 ++++++- .../test/unit/modules/nft/module.spec.ts | 102 ++++++++++++++++-- 2 files changed, 131 insertions(+), 11 deletions(-) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index b6288628f85..55719225028 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -203,14 +203,15 @@ export class NFTModule extends BaseInteroperableModule { } for (const nft of genesisStore.nftSubstore) { - const ownerUsers = genesisStore.userSubstore.filter(userEntry => - userEntry.nftID.equals(nft.nftID), + const userStoreEntries = genesisStore.userSubstore.filter(userStoreEntry => + userStoreEntry.nftID.equals(nft.nftID), ); - const ownerChains = genesisStore.escrowSubstore.filter(escrowEntry => + + const escrowStoreEntries = genesisStore.escrowSubstore.filter(escrowEntry => escrowEntry.nftID.equals(nft.nftID), ); - if 
(ownerUsers.length === 0 && ownerChains.length === 0) { + if (userStoreEntries.length === 0 && escrowStoreEntries.length === 0) { throw new Error( `nftID ${nft.nftID.toString( 'hex', @@ -218,7 +219,7 @@ export class NFTModule extends BaseInteroperableModule { ); } - if (ownerUsers.length > 0 && ownerChains.length > 0) { + if (userStoreEntries.length > 0 && escrowStoreEntries.length > 0) { throw new Error( `nftID ${nft.nftID.toString( 'hex', @@ -226,6 +227,15 @@ export class NFTModule extends BaseInteroperableModule { ); } + const ownerUsers = genesisStore.userSubstore.filter( + userEntry => userEntry.nftID.equals(nft.nftID) && userEntry.address.equals(nft.owner), + ); + + const ownerChains = genesisStore.escrowSubstore.filter( + escrowEntry => + escrowEntry.nftID.equals(nft.nftID) && escrowEntry.escrowedChainID.equals(nft.owner), + ); + if (ownerUsers.length > 1) { throw new Error(`nftID ${nft.nftID.toString('hex')} has multiple entries for UserSubstore`); } @@ -259,6 +269,26 @@ export class NFTModule extends BaseInteroperableModule { } } + for (const user of genesisStore.userSubstore) { + if (!genesisStore.nftSubstore.some(nft => nft.nftID.equals(user.nftID))) { + throw new Error( + `nftID ${user.nftID.toString( + 'hex', + )} in UserSubstore has no corresponding entry for NFTSubstore`, + ); + } + } + + for (const escrow of genesisStore.escrowSubstore) { + if (!genesisStore.nftSubstore.some(nft => nft.nftID.equals(escrow.nftID))) { + throw new Error( + `nftID ${escrow.nftID.toString( + 'hex', + )} in EscrowSubstore has no corresponding entry for NFTSubstore`, + ); + } + } + const allNFTsSupported = genesisStore.supportedNFTsSubstore.some(supportedNFTs => supportedNFTs.chainID.equals(ALL_SUPPORTED_NFTS_KEY), ); diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts index 0ed79df983b..c244432f8f9 100644 --- a/framework/test/unit/modules/nft/module.spec.ts +++ b/framework/test/unit/modules/nft/module.spec.ts @@ 
-213,6 +213,7 @@ describe('nft module', () => { it('should throw if NFT has an entry for both user and escrow store', async () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + // const owner = utils.getRandomBytes(LENGTH_ADDRESS); const genesisAssets = { ...validData, @@ -247,28 +248,30 @@ describe('nft module', () => { it('should throw if NFT has multiple entries for user store', async () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + const owner = utils.getRandomBytes(LENGTH_ADDRESS); const genesisAssets = { ...validData, nftSubstore: [ { nftID, - owner: utils.getRandomBytes(LENGTH_ADDRESS), + owner, attributesArray: [], }, ], userSubstore: [ { - address: utils.getRandomBytes(LENGTH_ADDRESS), + address: owner, nftID, lockingModule: 'pos', }, { - address: utils.getRandomBytes(LENGTH_ADDRESS), + address: owner, nftID, lockingModule: 'token', }, ], + escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -280,23 +283,25 @@ describe('nft module', () => { it('should throw if NFT has multiple entries for escrow store', async () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); const genesisAssets = { ...validData, nftSubstore: [ { nftID, - owner: utils.getRandomBytes(LENGTH_ADDRESS), + owner: escrowedChainID, attributesArray: [], }, ], + userSubstore: [], escrowSubstore: [ { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowedChainID, nftID, }, { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowedChainID, nftID, }, ], @@ -328,6 +333,7 @@ describe('nft module', () => { lockingModule: 'pos', }, ], + escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -375,6 +381,82 @@ describe('nft module', () => { ); }); + it('should throw if an NFT in user store has no corresponding entry for nft store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); 
+ const owner = utils.getRandomBytes(LENGTH_ADDRESS); + + const additionalNFTID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner, + attributesArray: [], + }, + ], + userSubstore: [ + { + address: owner, + nftID, + lockingModule: 'pos', + }, + { + address: utils.getRandomBytes(LENGTH_ADDRESS), + nftID: additionalNFTID, + lockingModule: 'pos', + }, + ], + escrowSubstore: [], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${additionalNFTID.toString( + 'hex', + )} in UserSubstore has no corresponding entry for NFTSubstore`, + ); + }); + + it('should throw if an NFT in escrow store has no corresponding entry for nft store', async () => { + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + + const additionalNFTID = utils.getRandomBytes(LENGTH_NFT_ID); + + const genesisAssets = { + ...validData, + nftSubstore: [ + { + nftID, + owner: escrowedChainID, + attributesArray: [], + }, + ], + userSubstore: [], + escrowSubstore: [ + { + nftID, + escrowedChainID, + }, + { + nftID: additionalNFTID, + escrowedChainID, + }, + ], + }; + + const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); + + await expect(module.initGenesisState(context)).rejects.toThrow( + `nftID ${additionalNFTID.toString( + 'hex', + )} in EscrowSubstore has no corresponding entry for NFTSubstore`, + ); + }); + it('should throw if all NFTs are supported and SupportedNFTsSubstore contains more than one entry', async () => { const genesisAssets = { ...validData, @@ -473,6 +555,7 @@ describe('nft module', () => { lockingModule: 'pos', }, ], + escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -517,6 +600,7 @@ describe('nft module', () => { lockingModule: 'pos', }, ], + escrowSubstore: [], }; const 
context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -552,6 +636,7 @@ describe('nft module', () => { lockingModule: 'token', }, ], + escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -651,6 +736,11 @@ describe('nft module', () => { owner: escrowedChainID, attributesArray: [], }, + { + nftID: escrowedNFTID2, + owner: escrowedChainID, + attributesArray: [], + }, ], userSubstore: [ { From d0a501ddb72532eb4294abd5991120d7502b3d10 Mon Sep 17 00:00:00 2001 From: has5aan Date: Thu, 22 Jun 2023 11:31:58 +0200 Subject: [PATCH 074/170] :recycle: NFTModule.initGenesisState --- framework/src/modules/nft/module.ts | 110 +---- framework/src/modules/nft/schemas.ts | 54 +-- framework/src/modules/nft/types.ts | 9 - .../nft/init_genesis_state_fixtures.ts | 160 ------- .../test/unit/modules/nft/module.spec.ts | 443 ++---------------- 5 files changed, 59 insertions(+), 717 deletions(-) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 55719225028..019d87642b8 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -63,7 +63,12 @@ import { FeeMethod, GenesisNFTStore, TokenMethod } from './types'; import { CrossChainTransferCommand as CrossChainTransferMessageCommand } from './cc_commands/cc_transfer'; import { TransferCrossChainCommand } from './commands/transfer_cross_chain'; import { TransferCommand } from './commands/transfer'; -import { ALL_SUPPORTED_NFTS_KEY, LENGTH_ADDRESS, LENGTH_CHAIN_ID } from './constants'; +import { + ALL_SUPPORTED_NFTS_KEY, + LENGTH_ADDRESS, + LENGTH_CHAIN_ID, + NFT_NOT_LOCKED, +} from './constants'; export class NFTModule extends BaseInteroperableModule { public method = new NFTMethod(this.stores, this.events); @@ -195,65 +200,11 @@ export class NFTModule extends BaseInteroperableModule { if (![LENGTH_CHAIN_ID, LENGTH_ADDRESS].includes(nft.owner.length)) { throw new Error(`nftID ${nft.nftID.toString('hex')} 
has invalid owner`); } + if (nftIDKeySet.has(nft.nftID)) { throw new Error(`nftID ${nft.nftID.toString('hex')} duplicated`); } - nftIDKeySet.add(nft.nftID); - } - - for (const nft of genesisStore.nftSubstore) { - const userStoreEntries = genesisStore.userSubstore.filter(userStoreEntry => - userStoreEntry.nftID.equals(nft.nftID), - ); - - const escrowStoreEntries = genesisStore.escrowSubstore.filter(escrowEntry => - escrowEntry.nftID.equals(nft.nftID), - ); - - if (userStoreEntries.length === 0 && escrowStoreEntries.length === 0) { - throw new Error( - `nftID ${nft.nftID.toString( - 'hex', - )} has no corresponding entry for UserSubstore or EscrowSubstore`, - ); - } - - if (userStoreEntries.length > 0 && escrowStoreEntries.length > 0) { - throw new Error( - `nftID ${nft.nftID.toString( - 'hex', - )} has an entry for both UserSubstore and EscrowSubstore`, - ); - } - - const ownerUsers = genesisStore.userSubstore.filter( - userEntry => userEntry.nftID.equals(nft.nftID) && userEntry.address.equals(nft.owner), - ); - - const ownerChains = genesisStore.escrowSubstore.filter( - escrowEntry => - escrowEntry.nftID.equals(nft.nftID) && escrowEntry.escrowedChainID.equals(nft.owner), - ); - - if (ownerUsers.length > 1) { - throw new Error(`nftID ${nft.nftID.toString('hex')} has multiple entries for UserSubstore`); - } - - if (ownerChains.length > 1) { - throw new Error( - `nftID ${nft.nftID.toString('hex')} has multiple entries for EscrowSubstore`, - ); - } - - if (nft.owner.length === LENGTH_CHAIN_ID && ownerChains.length !== 1) { - throw new Error( - `nftID ${nft.nftID.toString( - 'hex', - )} should have a corresponding entry for EscrowSubstore only`, - ); - } - const attributeSet: Record = {}; for (const attribute of nft.attributesArray) { @@ -267,26 +218,8 @@ export class NFTModule extends BaseInteroperableModule { ); } } - } - for (const user of genesisStore.userSubstore) { - if (!genesisStore.nftSubstore.some(nft => nft.nftID.equals(user.nftID))) { - throw new Error( - 
`nftID ${user.nftID.toString( - 'hex', - )} in UserSubstore has no corresponding entry for NFTSubstore`, - ); - } - } - - for (const escrow of genesisStore.escrowSubstore) { - if (!genesisStore.nftSubstore.some(nft => nft.nftID.equals(escrow.nftID))) { - throw new Error( - `nftID ${escrow.nftID.toString( - 'hex', - )} in EscrowSubstore has no corresponding entry for NFTSubstore`, - ); - } + nftIDKeySet.add(nft.nftID); } const allNFTsSupported = genesisStore.supportedNFTsSubstore.some(supportedNFTs => @@ -316,29 +249,24 @@ export class NFTModule extends BaseInteroperableModule { } const nftStore = this.stores.get(NFTStore); + const escrowStore = this.stores.get(EscrowStore); + const userStore = this.stores.get(UserStore); + for (const nft of genesisStore.nftSubstore) { - const { nftID, owner, attributesArray } = nft; + const { owner, nftID, attributesArray } = nft; await nftStore.save(context, nftID, { owner, attributesArray, }); - } - - const userStore = this.stores.get(UserStore); - for (const user of genesisStore.userSubstore) { - const { address, nftID, lockingModule } = user; - - await userStore.set(context, userStore.getKey(address, nftID), { - lockingModule, - }); - } - const escrowStore = this.stores.get(EscrowStore); - for (const escrow of genesisStore.escrowSubstore) { - const { escrowedChainID, nftID } = escrow; - - await escrowStore.set(context, escrowStore.getKey(escrowedChainID, nftID), {}); + if (owner.length === LENGTH_CHAIN_ID) { + await escrowStore.set(context, escrowStore.getKey(owner, nftID), {}); + } else { + await userStore.set(context, userStore.getKey(owner, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + } } for (const supportedNFT of genesisStore.supportedNFTsSubstore) { diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index dbe020974a6..c3bfbc15e69 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -391,7 +391,7 @@ export const 
isNFTSupportedResponseSchema = { export const genesisNFTStoreSchema = { $id: '/nft/module/genesis', type: 'object', - required: ['nftSubstore', 'userSubstore', 'escrowSubstore', 'supportedNFTsSubstore'], + required: ['nftSubstore', 'supportedNFTsSubstore'], properties: { nftSubstore: { type: 'array', @@ -434,59 +434,9 @@ export const genesisNFTStoreSchema = { }, }, }, - userSubstore: { - type: 'array', - fieldNumber: 2, - items: { - type: 'object', - required: ['address', 'nftID', 'lockingModule'], - properties: { - address: { - dataType: 'bytes', - format: 'lisk32', - fieldNumber: 1, - }, - nftID: { - dataType: 'bytes', - minLength: LENGTH_NFT_ID, - maxLength: LENGTH_NFT_ID, - fieldNumber: 2, - }, - lockingModule: { - dataType: 'string', - minLength: MIN_LENGTH_MODULE_NAME, - maxLength: MAX_LENGTH_MODULE_NAME, - pattern: '^[a-zA-Z0-9]*$', - fieldNumber: 3, - }, - }, - }, - }, - escrowSubstore: { - type: 'array', - fieldNumber: 3, - items: { - type: 'object', - required: ['escrowedChainID', 'nftID'], - properties: { - escrowedChainID: { - dataType: 'bytes', - minLength: LENGTH_CHAIN_ID, - maxLength: LENGTH_CHAIN_ID, - fieldNumber: 1, - }, - nftID: { - dataType: 'bytes', - minLength: LENGTH_NFT_ID, - maxLength: LENGTH_NFT_ID, - fieldNumber: 2, - }, - }, - }, - }, supportedNFTsSubstore: { type: 'array', - fieldNumber: 4, + fieldNumber: 2, items: { type: 'object', required: ['chainID', 'supportedCollectionIDArray'], diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 1438bc4daef..8b1647d67c2 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -74,15 +74,6 @@ export interface GenesisNFTStore { attributes: Buffer; }[]; }[]; - userSubstore: { - address: Buffer; - nftID: Buffer; - lockingModule: string; - }[]; - escrowSubstore: { - escrowedChainID: Buffer; - nftID: Buffer; - }[]; supportedNFTsSubstore: { chainID: Buffer; supportedCollectionIDArray: { diff --git 
a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts index 6684b639c65..3c7a8bb338e 100644 --- a/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts +++ b/framework/test/unit/modules/nft/init_genesis_state_fixtures.ts @@ -63,24 +63,6 @@ export const validData: GenesisNFTStore = { attributesArray: [], }, ], - userSubstore: [ - { - address: owner, - nftID: nftID1, - lockingModule: 'pos', - }, - { - address: owner, - nftID: nftID2, - lockingModule: 'token', - }, - ], - escrowSubstore: [ - { - escrowedChainID, - nftID: nftID3, - }, - ], supportedNFTsSubstore: [ { chainID: utils.getRandomBytes(LENGTH_CHAIN_ID), @@ -179,148 +161,6 @@ export const invalidSchemaNFTSubstoreGenesisAssets = [ ], ]; -export const invalidSchemaUserSubstoreGenesisAssets = [ - [ - 'Invalid owner address', - { - ...validData, - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS - 1), - nftID: nftID1, - lockingModule: 'pos', - }, - ], - }, - `address' address length invalid`, - ], - [ - 'Invalid nftID - minimum length not satisified', - { - ...validData, - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1), - lockingModule: 'pos', - }, - ], - }, - `nftID' minLength not satisfied`, - ], - [ - 'Invalid nftID - maximum length exceeded', - { - ...validData, - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), - lockingModule: 'pos', - }, - ], - }, - `nftID' maxLength exceeded`, - ], - [ - 'Invalid lockingModule - minimum length not satisfied', - { - ...validData, - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - lockingModule: '', - }, - ], - }, - `lockingModule' must NOT have fewer than 1 characters`, - ], - [ - 'Invalid lockingModule - maximum length exceeded', - { - ...validData, - userSubstore: [ 
- { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - lockingModule: 'pos'.repeat(33), - }, - ], - }, - `lockingModule' must NOT have more than 32 characters`, - ], - [ - 'lockingModule must match pattern - "^[a-zA-Z0-9]*$"', - { - ...validData, - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: utils.getRandomBytes(LENGTH_NFT_ID), - lockingModule: '$#pos"', - }, - ], - }, - `must match pattern "^[a-zA-Z0-9]*$"`, - ], -]; - -export const invalidSchemaEscrowSubstoreGenesisAssets = [ - [ - 'Invalid escrowedChainID - minimum length not satisfied', - { - ...validData, - escrowSubstore: [ - { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), - nftID: nftID1, - }, - ], - }, - `escrowedChainID' minLength not satisfied`, - ], - [ - 'Invalid escrowedChainID - maximum length exceeded', - { - ...validData, - escrowSubstore: [ - { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1), - nftID: nftID1, - }, - ], - }, - `escrowedChainID' maxLength exceeded`, - ], - [ - 'Invalid nftID - minimum length not satisfied', - { - ...validData, - escrowSubstore: [ - { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), - nftID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), - }, - ], - }, - `nftID' minLength not satisfied`, - ], - [ - 'Invalid nftID - maximum length exceeded', - { - ...validData, - escrowSubstore: [ - { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), - nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), - }, - ], - }, - `nftID' maxLength exceeded`, - ], -]; - export const invalidSchemaSupportedNFTsSubstoreGenesisAssets = [ [ 'Invalid collectionID - minimum length not satisfied', diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts index c244432f8f9..397124fa95b 100644 --- a/framework/test/unit/modules/nft/module.spec.ts +++ b/framework/test/unit/modules/nft/module.spec.ts @@ -18,10 +18,8 @@ import { BlockAssets } 
from '@liskhq/lisk-chain'; import { NFTModule } from '../../../../src/modules/nft/module'; import { createGenesisBlockContext } from '../../../../src/testing'; import { - invalidSchemaEscrowSubstoreGenesisAssets, invalidSchemaNFTSubstoreGenesisAssets, invalidSchemaSupportedNFTsSubstoreGenesisAssets, - invalidSchemaUserSubstoreGenesisAssets, validData, } from './init_genesis_state_fixtures'; import { genesisNFTStoreSchema } from '../../../../src/modules/nft/schemas'; @@ -31,6 +29,7 @@ import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, LENGTH_NFT_ID, + NFT_NOT_LOCKED, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; @@ -42,8 +41,8 @@ describe('nft module', () => { const nftStore = module.stores.get(NFTStore); const userStore = module.stores.get(UserStore); - const supportedNFTsSubstore = module.stores.get(SupportedNFTsStore); const escrowStore = module.stores.get(EscrowStore); + const supportedNFTsSubstore = module.stores.get(SupportedNFTsStore); const createGenesisBlockContextFromGenesisAssets = (genesisAssets: object) => { const encodedAsset = codec.encode(genesisNFTStoreSchema, genesisAssets); @@ -72,38 +71,6 @@ describe('nft module', () => { }); }); - describe('validate userSubstore schema', () => { - it.each(invalidSchemaUserSubstoreGenesisAssets)('%s', async (_, input, err) => { - if (typeof input === 'string') { - return; - } - - const encodedAsset = codec.encode(genesisNFTStoreSchema, input); - - const context = createGenesisBlockContext({ - assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), - }).createInitGenesisStateContext(); - - await expect(module.initGenesisState(context)).rejects.toThrow(err as string); - }); - }); - - describe('validate escrowSubstore schema', () => { - it.each(invalidSchemaEscrowSubstoreGenesisAssets)('%s', async (_, input, err) => { - if (typeof input === 
'string') { - return; - } - - const encodedAsset = codec.encode(genesisNFTStoreSchema, input); - - const context = createGenesisBlockContext({ - assets: new BlockAssets([{ module: module.name, data: encodedAsset }]), - }).createInitGenesisStateContext(); - - await expect(module.initGenesisState(context)).rejects.toThrow(err as string); - }); - }); - describe('validate supportedNFTsSubstore schema', () => { it.each(invalidSchemaSupportedNFTsSubstoreGenesisAssets)('%s', async (_, input, err) => { if (typeof input === 'string') { @@ -188,162 +155,7 @@ describe('nft module', () => { ); }); - it('should throw if NFT does not have a corresponding entry for user or escrow store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner: utils.getRandomBytes(LENGTH_ADDRESS), - attributesArray: [], - }, - ], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID ${nftID.toString( - 'hex', - )} has no corresponding entry for UserSubstore or EscrowSubstore`, - ); - }); - - it('should throw if NFT has an entry for both user and escrow store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - // const owner = utils.getRandomBytes(LENGTH_ADDRESS); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner: utils.getRandomBytes(LENGTH_ADDRESS), - attributesArray: [], - }, - ], - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID, - lockingModule: 'pos', - }, - ], - escrowSubstore: [ - { - escrowedChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), - nftID, - }, - ], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID ${nftID.toString('hex')} has an entry for both UserSubstore and EscrowSubstore`, - ); - }); - - 
it('should throw if NFT has multiple entries for user store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - const owner = utils.getRandomBytes(LENGTH_ADDRESS); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner, - attributesArray: [], - }, - ], - userSubstore: [ - { - address: owner, - nftID, - lockingModule: 'pos', - }, - { - address: owner, - nftID, - lockingModule: 'token', - }, - ], - escrowSubstore: [], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID ${nftID.toString('hex')} has multiple entries for UserSubstore`, - ); - }); - - it('should throw if NFT has multiple entries for escrow store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner: escrowedChainID, - attributesArray: [], - }, - ], - userSubstore: [], - escrowSubstore: [ - { - escrowedChainID, - nftID, - }, - { - escrowedChainID, - nftID, - }, - ], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID ${nftID.toString('hex')} has multiple entries for EscrowSubstore`, - ); - }); - - it('should throw if escrowed NFT has no corresponding entry for escrow store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - attributesArray: [], - }, - ], - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID, - lockingModule: 'pos', - }, - ], - escrowSubstore: [], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID 
${nftID.toString('hex')} should have a corresponding entry for EscrowSubstore only`, - ); - }); - - it('should throw if NFT has duplicate attribute for an array', async () => { + it('should throw if NFT has duplicate attribute for a module', async () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); const moduleName = 'pos'; @@ -365,13 +177,6 @@ describe('nft module', () => { ], }, ], - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID, - lockingModule: 'pos', - }, - ], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -381,82 +186,6 @@ describe('nft module', () => { ); }); - it('should throw if an NFT in user store has no corresponding entry for nft store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - const owner = utils.getRandomBytes(LENGTH_ADDRESS); - - const additionalNFTID = utils.getRandomBytes(LENGTH_NFT_ID); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner, - attributesArray: [], - }, - ], - userSubstore: [ - { - address: owner, - nftID, - lockingModule: 'pos', - }, - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: additionalNFTID, - lockingModule: 'pos', - }, - ], - escrowSubstore: [], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID ${additionalNFTID.toString( - 'hex', - )} in UserSubstore has no corresponding entry for NFTSubstore`, - ); - }); - - it('should throw if an NFT in escrow store has no corresponding entry for nft store', async () => { - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - - const additionalNFTID = utils.getRandomBytes(LENGTH_NFT_ID); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID, - owner: escrowedChainID, - attributesArray: [], - }, - ], - userSubstore: [], - escrowSubstore: [ - { - nftID, - 
escrowedChainID, - }, - { - nftID: additionalNFTID, - escrowedChainID, - }, - ], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).rejects.toThrow( - `nftID ${additionalNFTID.toString( - 'hex', - )} in EscrowSubstore has no corresponding entry for NFTSubstore`, - ); - }); - it('should throw if all NFTs are supported and SupportedNFTsSubstore contains more than one entry', async () => { const genesisAssets = { ...validData, @@ -525,6 +254,41 @@ describe('nft module', () => { ); }); + it('should create NFTs, their corresponding user or escrow entries and supported chains', async () => { + const context = createGenesisBlockContextFromGenesisAssets(validData); + + await expect(module.initGenesisState(context)).resolves.toBeUndefined(); + + for (const nft of validData.nftSubstore) { + const { nftID, owner, attributesArray } = nft; + + await expect(nftStore.get(context.getMethodContext(), nftID)).resolves.toEqual({ + owner, + attributesArray, + }); + + if (owner.length === LENGTH_CHAIN_ID) { + await expect( + escrowStore.get(context.getMethodContext(), escrowStore.getKey(owner, nftID)), + ).resolves.toEqual({}); + } else { + await expect( + userStore.get(context.getMethodContext(), userStore.getKey(owner, nftID)), + ).resolves.toEqual({ + lockingModule: NFT_NOT_LOCKED, + }); + } + } + + for (const supportedChain of validData.supportedNFTsSubstore) { + const { chainID, supportedCollectionIDArray } = supportedChain; + + await expect( + supportedNFTsSubstore.get(context.getMethodContext(), chainID), + ).resolves.toEqual({ supportedCollectionIDArray }); + } + }); + it('should create entries for all NFTs lexicographically', async () => { const nftID1 = Buffer.alloc(LENGTH_NFT_ID, 1); const nftID2 = Buffer.alloc(LENGTH_NFT_ID, 0); @@ -543,19 +307,6 @@ describe('nft module', () => { attributesArray: [], }, ], - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: 
nftID1, - lockingModule: 'pos', - }, - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID: nftID2, - lockingModule: 'pos', - }, - ], - escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -593,14 +344,6 @@ describe('nft module', () => { ], }, ], - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID, - lockingModule: 'pos', - }, - ], - escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -629,14 +372,6 @@ describe('nft module', () => { ], }, ], - userSubstore: [ - { - address: utils.getRandomBytes(LENGTH_ADDRESS), - nftID, - lockingModule: 'token', - }, - ], - escrowSubstore: [], }; const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); @@ -700,108 +435,6 @@ describe('nft module', () => { expect(expectedKeys).toEqual(allSupportedNFTs.map(supportedNFTs => supportedNFTs.key)); }); - it('should create entries for user and escrow store', async () => { - const nftID1 = utils.getRandomBytes(LENGTH_NFT_ID); - const nftID2 = utils.getRandomBytes(LENGTH_NFT_ID); - const nftID3 = utils.getRandomBytes(LENGTH_NFT_ID); - - const escrowedNFTID1 = utils.getRandomBytes(LENGTH_NFT_ID); - const escrowedNFTID2 = utils.getRandomBytes(LENGTH_NFT_ID); - - const owner1 = utils.getRandomBytes(LENGTH_ADDRESS); - const owner2 = utils.getRandomBytes(LENGTH_ADDRESS); - - const escrowedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - - const genesisAssets = { - ...validData, - nftSubstore: [ - { - nftID: nftID1, - owner: owner1, - attributesArray: [], - }, - { - nftID: nftID2, - owner: owner1, - attributesArray: [], - }, - { - nftID: nftID3, - owner: owner2, - attributesArray: [], - }, - { - nftID: escrowedNFTID1, - owner: escrowedChainID, - attributesArray: [], - }, - { - nftID: escrowedNFTID2, - owner: escrowedChainID, - attributesArray: [], - }, - ], - userSubstore: [ - { - address: owner1, - nftID: nftID1, - lockingModule: 'pos', - }, - { - 
address: owner1, - nftID: nftID2, - lockingModule: 'token', - }, - { - address: owner2, - nftID: nftID3, - lockingModule: 'auth', - }, - ], - escrowSubstore: [ - { - escrowedChainID, - nftID: escrowedNFTID1, - }, - { - escrowedChainID, - nftID: escrowedNFTID2, - }, - ], - }; - - const context = createGenesisBlockContextFromGenesisAssets(genesisAssets); - - await expect(module.initGenesisState(context)).resolves.toBeUndefined(); - - await expect( - userStore.get(context.getMethodContext(), userStore.getKey(owner1, nftID1)), - ).resolves.toEqual({ lockingModule: 'pos' }); - - await expect( - userStore.get(context.getMethodContext(), userStore.getKey(owner1, nftID2)), - ).resolves.toEqual({ lockingModule: 'token' }); - - await expect( - userStore.get(context.getMethodContext(), userStore.getKey(owner2, nftID3)), - ).resolves.toEqual({ lockingModule: 'auth' }); - - await expect( - escrowStore.get( - context.getMethodContext(), - escrowStore.getKey(escrowedChainID, escrowedNFTID1), - ), - ).resolves.toEqual({}); - - await expect( - escrowStore.get( - context.getMethodContext(), - escrowStore.getKey(escrowedChainID, escrowedNFTID2), - ), - ).resolves.toEqual({}); - }); - it('should create an entry for supported chains with supportedCollectionIDArray sorted lexicographically', async () => { const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); From 67043ddca2039dc56ad895fc4394195fe9f85d94 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Fri, 23 Jun 2023 02:43:57 +0100 Subject: [PATCH 075/170] Update NFT module with additional checks and LIP updates (#8635) * Update per lip pr * Revert token module changes * Use context chain id per feedback --- .../modules/nft/cc_commands/cc_transfer.ts | 4 +-- .../nft/commands/transfer_cross_chain.ts | 4 +++ framework/src/modules/nft/method.ts | 17 ++++++++++- .../nft/cc_comands/cc_transfer.spec.ts | 28 +++++++++++++++-- .../nft/commands/transfer_cross_chain.spec.ts | 12 ++++++++ 
.../test/unit/modules/nft/method.spec.ts | 30 +++++++++++++++++++ 6 files changed, 90 insertions(+), 5 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index af4045cd2a8..ea61435c300 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -62,7 +62,7 @@ export class CrossChainTransferCommand extends BaseCCCommand { const { nftID } = params; const { sendingChainID } = ccm; const nftChainID = this._method.getChainID(nftID); - const ownChainID = this._internalMethod.getOwnChainID(); + const ownChainID = context.chainID; if (![ownChainID, sendingChainID].some(allowedChainID => nftChainID.equals(allowedChainID))) { throw new Error('NFT is not native to either the sending chain or the receiving chain'); @@ -94,7 +94,7 @@ export class CrossChainTransferCommand extends BaseCCCommand { const { sendingChainID, status } = ccm; const { nftID, senderAddress, attributesArray: receivedAttributes } = params; const nftChainID = this._method.getChainID(nftID); - const ownChainID = this._internalMethod.getOwnChainID(); + const ownChainID = context.chainID; const nftStore = this.stores.get(NFTStore); const escrowStore = this.stores.get(EscrowStore); let recipientAddress: Buffer; diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index 97fd17a8826..fad28836ca6 100644 --- a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -66,6 +66,10 @@ export class TransferCrossChainCommand extends BaseCommand { const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(context.getMethodContext(), params.nftID); + if (params.receivingChainID.equals(context.chainID)) { + throw new Error('Receiving chain cannot be the sending chain'); + } + if (!nftExists) { throw new 
Error('NFT substore entry does not exist'); } diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 5c4e38e4af5..3d1fa187352 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -527,6 +527,22 @@ export class NFTMethod extends BaseMethod { data: string, includeAttributes: boolean, ): Promise { + const ownChainID = this._internalMethod.getOwnChainID(); + if (receivingChainID.equals(ownChainID)) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + NftEventResult.INVALID_RECEIVING_CHAIN, + ); + throw new Error('Receiving chain cannot be the sending chain'); + } + if (data.length > MAX_LENGTH_DATA) { this.events.get(TransferCrossChainEvent).error( methodContext, @@ -576,7 +592,6 @@ export class NFTMethod extends BaseMethod { } const nftChainID = this.getChainID(nftID); - const ownChainID = this._internalMethod.getOwnChainID(); if (![ownChainID, receivingChainID].some(allowedChainID => nftChainID.equals(allowedChainID))) { this.events.get(TransferCrossChainEvent).error( methodContext, diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 62f7e01c207..354f587b1b2 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -174,7 +174,7 @@ describe('CrossChain Transfer Command', () => { eventQueue: new EventQueue(0), getStore, logger: fakeLogger, - chainID, + chainID: ownChainID, }; }); @@ -315,6 +315,18 @@ describe('CrossChain Transfer Command', () => { method.init(newConfig); internalMethod.addDependencies(method, interopMethod); internalMethod.init(newConfig); + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + 
eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID: newConfig.ownChainID, + }; await expect(command.verify(context)).rejects.toThrow('NFT substore entry already exists'); }); @@ -435,7 +447,7 @@ describe('CrossChain Transfer Command', () => { eventQueue: new EventQueue(0), getStore, logger: fakeLogger, - chainID, + chainID: ownChainID, }; await expect(command.execute(context)).resolves.toBeUndefined(); @@ -524,6 +536,18 @@ describe('CrossChain Transfer Command', () => { method.init(newConfig); internalMethod.addDependencies(method, interopMethod); internalMethod.init(newConfig); + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID: newConfig.ownChainID, + }; const supportedNFTsStore = module.stores.get(SupportedNFTsStore); await supportedNFTsStore.set(methodContext, ALL_SUPPORTED_NFTS_KEY, { supportedCollectionIDArray: [], diff --git a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts index ba942e60893..897a8ae0732 100644 --- a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts @@ -204,6 +204,18 @@ describe('TransferCrossChainComand', () => { }); describe('verify', () => { + it('should fail if receiving chain id is same as the own chain id', async () => { + const receivingChainIDContext = createTransactionContextWithOverridingParams({ + receivingChainID: ownChainID, + }); + + await expect( + command.verify( + receivingChainIDContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ), + ).rejects.toThrow('Receiving chain cannot be the sending chain'); + }); + it('should fail if NFT does not have valid length', async () => { const nftMinLengthContext = 
createTransactionContextWithOverridingParams({ nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1), diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 2dd3d81568a..7e214516514 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -882,6 +882,36 @@ describe('NFTMethod', () => { receivingChainID = existingNFT.nftID.slice(0, LENGTH_CHAIN_ID); }); + it('should throw and emit error transfer cross chain event if receiving chain id is same as the own chain id', async () => { + receivingChainID = config.ownChainID; + await expect( + method.transferCrossChain( + methodContext, + existingNFT.owner, + recipientAddress, + existingNFT.nftID, + receivingChainID, + messageFee, + data, + includeAttributes, + ), + ).rejects.toThrow('Receiving chain cannot be the sending chain'); + checkEventResult( + methodContext.eventQueue, + 1, + TransferCrossChainEvent, + 0, + { + senderAddress: existingNFT.owner, + recipientAddress, + receivingChainID, + nftID: existingNFT.nftID, + includeAttributes, + }, + NftEventResult.INVALID_RECEIVING_CHAIN, + ); + }); + it('should throw and emit error transfer cross chain event if nft does not exist', async () => { receivingChainID = nftID.slice(0, LENGTH_CHAIN_ID); await expect( From 284b9cafbe877b09f99e2c25b73fb1e438808d10 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 23 Jun 2023 11:39:20 +0800 Subject: [PATCH 076/170] :bug: Fix duplicate schema id --- .../initializeMessageRecovery.ts | 2 +- .../interop/messageRecovery/messageRecovery.ts | 15 +-------------- .../interop/messageRecovery/parse_events.ts | 17 ++--------------- examples/interop/messageRecovery/schema.ts | 15 +++++++++++++++ 4 files changed, 19 insertions(+), 30 deletions(-) create mode 100644 examples/interop/messageRecovery/schema.ts diff --git a/examples/interop/messageRecovery/initializeMessageRecovery.ts 
b/examples/interop/messageRecovery/initializeMessageRecovery.ts index b1fa9ab03bc..998a5a4c05f 100644 --- a/examples/interop/messageRecovery/initializeMessageRecovery.ts +++ b/examples/interop/messageRecovery/initializeMessageRecovery.ts @@ -161,7 +161,7 @@ const inboxOutboxProps = { // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0045.md#channel-data-substore const channelSchema = { - $id: '/modules/interoperability/channel', + $id: '/example/modules/interoperability/channel', type: 'object', required: [ 'inbox', diff --git a/examples/interop/messageRecovery/messageRecovery.ts b/examples/interop/messageRecovery/messageRecovery.ts index 106cf9fb538..546e28193cd 100644 --- a/examples/interop/messageRecovery/messageRecovery.ts +++ b/examples/interop/messageRecovery/messageRecovery.ts @@ -18,6 +18,7 @@ import { checkDBError } from '@liskhq/lisk-framework-chain-connector-plugin/dist import { MerkleTree } from '@liskhq/lisk-tree'; import { utils } from '@liskhq/lisk-cryptography'; import * as os from 'os'; +import { ccmsInfoSchema } from './schema'; export const relayerKeyInfo = { address: 'lsk952ztknjoa3h58es4vgu5ovnoscv3amo7zg4zz', @@ -40,20 +41,6 @@ export const relayerKeyInfo = { encrypted: {}, }; -const ccmsInfoSchema = { - $id: 'msgRecoveryPlugin/ccmsFromEvents', - type: 'object', - properties: { - ccms: { - type: 'array', - fieldNumber: 1, - items: { - ...ccmSchema, - }, - }, - }, -}; - interface CCMsInfo { ccms: CCMsg[]; } diff --git a/examples/interop/messageRecovery/parse_events.ts b/examples/interop/messageRecovery/parse_events.ts index 8b0e4593cf6..ae355b35a6d 100644 --- a/examples/interop/messageRecovery/parse_events.ts +++ b/examples/interop/messageRecovery/parse_events.ts @@ -12,11 +12,12 @@ import { db as liskDB, } from 'lisk-sdk'; import { codec } from '@liskhq/lisk-codec'; -import { CcmSendSuccessEventData, CcmProcessedEventData, ccmSchema } from 'lisk-framework'; +import { CcmSendSuccessEventData, CcmProcessedEventData } from 
'lisk-framework'; import { EVENT_NAME_CCM_PROCESSED } from 'lisk-framework/dist-node/modules/interoperability/constants'; import { join } from 'path'; import * as os from 'os'; import { ensureDir } from 'fs-extra'; +import { ccmsInfoSchema } from './schema'; export const checkDBError = (error: Error | unknown) => { if (!(error instanceof liskDB.NotFoundError)) { @@ -66,20 +67,6 @@ const getDBInstance = async (dataPath: string, dbName = 'events.db'): Promise Date: Mon, 26 Jun 2023 10:13:30 +0800 Subject: [PATCH 077/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20semver=20fr?= =?UTF-8?q?om=207.3.8=20to=207.5.2=20(#8639)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :arrow_up: Bump semver from 7.3.8 to 7.5.2 Bumps [semver](https://github.com/npm/node-semver) from 7.3.8 to 7.5.2. - [Release notes](https://github.com/npm/node-semver/releases) - [Changelog](https://github.com/npm/node-semver/blob/main/CHANGELOG.md) - [Commits](https://github.com/npm/node-semver/compare/v7.3.8...v7.5.2) --- updated-dependencies: - dependency-name: semver dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- commander/package.json | 2 +- elements/lisk-p2p/package.json | 2 +- elements/lisk-validator/package.json | 2 +- .../package.json | 2 +- .../lisk-framework-faucet-plugin/package.json | 2 +- yarn.lock | 31 ++++--------------- 6 files changed, 11 insertions(+), 30 deletions(-) mode change 100755 => 100644 commander/package.json diff --git a/commander/package.json b/commander/package.json old mode 100755 new mode 100644 index bf0001556d9..aad1540f482 --- a/commander/package.json +++ b/commander/package.json @@ -124,7 +124,7 @@ "lisk-framework": "^0.10.0-beta.2", "listr": "0.14.3", "progress": "2.0.3", - "semver": "7.3.8", + "semver": "7.5.2", "strip-ansi": "6.0.1", "tar": "6.1.12", "ts-morph": "17.0.1", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index e0d92185bde..1d8be5d0fe6 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -45,7 +45,7 @@ "@liskhq/lisk-cryptography": "^4.0.0-beta.2", "@liskhq/lisk-validator": "^0.7.0-beta.2", "lodash.shuffle": "4.2.0", - "semver": "7.3.8", + "semver": "7.5.2", "socketcluster-client": "14.3.1", "socketcluster-server": "14.6.0" }, diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 25e8c983b3f..f0d669450e4 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -40,7 +40,7 @@ "ajv": "8.1.0", "ajv-formats": "2.1.1", "debug": "4.3.4", - "semver": "7.3.8", + "semver": "7.5.2", "validator": "13.7.0" }, "devDependencies": { diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 3d13ee9efc4..077ad0ae10a 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -89,7 +89,7 @@ "parcel": "2.8.0", 
"prettier": "2.8.0", "regenerator-runtime": "0.13.9", - "semver": "7.3.8", + "semver": "7.5.2", "source-map-support": "0.5.21", "ts-jest": "29.0.3", "ts-node": "10.9.1", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 2e679477aec..9cab5cc1fd8 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -92,7 +92,7 @@ "parcel": "2.8.0", "prettier": "2.8.0", "regenerator-runtime": "0.13.9", - "semver": "7.3.8", + "semver": "7.5.2", "source-map-support": "0.5.21", "ts-jest": "29.0.3", "ts-node": "10.9.1", diff --git a/yarn.lock b/yarn.lock index 448aa20b91f..38728d2da86 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11225,16 +11225,11 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" -lru-cache@^7.4.4, lru-cache@^7.5.1: +lru-cache@^7.4.4, lru-cache@^7.5.1, lru-cache@^7.7.1: version "7.14.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea" integrity sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA== -lru-cache@^7.7.1: - version "7.14.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.0.tgz#21be64954a4680e303a09e9468f880b98a0b3c7f" - integrity sha512-EIRtP1GrSJny0dqb50QXRUNBxHJhcpxHC++M5tD7RYbvLLn5KVWKsbyswSSqDuU15UFi3bgTQIY8nhDMeF6aDQ== - lunr@^2.3.9: version "2.3.9" resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" @@ -14174,17 +14169,17 @@ semver-regex@^2.0.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== -semver@7.3.4, semver@7.x, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2: +semver@7.3.4: version "7.3.4" resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== dependencies: lru-cache "^6.0.0" -semver@7.3.8, semver@^7.0.0, semver@^7.3.8: - version "7.3.8" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" - integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== +semver@7.5.2, semver@7.x, semver@^7.0.0, semver@^7.1.1, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: + version "7.5.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.2.tgz#5b851e66d1be07c1cdaf37dfc856f543325a2beb" + integrity sha512-SoftuTROv/cRjCze/scjGyiDtcUyxw1rgYQSZY7XTmtR5hX+dm76iDbTH8TkLPHCQmlbQVSSbNZCPM2hb0knnQ== dependencies: lru-cache "^6.0.0" @@ -14193,20 +14188,6 @@ semver@^6.0.0, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.1.1, semver@^7.3.4, semver@^7.3.5: - version "7.3.5" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" - integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== - dependencies: - lru-cache "^6.0.0" - -semver@^7.3.7: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - lru-cache "^6.0.0" - send@0.18.0: version "0.18.0" resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" From 
c9e7c1a097a22deaf8fd3e90cde6702e6f33f537 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Mon, 26 Jun 2023 08:27:54 +0100 Subject: [PATCH 078/170] Update methods createNFTEntry and create of NFT module with additional checks (#8654) Update per lip --- framework/src/modules/nft/internal_method.ts | 9 +++++++++ framework/src/modules/nft/method.ts | 8 ++++++++ .../unit/modules/nft/internal_method.spec.ts | 19 ++++++++++++++++++- .../test/unit/modules/nft/method.spec.ts | 17 +++++++++++++++++ 4 files changed, 52 insertions(+), 1 deletion(-) diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index ac271432678..6b87cbb7aea 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -67,6 +67,15 @@ export class InternalMethod extends BaseMethod { nftID: Buffer, attributesArray: NFTAttributes[], ): Promise { + const moduleNames = []; + for (const item of attributesArray) { + moduleNames.push(item.module); + } + + if (new Set(moduleNames).size !== attributesArray.length) { + throw new Error('Invalid attributes array provided'); + } + const nftStore = this.stores.get(NFTStore); await nftStore.save(methodContext, nftID, { owner: address, diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 3d1fa187352..3039737a15d 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -301,6 +301,14 @@ export class NFTMethod extends BaseMethod { collectionID: Buffer, attributesArray: NFTAttributes[], ): Promise { + const moduleNames = []; + for (const item of attributesArray) { + moduleNames.push(item.module); + } + if (new Set(moduleNames).size !== attributesArray.length) { + throw new Error('Invalid attributes array provided'); + } + const index = await this.getNextAvailableIndex(methodContext, collectionID); const nftID = Buffer.concat([ this._config.ownChainID, diff 
--git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 31dcd1a99c4..4817a89d45d 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -104,7 +104,24 @@ describe('InternalMethod', () => { }); describe('createNFTEntry', () => { - it('should create an entry in NFStore with attributes sorted by module', async () => { + it('should throw for duplicate module names in attributes array', async () => { + const attributesArray = [ + { + module: 'token', + attributes: Buffer.alloc(8, 1), + }, + { + module: 'token', + attributes: Buffer.alloc(8, 2), + }, + ]; + + await expect( + internalMethod.createNFTEntry(methodContext, address, nftID, attributesArray), + ).rejects.toThrow('Invalid attributes array provided'); + }); + + it('should create an entry in NFStore with attributes sorted by module if there is no duplicate module name', async () => { const unsortedAttributesArray = [ { module: 'token', diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 7e214516514..d5bcd5a9c07 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -556,6 +556,23 @@ describe('NFTMethod', () => { jest.spyOn(feeMethod, 'payFee'); }); + it('should throw for duplicate module names in attributes array', async () => { + const attributesArray = [ + { + module: 'token', + attributes: Buffer.alloc(8, 1), + }, + { + module: 'token', + attributes: Buffer.alloc(8, 2), + }, + ]; + + await expect( + method.create(methodContext, address, collectionID, attributesArray), + ).rejects.toThrow('Invalid attributes array provided'); + }); + it('should set data to stores with correct key and emit successfull create event when there is no entry in the nft substore', async () => { const expectedKey = Buffer.concat([config.ownChainID, collectionID, 
Buffer.from('0')]); From 4e09d473f387b986a6e30878d6fdcd748895518a Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Mon, 26 Jun 2023 14:47:33 +0200 Subject: [PATCH 079/170] Add nft module to example app --- .../config/default/genesis_assets.json | 87 +++++++++++++++++++ framework/src/application.ts | 7 ++ 2 files changed, 94 insertions(+) diff --git a/examples/pos-mainchain/config/default/genesis_assets.json b/examples/pos-mainchain/config/default/genesis_assets.json index d575139c811..fbe304669c9 100644 --- a/examples/pos-mainchain/config/default/genesis_assets.json +++ b/examples/pos-mainchain/config/default/genesis_assets.json @@ -1041,6 +1041,93 @@ } } }, + { + "module": "nft", + "data": { + "nftSubstore": [], + "supportedNFTsSubstore": [] + }, + "schema": { + "$id": "/nft/module/genesis", + "type": "object", + "required": ["nftSubstore", "supportedNFTsSubstore"], + "properties": { + "nftSubstore": { + "type": "array", + "fieldNumber": 1, + "items": { + "type": "object", + "required": ["nftID", "owner", "attributesArray"], + "properties": { + "nftID": { + "dataType": "bytes", + "minLength": 16, + "maxLength": 16, + "fieldNumber": 1 + }, + "owner": { + "dataType": "bytes", + "fieldNumber": 2 + }, + "attributesArray": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["module", "attributes"], + "properties": { + "module": { + "dataType": "string", + "minLength": 1, + "maxLength": 1, + "pattern": "^[a-zA-Z0-9]*$", + "fieldNumber": 1 + }, + "attributes": { + "dataType": "bytes", + "fieldNumber": 2 + } + } + } + } + } + } + }, + "supportedNFTsSubstore": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["chainID", "supportedCollectionIDArray"], + "properties": { + "chainID": { + "dataType": "bytes", + "fieldNumber": 1, + "minLength": 8, + "maxLength": 8 + }, + "supportedCollectionIDArray": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": 
"object", + "required": ["collectionID"], + "properties": { + "collectionID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + } + } + } + } + } + } + } + } + } + }, { "module": "pos", "data": { diff --git a/framework/src/application.ts b/framework/src/application.ts index e91be0fecd0..20a01d59eb5 100644 --- a/framework/src/application.ts +++ b/framework/src/application.ts @@ -55,6 +55,7 @@ import { } from './modules/interoperability'; import { DynamicRewardMethod, DynamicRewardModule } from './modules/dynamic_rewards'; import { Engine } from './engine'; +import { NFTMethod, NFTModule } from './modules/nft'; const isPidRunning = async (pid: number): Promise => psList().then(list => list.some(x => x.pid === pid)); @@ -108,6 +109,7 @@ interface DefaultApplication { validator: ValidatorsMethod; auth: AuthMethod; token: TokenMethod; + nft: NFTMethod; fee: FeeMethod; random: RandomMethod; reward: DynamicRewardMethod; @@ -163,6 +165,7 @@ export class Application { // create module instances const authModule = new AuthModule(); const tokenModule = new TokenModule(); + const nftModule = new NFTModule(); const feeModule = new FeeModule(); const rewardModule = new DynamicRewardModule(); const randomModule = new RandomModule(); @@ -192,9 +195,11 @@ export class Application { feeModule.method, ); tokenModule.addDependencies(interoperabilityModule.method, feeModule.method); + nftModule.addDependencies(interoperabilityModule.method, feeModule.method, tokenModule.method); // resolve interoperability dependencies interoperabilityModule.registerInteroperableModule(tokenModule); + interoperabilityModule.registerInteroperableModule(nftModule); interoperabilityModule.registerInteroperableModule(feeModule); // register modules @@ -202,6 +207,7 @@ export class Application { application._registerModule(authModule); application._registerModule(validatorModule); application._registerModule(tokenModule); + application._registerModule(nftModule); 
application._registerModule(rewardModule); application._registerModule(randomModule); application._registerModule(posModule); @@ -212,6 +218,7 @@ export class Application { method: { validator: validatorModule.method, token: tokenModule.method, + nft: nftModule.method, auth: authModule.method, fee: feeModule.method, pos: posModule.method, From dd83a93f38ce944e36e7a417828c892be9980b60 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Wed, 28 Jun 2023 19:57:23 +0200 Subject: [PATCH 080/170] Add & register custom module and nft module --- .../config/default/genesis_assets.json | 4 +- examples/pos-mainchain/package.json | 1 + examples/pos-mainchain/src/app/app.ts | 15 ++++- examples/pos-mainchain/src/app/modules.ts | 6 +- .../app/modules/testNft/commands/mint_nft.ts | 61 +++++++++++++++++++ .../src/app/modules/testNft/constants.ts | 17 ++++++ .../src/app/modules/testNft/endpoint.ts | 17 ++++++ .../src/app/modules/testNft/method.ts | 17 ++++++ .../src/app/modules/testNft/module.ts | 51 ++++++++++++++++ .../src/app/modules/testNft/types.ts | 60 ++++++++++++++++++ framework/src/application.ts | 7 --- framework/src/index.ts | 1 + 12 files changed, 244 insertions(+), 13 deletions(-) create mode 100644 examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts create mode 100644 examples/pos-mainchain/src/app/modules/testNft/constants.ts create mode 100644 examples/pos-mainchain/src/app/modules/testNft/endpoint.ts create mode 100644 examples/pos-mainchain/src/app/modules/testNft/method.ts create mode 100644 examples/pos-mainchain/src/app/modules/testNft/module.ts create mode 100644 examples/pos-mainchain/src/app/modules/testNft/types.ts diff --git a/examples/pos-mainchain/config/default/genesis_assets.json b/examples/pos-mainchain/config/default/genesis_assets.json index fbe304669c9..dd8d2b096fc 100644 --- a/examples/pos-mainchain/config/default/genesis_assets.json +++ b/examples/pos-mainchain/config/default/genesis_assets.json 
@@ -1102,9 +1102,7 @@ "properties": { "chainID": { "dataType": "bytes", - "fieldNumber": 1, - "minLength": 8, - "maxLength": 8 + "fieldNumber": 1 }, "supportedCollectionIDArray": { "type": "array", diff --git a/examples/pos-mainchain/package.json b/examples/pos-mainchain/package.json index 275cce15599..f26366b8cd4 100755 --- a/examples/pos-mainchain/package.json +++ b/examples/pos-mainchain/package.json @@ -111,6 +111,7 @@ } }, "dependencies": { + "@liskhq/lisk-validator": "^0.7.0-beta.0", "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-alpha.7", "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-alpha.7", "@liskhq/lisk-framework-forger-plugin": "^0.3.0-alpha.7", diff --git a/examples/pos-mainchain/src/app/app.ts b/examples/pos-mainchain/src/app/app.ts index d4c1f2407cb..ead5c491919 100644 --- a/examples/pos-mainchain/src/app/app.ts +++ b/examples/pos-mainchain/src/app/app.ts @@ -1,9 +1,22 @@ -import { Application, PartialApplicationConfig } from 'lisk-sdk'; +import { + Application, + FeeModule, + MainchainInteroperabilityModule, + PartialApplicationConfig, + TokenModule, + NFTModule, +} from 'lisk-sdk'; import { registerModules } from './modules'; import { registerPlugins } from './plugins'; export const getApplication = (config: PartialApplicationConfig): Application => { const { app } = Application.defaultApplication(config, true); + const tokenModule = new TokenModule(); + const nftModule = new NFTModule(); + const feeModule = new FeeModule(); + const interoperabilityModule = new MainchainInteroperabilityModule(); + interoperabilityModule.registerInteroperableModule(nftModule); + nftModule.addDependencies(interoperabilityModule.method, feeModule.method, tokenModule.method); registerModules(app); registerPlugins(app); diff --git a/examples/pos-mainchain/src/app/modules.ts b/examples/pos-mainchain/src/app/modules.ts index d69352da8ae..f332892b447 100644 --- a/examples/pos-mainchain/src/app/modules.ts +++ b/examples/pos-mainchain/src/app/modules.ts @@ -1,4 +1,6 @@ 
-/* eslint-disable @typescript-eslint/no-empty-function */ import { Application } from 'lisk-sdk'; +import { TestNftModule } from './modules/testNft/module'; -export const registerModules = (_app: Application): void => {}; +export const registerModules = (app: Application): void => { + app.registerModule(new TestNftModule()); +}; diff --git a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts new file mode 100644 index 00000000000..c49bf2e016c --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts @@ -0,0 +1,61 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { validator } from '@liskhq/lisk-validator'; +import { + BaseCommand, + CommandVerifyContext, + CommandExecuteContext, + VerificationResult, + VerifyStatus, + NFTMethod, +} from 'lisk-sdk'; +import { NFTAttributes, mintNftParamsSchema } from '../types'; + +interface Params { + address: Buffer; + collectionID: Buffer; + attributesArray: NFTAttributes[]; +} + +export class MintNftCommand extends BaseCommand { + public schema = mintNftParamsSchema; + private _nftMethod!: NFTMethod; + + public init(args: { nftMethod: NFTMethod }): void { + this._nftMethod = args.nftMethod; + } + + // eslint-disable-next-line @typescript-eslint/require-await + public async verify(context: CommandVerifyContext): Promise { + const { params } = context; + + validator.validate(this.schema, params); + + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._nftMethod.create( + context.getMethodContext(), + params.address, + params.collectionID, + params.attributesArray, + ); + } +} diff --git a/examples/pos-mainchain/src/app/modules/testNft/constants.ts b/examples/pos-mainchain/src/app/modules/testNft/constants.ts new file mode 100644 index 00000000000..59561a22073 --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/constants.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export const LENGTH_COLLECTION_ID = 4; +export const MIN_LENGTH_MODULE_NAME = 1; +export const MAX_LENGTH_MODULE_NAME = 32; diff --git a/examples/pos-mainchain/src/app/modules/testNft/endpoint.ts b/examples/pos-mainchain/src/app/modules/testNft/endpoint.ts new file mode 100644 index 00000000000..1d091013741 --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/endpoint.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseEndpoint } from 'lisk-sdk'; + +export class TestNftEndpoint extends BaseEndpoint {} diff --git a/examples/pos-mainchain/src/app/modules/testNft/method.ts b/examples/pos-mainchain/src/app/modules/testNft/method.ts new file mode 100644 index 00000000000..5bab789e7f1 --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/method.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseMethod } from 'lisk-sdk'; + +export class TestNftMethod extends BaseMethod {} diff --git a/examples/pos-mainchain/src/app/modules/testNft/module.ts b/examples/pos-mainchain/src/app/modules/testNft/module.ts new file mode 100644 index 00000000000..cea623bd0e1 --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/module.ts @@ -0,0 +1,51 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseModule, ModuleInitArgs, ModuleMetadata, NFTMethod } from 'lisk-sdk'; +import { TestNftEndpoint } from './endpoint'; +import { TestNftMethod } from './method'; +import { MintNftCommand } from './commands/mint_nft'; + +export class TestNftModule extends BaseModule { + public endpoint = new TestNftEndpoint(this.stores, this.offchainStores); + public method = new TestNftMethod(this.stores, this.events); + public mintNftCommand = new MintNftCommand(this.stores, this.events); + public commands = [this.mintNftCommand]; + + private _nftMethod!: NFTMethod; + + public addDependencies(nftMethod: NFTMethod) { + this._nftMethod = nftMethod; + } + + public metadata(): ModuleMetadata { + return { + ...this.baseMetadata(), + endpoints: [], + commands: this.commands.map(command => ({ + name: command.name, + params: command.schema, + })), + events: [], + assets: [], + }; + } + + // eslint-disable-next-line @typescript-eslint/require-await + public async init(_args: ModuleInitArgs) { + this.mintNftCommand.init({ + nftMethod: this._nftMethod, + }); + } +} diff --git a/examples/pos-mainchain/src/app/modules/testNft/types.ts 
b/examples/pos-mainchain/src/app/modules/testNft/types.ts new file mode 100644 index 00000000000..395c8fd1db5 --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/types.ts @@ -0,0 +1,60 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { LENGTH_COLLECTION_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from './constants'; + +export interface NFTAttributes { + module: string; + attributes: Buffer; +} + +export const mintNftParamsSchema = { + $id: '/lisk/nftTransferParams', + type: 'object', + required: ['nftID', 'recipientAddress', 'data'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; diff --git a/framework/src/application.ts b/framework/src/application.ts index 20a01d59eb5..e91be0fecd0 100644 --- a/framework/src/application.ts +++ b/framework/src/application.ts @@ -55,7 +55,6 @@ import { } from './modules/interoperability'; import { DynamicRewardMethod, DynamicRewardModule } from './modules/dynamic_rewards'; import { Engine } from './engine'; -import { 
NFTMethod, NFTModule } from './modules/nft'; const isPidRunning = async (pid: number): Promise => psList().then(list => list.some(x => x.pid === pid)); @@ -109,7 +108,6 @@ interface DefaultApplication { validator: ValidatorsMethod; auth: AuthMethod; token: TokenMethod; - nft: NFTMethod; fee: FeeMethod; random: RandomMethod; reward: DynamicRewardMethod; @@ -165,7 +163,6 @@ export class Application { // create module instances const authModule = new AuthModule(); const tokenModule = new TokenModule(); - const nftModule = new NFTModule(); const feeModule = new FeeModule(); const rewardModule = new DynamicRewardModule(); const randomModule = new RandomModule(); @@ -195,11 +192,9 @@ export class Application { feeModule.method, ); tokenModule.addDependencies(interoperabilityModule.method, feeModule.method); - nftModule.addDependencies(interoperabilityModule.method, feeModule.method, tokenModule.method); // resolve interoperability dependencies interoperabilityModule.registerInteroperableModule(tokenModule); - interoperabilityModule.registerInteroperableModule(nftModule); interoperabilityModule.registerInteroperableModule(feeModule); // register modules @@ -207,7 +202,6 @@ export class Application { application._registerModule(authModule); application._registerModule(validatorModule); application._registerModule(tokenModule); - application._registerModule(nftModule); application._registerModule(rewardModule); application._registerModule(randomModule); application._registerModule(posModule); @@ -218,7 +212,6 @@ export class Application { method: { validator: validatorModule.method, token: tokenModule.method, - nft: nftModule.method, auth: authModule.method, fee: feeModule.method, pos: posModule.method, diff --git a/framework/src/index.ts b/framework/src/index.ts index 0d35d808c97..24d61b6e2cc 100644 --- a/framework/src/index.ts +++ b/framework/src/index.ts @@ -67,6 +67,7 @@ export { genesisTokenStoreSchema as tokenGenesisStoreSchema, CROSS_CHAIN_COMMAND_NAME_TRANSFER, } 
from './modules/token'; +export { NFTModule, NFTMethod } from './modules/nft'; export { PoSMethod, PoSModule, From 24545623fa4a253484cf214dad68c6a2ef7e5bef Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Wed, 28 Jun 2023 19:57:59 +0200 Subject: [PATCH 081/170] Init nft module --- .../nft/commands/transfer_cross_chain.ts | 3 +-- framework/src/modules/nft/module.ts | 25 ++++++++++++++++--- framework/src/modules/nft/types.ts | 3 +-- 3 files changed, 24 insertions(+), 7 deletions(-) diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index fad28836ca6..2dcc3ea7575 100644 --- a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -17,8 +17,7 @@ import { crossChainTransferParamsSchema } from '../schemas'; import { NFTStore } from '../stores/nft'; import { NFTMethod } from '../method'; import { LENGTH_CHAIN_ID, NFT_NOT_LOCKED } from '../constants'; -import { TokenMethod } from '../../token'; -import { InteroperabilityMethod } from '../types'; +import { InteroperabilityMethod, TokenMethod } from '../types'; import { BaseCommand } from '../../base_command'; import { CommandExecuteContext, diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 019d87642b8..0c7e7f7c657 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -81,10 +81,11 @@ export class NFTModule extends BaseInteroperableModule { private readonly _ccTransferCommand = new TransferCrossChainCommand(this.stores, this.events); private readonly _internalMethod = new InternalMethod(this.stores, this.events); private _interoperabilityMethod!: InteroperabilityMethod; + private _feeMethod!: FeeMethod; + private _tokenMethod!: TokenMethod; public commands = [this._transferCommand, this._ccTransferCommand]; - // eslint-disable-next-line 
no-useless-constructor public constructor() { super(); this.events.register(TransferEvent, new TransferEvent(this.name)); @@ -127,6 +128,8 @@ export class NFTModule extends BaseInteroperableModule { tokenMethod: TokenMethod, ) { this._interoperabilityMethod = interoperabilityMethod; + this._feeMethod = feeMethod; + this._tokenMethod = tokenMethod; this.method.addDependencies( interoperabilityMethod, this._internalMethod, @@ -181,8 +184,24 @@ export class NFTModule extends BaseInteroperableModule { }; } - // eslint-disable-next-line @typescript-eslint/no-empty-function - public async init(_args: ModuleInitArgs) {} + // eslint-disable-next-line @typescript-eslint/require-await + public async init(args: ModuleInitArgs) { + const ownChainID = Buffer.from(args.genesisConfig.chainID, 'hex'); + this._internalMethod.init({ ownChainID }); + this.method.init({ ownChainID }); + this.crossChainTransferCommand.init({ + method: this.method, + internalMethod: this._internalMethod, + feeMethod: this._feeMethod, + }); + + this._ccTransferCommand.init({ + internalMethod: this._internalMethod, + interoperabilityMethod: this._interoperabilityMethod, + nftMethod: this.method, + tokenMethod: this._tokenMethod, + }); + } public async initGenesisState(context: GenesisBlockExecuteContext): Promise { const assetBytes = context.assets.getAsset(this.name); diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 8b1647d67c2..173ff3d1227 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -15,7 +15,6 @@ import { ImmutableMethodContext, MethodContext } from '../../state_machine'; import { CCMsg } from '../interoperability'; -// eslint-disable-next-line @typescript-eslint/no-empty-interface export interface ModuleConfig { ownChainID: Buffer; } @@ -43,7 +42,7 @@ export interface FeeMethod { export interface TokenMethod { getAvailableBalance( - methodContext: MethodContext, + methodContext: ImmutableMethodContext, address: 
Buffer, tokenID: Buffer, ): Promise; From 0f30c3ddd52e554f4aaaf80560d123c31a32e3f4 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 29 Jun 2023 13:11:37 +0200 Subject: [PATCH 082/170] Remove param validation --- .../app/modules/testNft/commands/mint_nft.ts | 24 +---------- .../src/app/modules/testNft/types.ts | 42 ------------------- 2 files changed, 2 insertions(+), 64 deletions(-) diff --git a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts index c49bf2e016c..f3d73c4571b 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts @@ -12,16 +12,8 @@ * Removal or modification of this copyright notice is prohibited. */ -import { validator } from '@liskhq/lisk-validator'; -import { - BaseCommand, - CommandVerifyContext, - CommandExecuteContext, - VerificationResult, - VerifyStatus, - NFTMethod, -} from 'lisk-sdk'; -import { NFTAttributes, mintNftParamsSchema } from '../types'; +import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; +import { NFTAttributes } from '../types'; interface Params { address: Buffer; @@ -30,24 +22,12 @@ interface Params { } export class MintNftCommand extends BaseCommand { - public schema = mintNftParamsSchema; private _nftMethod!: NFTMethod; public init(args: { nftMethod: NFTMethod }): void { this._nftMethod = args.nftMethod; } - // eslint-disable-next-line @typescript-eslint/require-await - public async verify(context: CommandVerifyContext): Promise { - const { params } = context; - - validator.validate(this.schema, params); - - return { - status: VerifyStatus.OK, - }; - } - public async execute(context: CommandExecuteContext): Promise { const { params } = context; diff --git a/examples/pos-mainchain/src/app/modules/testNft/types.ts b/examples/pos-mainchain/src/app/modules/testNft/types.ts index 
395c8fd1db5..8d1af2d969a 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/types.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/types.ts @@ -12,49 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ -import { LENGTH_COLLECTION_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME } from './constants'; - export interface NFTAttributes { module: string; attributes: Buffer; } - -export const mintNftParamsSchema = { - $id: '/lisk/nftTransferParams', - type: 'object', - required: ['nftID', 'recipientAddress', 'data'], - properties: { - address: { - dataType: 'bytes', - format: 'lisk32', - fieldNumber: 1, - }, - collectionID: { - dataType: 'bytes', - minLength: LENGTH_COLLECTION_ID, - maxLength: LENGTH_COLLECTION_ID, - fieldNumber: 2, - }, - attributesArray: { - type: 'array', - fieldNumber: 4, - items: { - type: 'object', - required: ['module', 'attributes'], - properties: { - module: { - dataType: 'string', - minLength: MIN_LENGTH_MODULE_NAME, - maxLength: MAX_LENGTH_MODULE_NAME, - pattern: '^[a-zA-Z0-9]*$', - fieldNumber: 1, - }, - attributes: { - dataType: 'bytes', - fieldNumber: 2, - }, - }, - }, - }, - }, -}; From e98e2c3fdbb040dc912af9d44c88bc428b494fc9 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Fri, 30 Jun 2023 13:20:36 +0200 Subject: [PATCH 083/170] Update schema per feedback --- examples/pos-mainchain/config/default/genesis_assets.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/pos-mainchain/config/default/genesis_assets.json b/examples/pos-mainchain/config/default/genesis_assets.json index dd8d2b096fc..b92a196593d 100644 --- a/examples/pos-mainchain/config/default/genesis_assets.json +++ b/examples/pos-mainchain/config/default/genesis_assets.json @@ -1079,7 +1079,7 @@ "module": { "dataType": "string", "minLength": 1, - "maxLength": 1, + "maxLength": 32, "pattern": "^[a-zA-Z0-9]*$", "fieldNumber": 1 }, @@ -1102,6 +1102,8 @@ "properties": { 
"chainID": { "dataType": "bytes", + "minLength": 4, + "maxLength": 4, "fieldNumber": 1 }, "supportedCollectionIDArray": { From 0175d2bc4e6afe5e2be29a571f2dcc9ca50232f1 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 6 Jul 2023 00:47:12 +0200 Subject: [PATCH 084/170] Update per feedback --- examples/pos-mainchain/src/app/app.ts | 30 ++++----- .../modules/testNft/commands/destroy_nft.ts | 36 ++++++++++ .../app/modules/testNft/commands/mint_nft.ts | 3 +- .../src/app/modules/testNft/constants.ts | 1 + .../src/app/modules/testNft/types.ts | 66 +++++++++++++++++++ 5 files changed, 118 insertions(+), 18 deletions(-) create mode 100644 examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts diff --git a/examples/pos-mainchain/src/app/app.ts b/examples/pos-mainchain/src/app/app.ts index ead5c491919..291b8987d12 100644 --- a/examples/pos-mainchain/src/app/app.ts +++ b/examples/pos-mainchain/src/app/app.ts @@ -1,24 +1,20 @@ -import { - Application, - FeeModule, - MainchainInteroperabilityModule, - PartialApplicationConfig, - TokenModule, - NFTModule, -} from 'lisk-sdk'; -import { registerModules } from './modules'; -import { registerPlugins } from './plugins'; +import { Application, PartialApplicationConfig, NFTModule } from 'lisk-sdk'; +import { TestNftModule } from './modules/testNft/module'; export const getApplication = (config: PartialApplicationConfig): Application => { - const { app } = Application.defaultApplication(config, true); - const tokenModule = new TokenModule(); + const { app, method } = Application.defaultApplication(config, true); const nftModule = new NFTModule(); - const feeModule = new FeeModule(); - const interoperabilityModule = new MainchainInteroperabilityModule(); + const testNftModule = new TestNftModule(); + // eslint-disable-next-line @typescript-eslint/dot-notation + const interoperabilityModule = app['_registeredModules'].find( + mod => mod.name === 'interoperability', + ); 
interoperabilityModule.registerInteroperableModule(nftModule); - nftModule.addDependencies(interoperabilityModule.method, feeModule.method, tokenModule.method); - registerModules(app); - registerPlugins(app); + nftModule.addDependencies(method.interoperability, method.fee, method.token); + testNftModule.addDependencies(nftModule.method); + + app.registerModule(nftModule); + app.registerModule(testNftModule); return app; }; diff --git a/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts b/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts new file mode 100644 index 00000000000..59f56628175 --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts @@ -0,0 +1,36 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; +import { destroyNftParamsSchema } from '../types'; + +interface Params { + address: Buffer; + nftID: Buffer; +} + +export class DestroyNftCommand extends BaseCommand { + private _nftMethod!: NFTMethod; + public schema = destroyNftParamsSchema; + + public init(args: { nftMethod: NFTMethod }): void { + this._nftMethod = args.nftMethod; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._nftMethod.destroy(context.getMethodContext(), params.address, params.nftID); + } +} diff --git a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts index f3d73c4571b..b6e425c5198 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts @@ -13,7 +13,7 @@ */ import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; -import { NFTAttributes } from '../types'; +import { NFTAttributes, mintNftParamsSchema } from '../types'; interface Params { address: Buffer; @@ -23,6 +23,7 @@ interface Params { export class MintNftCommand extends BaseCommand { private _nftMethod!: NFTMethod; + public schema = mintNftParamsSchema; public init(args: { nftMethod: NFTMethod }): void { this._nftMethod = args.nftMethod; diff --git a/examples/pos-mainchain/src/app/modules/testNft/constants.ts b/examples/pos-mainchain/src/app/modules/testNft/constants.ts index 59561a22073..a0150fad36f 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/constants.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/constants.ts @@ -15,3 +15,4 @@ export const LENGTH_COLLECTION_ID = 4; export const MIN_LENGTH_MODULE_NAME = 1; export const MAX_LENGTH_MODULE_NAME = 32; +export const LENGTH_NFT_ID = 16; diff --git a/examples/pos-mainchain/src/app/modules/testNft/types.ts 
b/examples/pos-mainchain/src/app/modules/testNft/types.ts index 8d1af2d969a..a591883b9f0 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/types.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/types.ts @@ -12,7 +12,73 @@ * Removal or modification of this copyright notice is prohibited. */ +import { + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, +} from './constants'; + export interface NFTAttributes { module: string; attributes: Buffer; } + +export const mintNftParamsSchema = { + $id: '/lisk/nftTransferParams', + type: 'object', + required: ['address', 'collectionID', 'attributesArray'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; + +export const destroyNftParamsSchema = { + $id: '/lisk/nftDestroyParams', + type: 'object', + required: ['address', 'nftID'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + }, +}; From 462e2b7496e5bccf1e78ddc940345b717d4c5ced Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 6 Jul 2023 11:10:14 +0200 Subject: [PATCH 085/170] Skip lint check for examples app --- examples/pos-mainchain/.eslintignore | 1 + examples/pos-mainchain/src/app/app.ts | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git 
a/examples/pos-mainchain/.eslintignore b/examples/pos-mainchain/.eslintignore index 00a15e70c20..eee8bf98e19 100644 --- a/examples/pos-mainchain/.eslintignore +++ b/examples/pos-mainchain/.eslintignore @@ -12,3 +12,4 @@ scripts config test/_setup.js ecosystem.config.js +src/app/app.ts diff --git a/examples/pos-mainchain/src/app/app.ts b/examples/pos-mainchain/src/app/app.ts index 291b8987d12..6a99bf61049 100644 --- a/examples/pos-mainchain/src/app/app.ts +++ b/examples/pos-mainchain/src/app/app.ts @@ -5,7 +5,6 @@ export const getApplication = (config: PartialApplicationConfig): Application => const { app, method } = Application.defaultApplication(config, true); const nftModule = new NFTModule(); const testNftModule = new TestNftModule(); - // eslint-disable-next-line @typescript-eslint/dot-notation const interoperabilityModule = app['_registeredModules'].find( mod => mod.name === 'interoperability', ); From 2b04b3613f1c83d08a99d08a3fad2913076dc309 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 6 Jul 2023 11:44:44 +0200 Subject: [PATCH 086/170] Init transfer command --- framework/src/modules/nft/module.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 0c7e7f7c657..619f8cd4133 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -201,6 +201,7 @@ export class NFTModule extends BaseInteroperableModule { nftMethod: this.method, tokenMethod: this._tokenMethod, }); + this._transferCommand.init({ method: this.method, internalMethod: this._internalMethod }); } public async initGenesisState(context: GenesisBlockExecuteContext): Promise { From 6ab91007c3d43de2ea9c89ff5f0cd057a40f01d2 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 6 Jul 2023 13:07:09 +0200 Subject: [PATCH 087/170] Init destroy command --- examples/pos-mainchain/src/app/modules/testNft/module.ts | 7 
++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/examples/pos-mainchain/src/app/modules/testNft/module.ts b/examples/pos-mainchain/src/app/modules/testNft/module.ts index cea623bd0e1..a228abff3af 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/module.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/module.ts @@ -16,12 +16,14 @@ import { BaseModule, ModuleInitArgs, ModuleMetadata, NFTMethod } from 'lisk-sdk' import { TestNftEndpoint } from './endpoint'; import { TestNftMethod } from './method'; import { MintNftCommand } from './commands/mint_nft'; +import { DestroyNftCommand } from './commands/destroy_nft'; export class TestNftModule extends BaseModule { public endpoint = new TestNftEndpoint(this.stores, this.offchainStores); public method = new TestNftMethod(this.stores, this.events); public mintNftCommand = new MintNftCommand(this.stores, this.events); - public commands = [this.mintNftCommand]; + public destroyNftCommand = new DestroyNftCommand(this.stores, this.events); + public commands = [this.mintNftCommand, this.destroyNftCommand]; private _nftMethod!: NFTMethod; @@ -47,5 +49,8 @@ export class TestNftModule extends BaseModule { this.mintNftCommand.init({ nftMethod: this._nftMethod, }); + this.destroyNftCommand.init({ + nftMethod: this._nftMethod, + }); } } From 906103a2236c07fff6d2521b7d6e680097660714 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 6 Jul 2023 15:09:33 +0200 Subject: [PATCH 088/170] NFTMethod.create generates nftID of length LENGTH_NFT_ID (#8692) :bug: NFTMethod.create generates nftID of length LENGTH_NFT_ID --- framework/src/modules/nft/method.ts | 5 ++++- framework/test/unit/modules/nft/method.spec.ts | 16 ++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 3039737a15d..aba6cc6da31 100644 --- a/framework/src/modules/nft/method.ts +++ 
b/framework/src/modules/nft/method.ts @@ -310,10 +310,13 @@ export class NFTMethod extends BaseMethod { } const index = await this.getNextAvailableIndex(methodContext, collectionID); + const indexBytes = Buffer.from(index.toString()); + const nftID = Buffer.concat([ this._config.ownChainID, collectionID, - Buffer.from(index.toString()), + Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID - indexBytes.length, 0), + indexBytes, ]); this._feeMethod.payFee(methodContext, BigInt(FEE_CREATE_NFT)); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index d5bcd5a9c07..23c46664d09 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -574,7 +574,13 @@ describe('NFTMethod', () => { }); it('should set data to stores with correct key and emit successfull create event when there is no entry in the nft substore', async () => { - const expectedKey = Buffer.concat([config.ownChainID, collectionID, Buffer.from('0')]); + const index = Buffer.from('0'); + const expectedKey = Buffer.concat([ + config.ownChainID, + collectionID, + Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID - index.length, 0), + index, + ]); await method.create(methodContext, address, collectionID, attributesArray3); const nftStoreData = await nftStore.get(methodContext, expectedKey); @@ -594,6 +600,7 @@ describe('NFTMethod', () => { }); it('should set data to stores with correct key and emit successfull create event when there is some entry in the nft substore', async () => { + const index = Buffer.from('2'); await nftStore.save(methodContext, nftID, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: attributesArray1, @@ -603,7 +610,12 @@ describe('NFTMethod', () => { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: attributesArray2, }); - const expectedKey = Buffer.concat([config.ownChainID, collectionID, Buffer.from('2')]); + 
const expectedKey = Buffer.concat([ + config.ownChainID, + collectionID, + Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID - index.length, 0), + index, + ]); await method.create(methodContext, address, collectionID, attributesArray3); const nftStoreData = await nftStore.get(methodContext, expectedKey); From c2aa925ac28f4bdce9397850abe5c9635832c4d4 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Fri, 7 Jul 2023 09:07:29 +0200 Subject: [PATCH 089/170] Move relevant types to schema --- .../modules/testNft/commands/destroy_nft.ts | 2 +- .../app/modules/testNft/commands/mint_nft.ts | 3 +- .../src/app/modules/testNft/schema.ts | 79 +++++++++++++++++++ .../src/app/modules/testNft/types.ts | 66 ---------------- 4 files changed, 82 insertions(+), 68 deletions(-) create mode 100644 examples/pos-mainchain/src/app/modules/testNft/schema.ts diff --git a/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts b/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts index 59f56628175..822ad1b174f 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/commands/destroy_nft.ts @@ -13,7 +13,7 @@ */ import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; -import { destroyNftParamsSchema } from '../types'; +import { destroyNftParamsSchema } from '../schema'; interface Params { address: Buffer; diff --git a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts index b6e425c5198..bc5638846d4 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/commands/mint_nft.ts @@ -13,7 +13,8 @@ */ import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; -import { NFTAttributes, mintNftParamsSchema } from '../types'; +import { 
NFTAttributes } from '../types'; +import { mintNftParamsSchema } from '../schema'; interface Params { address: Buffer; diff --git a/examples/pos-mainchain/src/app/modules/testNft/schema.ts b/examples/pos-mainchain/src/app/modules/testNft/schema.ts new file mode 100644 index 00000000000..c183e9fb8ff --- /dev/null +++ b/examples/pos-mainchain/src/app/modules/testNft/schema.ts @@ -0,0 +1,79 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, +} from './constants'; + +export const mintNftParamsSchema = { + $id: '/lisk/nftTransferParams', + type: 'object', + required: ['address', 'collectionID', 'attributesArray'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; + +export const destroyNftParamsSchema = { + $id: '/lisk/nftDestroyParams', + type: 'object', + required: ['address', 'nftID'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + nftID: 
{ + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + }, +}; diff --git a/examples/pos-mainchain/src/app/modules/testNft/types.ts b/examples/pos-mainchain/src/app/modules/testNft/types.ts index a591883b9f0..8d1af2d969a 100644 --- a/examples/pos-mainchain/src/app/modules/testNft/types.ts +++ b/examples/pos-mainchain/src/app/modules/testNft/types.ts @@ -12,73 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ -import { - LENGTH_COLLECTION_ID, - LENGTH_NFT_ID, - MAX_LENGTH_MODULE_NAME, - MIN_LENGTH_MODULE_NAME, -} from './constants'; - export interface NFTAttributes { module: string; attributes: Buffer; } - -export const mintNftParamsSchema = { - $id: '/lisk/nftTransferParams', - type: 'object', - required: ['address', 'collectionID', 'attributesArray'], - properties: { - address: { - dataType: 'bytes', - format: 'lisk32', - fieldNumber: 1, - }, - collectionID: { - dataType: 'bytes', - minLength: LENGTH_COLLECTION_ID, - maxLength: LENGTH_COLLECTION_ID, - fieldNumber: 2, - }, - attributesArray: { - type: 'array', - fieldNumber: 4, - items: { - type: 'object', - required: ['module', 'attributes'], - properties: { - module: { - dataType: 'string', - minLength: MIN_LENGTH_MODULE_NAME, - maxLength: MAX_LENGTH_MODULE_NAME, - pattern: '^[a-zA-Z0-9]*$', - fieldNumber: 1, - }, - attributes: { - dataType: 'bytes', - fieldNumber: 2, - }, - }, - }, - }, - }, -}; - -export const destroyNftParamsSchema = { - $id: '/lisk/nftDestroyParams', - type: 'object', - required: ['address', 'nftID'], - properties: { - address: { - dataType: 'bytes', - format: 'lisk32', - fieldNumber: 1, - }, - nftID: { - dataType: 'bytes', - minLength: LENGTH_NFT_ID, - maxLength: LENGTH_NFT_ID, - fieldNumber: 2, - }, - }, -}; From 6b7c91721ceae037011014c70b0ea3574694a2a8 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 7 Jul 2023 11:48:04 +0200 Subject: [PATCH 090/170] Adds NFTModule.name 
property (#8673) :seedling: Adds NFTModule.name property --- framework/src/modules/nft/module.ts | 4 ++++ framework/test/unit/modules/nft/module.spec.ts | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 619f8cd4133..f925126ac78 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -122,6 +122,10 @@ export class NFTModule extends BaseInteroperableModule { this.stores.register(SupportedNFTsStore, new SupportedNFTsStore(this.name, 4)); } + public get name(): string { + return 'nft'; + } + public addDependencies( interoperabilityMethod: InteroperabilityMethod, feeMethod: FeeMethod, diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts index 397124fa95b..99ae42a0dd3 100644 --- a/framework/test/unit/modules/nft/module.spec.ts +++ b/framework/test/unit/modules/nft/module.spec.ts @@ -54,6 +54,10 @@ describe('nft module', () => { return context; }; + it('should have the name "nft"', () => { + expect(module.name).toBe('nft'); + }); + describe('initGenesisState', () => { describe('validate nftSubstore schema', () => { it.each(invalidSchemaNFTSubstoreGenesisAssets)('%s', async (_, input, err) => { From 8a2dec0d48b9ce1f688717a6649e9686ce64475a Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 13 Jul 2023 11:51:35 +0200 Subject: [PATCH 091/170] Fixes NFTEndpoint.getNFT for escrowed NFTs (#8716) * :bug: NFTMethod.getNFT for escrowed NFTs * :recycle: NFTEndpoint.getNFT --- framework/src/modules/nft/endpoint.ts | 32 ++++++++++++------- framework/src/modules/nft/module.ts | 1 + framework/src/modules/nft/types.ts | 2 +- .../test/unit/modules/nft/endpoint.spec.ts | 31 +++++++++++++++++- 4 files changed, 53 insertions(+), 13 deletions(-) diff --git a/framework/src/modules/nft/endpoint.ts b/framework/src/modules/nft/endpoint.ts index 1999d881077..b41b4afd07d 100644 
--- a/framework/src/modules/nft/endpoint.ts +++ b/framework/src/modules/nft/endpoint.ts @@ -26,7 +26,7 @@ import { isNFTSupportedRequestSchema, } from './schemas'; import { NFTStore } from './stores/nft'; -import { LENGTH_NFT_ID } from './constants'; +import { LENGTH_ADDRESS, LENGTH_NFT_ID } from './constants'; import { UserStore } from './stores/user'; import { NFT } from './types'; import { SupportedNFTsStore } from './stores/supported_nfts'; @@ -111,18 +111,28 @@ export class NFTEndpoint extends BaseEndpoint { const userStore = this.stores.get(UserStore); const nftData = await nftStore.get(context.getImmutableMethodContext(), nftID); - const userData = await userStore.get( - context.getImmutableMethodContext(), - userStore.getKey(nftData.owner, nftID), - ); + const owner = nftData.owner.toString('hex'); + const attributesArray = nftData.attributesArray.map(attribute => ({ + module: attribute.module, + attributes: attribute.attributes.toString('hex'), + })); + + if (nftData.owner.length === LENGTH_ADDRESS) { + const userData = await userStore.get( + context.getImmutableMethodContext(), + userStore.getKey(nftData.owner, nftID), + ); + + return { + owner, + attributesArray, + lockingModule: userData.lockingModule, + }; + } return { - owner: nftData.owner.toString('hex'), - attributesArray: nftData.attributesArray.map(attribute => ({ - module: attribute.module, - attributes: attribute.attributes.toString('hex'), - })), - lockingModule: userData.lockingModule, + owner, + attributesArray, }; } diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index f925126ac78..cba0c0b2211 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -142,6 +142,7 @@ export class NFTModule extends BaseInteroperableModule { ); this._internalMethod.addDependencies(this.method, this._interoperabilityMethod); this.crossChainMethod.addDependencies(interoperabilityMethod); + this.endpoint.addDependencies(this.method); } 
public metadata(): ModuleMetadata { diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 173ff3d1227..d56e3343ebb 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -61,7 +61,7 @@ export interface NFTAttributes { export interface NFT { owner: string; attributesArray: NFTAttributes[]; - lockingModule: string; + lockingModule?: string; } export interface GenesisNFTStore { diff --git a/framework/test/unit/modules/nft/endpoint.spec.ts b/framework/test/unit/modules/nft/endpoint.spec.ts index d179db7dbeb..64fbabfe0d3 100644 --- a/framework/test/unit/modules/nft/endpoint.spec.ts +++ b/framework/test/unit/modules/nft/endpoint.spec.ts @@ -46,6 +46,7 @@ import { hasNFTResponseSchema, isNFTSupportedResponseSchema, } from '../../../../src/modules/nft/schemas'; +import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; type NFTofOwner = Omit & { id: Buffer }; @@ -60,6 +61,7 @@ describe('NFTEndpoint', () => { const nftStore = module.stores.get(NFTStore); const userStore = module.stores.get(UserStore); + const escrowStore = module.stores.get(EscrowStore); const supportedNFTsStore = module.stores.get(SupportedNFTsStore); let stateStore: PrefixedStateReadWriter; @@ -67,6 +69,7 @@ describe('NFTEndpoint', () => { const owner = utils.getRandomBytes(LENGTH_ADDRESS); const ownerAddress = address.getLisk32AddressFromAddress(owner); + const escrowChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); const nfts: NFTofOwner[] = [ { @@ -100,7 +103,8 @@ describe('NFTEndpoint', () => { }); await userStore.set(methodContext, userStore.getKey(owner, nft.id), { - lockingModule: nft.lockingModule, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + lockingModule: nft.lockingModule!, }); } @@ -161,6 +165,31 @@ describe('NFTEndpoint', () => { validator.validate(getNFTsResponseSchema, expectedNFTs); }); + + it('should return NFT details for escrowed NFT', async () => { + await 
escrowStore.set(methodContext, escrowChainID, {}); + + await nftStore.save(methodContext, nfts[0].id, { + owner: escrowChainID, + attributesArray: [], + }); + + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + id: nfts[0].id.toString('hex'), + }, + }); + + const expectedNFT: JSONObject = { + owner: escrowChainID.toString('hex'), + attributesArray: [], + }; + + await expect(endpoint.getNFT(context)).resolves.toEqual(expectedNFT); + + validator.validate(getNFTResponseSchema, expectedNFT); + }); }); describe('hasNFT', () => { From 5d68f3e781f8cdafaf6f244c7fa8d087a955a513 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 13 Jul 2023 14:41:30 +0200 Subject: [PATCH 092/170] Add nft to interop example app --- .../interop/pos-mainchain-fast/.eslintignore | 1 + .../interop/pos-mainchain-fast/src/app/app.ts | 17 +++- .../modules/testNft/commands/destroy_nft.ts | 36 +++++++++ .../app/modules/testNft/commands/mint_nft.ts | 43 ++++++++++ .../src/app/modules/testNft/constants.ts | 18 +++++ .../src/app/modules/testNft/endpoint.ts | 17 ++++ .../src/app/modules/testNft/method.ts | 17 ++++ .../src/app/modules/testNft/module.ts | 56 +++++++++++++ .../src/app/modules/testNft/schema.ts | 79 +++++++++++++++++++ .../src/app/modules/testNft/types.ts | 18 +++++ .../pos-sidechain-example-one/.eslintignore | 1 + .../pos-sidechain-example-one/src/app/app.ts | 17 +++- .../modules/testNft/commands/destroy_nft.ts | 36 +++++++++ .../app/modules/testNft/commands/mint_nft.ts | 43 ++++++++++ .../src/app/modules/testNft/constants.ts | 18 +++++ .../src/app/modules/testNft/endpoint.ts | 17 ++++ .../src/app/modules/testNft/method.ts | 17 ++++ .../src/app/modules/testNft/module.ts | 56 +++++++++++++ .../src/app/modules/testNft/schema.ts | 79 +++++++++++++++++++ .../src/app/modules/testNft/types.ts | 18 +++++ examples/pos-mainchain/src/app/modules.ts | 6 +- 21 files changed, 602 insertions(+), 8 deletions(-) create mode 100644 
examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/destroy_nft.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/mint_nft.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/constants.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/endpoint.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/method.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/module.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/schema.ts create mode 100644 examples/interop/pos-mainchain-fast/src/app/modules/testNft/types.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/destroy_nft.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/mint_nft.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/constants.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/endpoint.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/method.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/module.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/schema.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/testNft/types.ts diff --git a/examples/interop/pos-mainchain-fast/.eslintignore b/examples/interop/pos-mainchain-fast/.eslintignore index 06500399046..f65e71dc66b 100644 --- a/examples/interop/pos-mainchain-fast/.eslintignore +++ b/examples/interop/pos-mainchain-fast/.eslintignore @@ -11,3 +11,4 @@ build scripts config test/_setup.js +src/app/app.ts diff --git a/examples/interop/pos-mainchain-fast/src/app/app.ts b/examples/interop/pos-mainchain-fast/src/app/app.ts index 
c506c018be8..3250d66e460 100644 --- a/examples/interop/pos-mainchain-fast/src/app/app.ts +++ b/examples/interop/pos-mainchain-fast/src/app/app.ts @@ -1,9 +1,22 @@ -import { Application, PartialApplicationConfig } from 'lisk-sdk'; +import { Application, PartialApplicationConfig, NFTModule } from 'lisk-sdk'; +import { TestNftModule } from './modules/testNft/module'; import { registerModules } from './modules'; import { registerPlugins } from './plugins'; export const getApplication = (config: PartialApplicationConfig): Application => { - const { app } = Application.defaultApplication(config, true); + const { app, method } = Application.defaultApplication(config, true); + + const nftModule = new NFTModule(); + const testNftModule = new TestNftModule(); + const interoperabilityModule = app['_registeredModules'].find( + mod => mod.name === 'interoperability', + ); + interoperabilityModule.registerInteroperableModule(nftModule); + nftModule.addDependencies(method.interoperability, method.fee, method.token); + testNftModule.addDependencies(nftModule.method); + + app.registerModule(nftModule); + app.registerModule(testNftModule); registerModules(app); registerPlugins(app); diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/destroy_nft.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/destroy_nft.ts new file mode 100644 index 00000000000..822ad1b174f --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/destroy_nft.ts @@ -0,0 +1,36 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; +import { destroyNftParamsSchema } from '../schema'; + +interface Params { + address: Buffer; + nftID: Buffer; +} + +export class DestroyNftCommand extends BaseCommand { + private _nftMethod!: NFTMethod; + public schema = destroyNftParamsSchema; + + public init(args: { nftMethod: NFTMethod }): void { + this._nftMethod = args.nftMethod; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._nftMethod.destroy(context.getMethodContext(), params.address, params.nftID); + } +} diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/mint_nft.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/mint_nft.ts new file mode 100644 index 00000000000..bc5638846d4 --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/commands/mint_nft.ts @@ -0,0 +1,43 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; +import { NFTAttributes } from '../types'; +import { mintNftParamsSchema } from '../schema'; + +interface Params { + address: Buffer; + collectionID: Buffer; + attributesArray: NFTAttributes[]; +} + +export class MintNftCommand extends BaseCommand { + private _nftMethod!: NFTMethod; + public schema = mintNftParamsSchema; + + public init(args: { nftMethod: NFTMethod }): void { + this._nftMethod = args.nftMethod; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._nftMethod.create( + context.getMethodContext(), + params.address, + params.collectionID, + params.attributesArray, + ); + } +} diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/constants.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/constants.ts new file mode 100644 index 00000000000..a0150fad36f --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/constants.ts @@ -0,0 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export const LENGTH_COLLECTION_ID = 4; +export const MIN_LENGTH_MODULE_NAME = 1; +export const MAX_LENGTH_MODULE_NAME = 32; +export const LENGTH_NFT_ID = 16; diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/endpoint.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/endpoint.ts new file mode 100644 index 00000000000..1d091013741 --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/endpoint.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseEndpoint } from 'lisk-sdk'; + +export class TestNftEndpoint extends BaseEndpoint {} diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/method.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/method.ts new file mode 100644 index 00000000000..5bab789e7f1 --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/method.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseMethod } from 'lisk-sdk'; + +export class TestNftMethod extends BaseMethod {} diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/module.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/module.ts new file mode 100644 index 00000000000..a228abff3af --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/module.ts @@ -0,0 +1,56 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseModule, ModuleInitArgs, ModuleMetadata, NFTMethod } from 'lisk-sdk'; +import { TestNftEndpoint } from './endpoint'; +import { TestNftMethod } from './method'; +import { MintNftCommand } from './commands/mint_nft'; +import { DestroyNftCommand } from './commands/destroy_nft'; + +export class TestNftModule extends BaseModule { + public endpoint = new TestNftEndpoint(this.stores, this.offchainStores); + public method = new TestNftMethod(this.stores, this.events); + public mintNftCommand = new MintNftCommand(this.stores, this.events); + public destroyNftCommand = new DestroyNftCommand(this.stores, this.events); + public commands = [this.mintNftCommand, this.destroyNftCommand]; + + private _nftMethod!: NFTMethod; + + public addDependencies(nftMethod: NFTMethod) { + this._nftMethod = nftMethod; + } + + public metadata(): ModuleMetadata { + return { + ...this.baseMetadata(), + endpoints: [], + commands: this.commands.map(command => ({ + name: command.name, + params: command.schema, + })), + events: [], + assets: [], + }; + } + + // eslint-disable-next-line 
@typescript-eslint/require-await + public async init(_args: ModuleInitArgs) { + this.mintNftCommand.init({ + nftMethod: this._nftMethod, + }); + this.destroyNftCommand.init({ + nftMethod: this._nftMethod, + }); + } +} diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/schema.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/schema.ts new file mode 100644 index 00000000000..c183e9fb8ff --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/schema.ts @@ -0,0 +1,79 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, +} from './constants'; + +export const mintNftParamsSchema = { + $id: '/lisk/nftTransferParams', + type: 'object', + required: ['address', 'collectionID', 'attributesArray'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; + +export const destroyNftParamsSchema = { + $id: '/lisk/nftDestroyParams', + type: 'object', + required: ['address', 'nftID'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + }, +}; diff --git a/examples/interop/pos-mainchain-fast/src/app/modules/testNft/types.ts b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/types.ts new file mode 100644 index 00000000000..8d1af2d969a --- /dev/null +++ b/examples/interop/pos-mainchain-fast/src/app/modules/testNft/types.ts @@ -0,0 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +export interface NFTAttributes { + module: string; + attributes: Buffer; +} diff --git a/examples/interop/pos-sidechain-example-one/.eslintignore b/examples/interop/pos-sidechain-example-one/.eslintignore index 06500399046..f65e71dc66b 100644 --- a/examples/interop/pos-sidechain-example-one/.eslintignore +++ b/examples/interop/pos-sidechain-example-one/.eslintignore @@ -11,3 +11,4 @@ build scripts config test/_setup.js +src/app/app.ts diff --git a/examples/interop/pos-sidechain-example-one/src/app/app.ts b/examples/interop/pos-sidechain-example-one/src/app/app.ts index d9dc8b2ad28..3250d66e460 100644 --- a/examples/interop/pos-sidechain-example-one/src/app/app.ts +++ b/examples/interop/pos-sidechain-example-one/src/app/app.ts @@ -1,9 +1,22 @@ -import { Application, PartialApplicationConfig } from 'lisk-sdk'; +import { Application, PartialApplicationConfig, NFTModule } from 'lisk-sdk'; +import { TestNftModule } from './modules/testNft/module'; import { registerModules } from './modules'; import { registerPlugins } from './plugins'; export const getApplication = (config: PartialApplicationConfig): Application => { - const { app } = Application.defaultApplication(config); + const { app, method } = Application.defaultApplication(config, true); + + const nftModule = new NFTModule(); + const testNftModule = new TestNftModule(); + const interoperabilityModule = app['_registeredModules'].find( + mod => mod.name === 'interoperability', + ); + interoperabilityModule.registerInteroperableModule(nftModule); + nftModule.addDependencies(method.interoperability, method.fee, method.token); + testNftModule.addDependencies(nftModule.method); + + app.registerModule(nftModule); + app.registerModule(testNftModule); registerModules(app); registerPlugins(app); diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/destroy_nft.ts 
b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/destroy_nft.ts new file mode 100644 index 00000000000..822ad1b174f --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/destroy_nft.ts @@ -0,0 +1,36 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; +import { destroyNftParamsSchema } from '../schema'; + +interface Params { + address: Buffer; + nftID: Buffer; +} + +export class DestroyNftCommand extends BaseCommand { + private _nftMethod!: NFTMethod; + public schema = destroyNftParamsSchema; + + public init(args: { nftMethod: NFTMethod }): void { + this._nftMethod = args.nftMethod; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._nftMethod.destroy(context.getMethodContext(), params.address, params.nftID); + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/mint_nft.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/mint_nft.ts new file mode 100644 index 00000000000..bc5638846d4 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/commands/mint_nft.ts @@ -0,0 +1,43 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseCommand, CommandExecuteContext, NFTMethod } from 'lisk-sdk'; +import { NFTAttributes } from '../types'; +import { mintNftParamsSchema } from '../schema'; + +interface Params { + address: Buffer; + collectionID: Buffer; + attributesArray: NFTAttributes[]; +} + +export class MintNftCommand extends BaseCommand { + private _nftMethod!: NFTMethod; + public schema = mintNftParamsSchema; + + public init(args: { nftMethod: NFTMethod }): void { + this._nftMethod = args.nftMethod; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + await this._nftMethod.create( + context.getMethodContext(), + params.address, + params.collectionID, + params.attributesArray, + ); + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/constants.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/constants.ts new file mode 100644 index 00000000000..a0150fad36f --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/constants.ts @@ -0,0 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export const LENGTH_COLLECTION_ID = 4; +export const MIN_LENGTH_MODULE_NAME = 1; +export const MAX_LENGTH_MODULE_NAME = 32; +export const LENGTH_NFT_ID = 16; diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/endpoint.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/endpoint.ts new file mode 100644 index 00000000000..1d091013741 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/endpoint.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseEndpoint } from 'lisk-sdk'; + +export class TestNftEndpoint extends BaseEndpoint {} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/method.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/method.ts new file mode 100644 index 00000000000..5bab789e7f1 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/method.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseMethod } from 'lisk-sdk'; + +export class TestNftMethod extends BaseMethod {} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/module.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/module.ts new file mode 100644 index 00000000000..a228abff3af --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/module.ts @@ -0,0 +1,56 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseModule, ModuleInitArgs, ModuleMetadata, NFTMethod } from 'lisk-sdk'; +import { TestNftEndpoint } from './endpoint'; +import { TestNftMethod } from './method'; +import { MintNftCommand } from './commands/mint_nft'; +import { DestroyNftCommand } from './commands/destroy_nft'; + +export class TestNftModule extends BaseModule { + public endpoint = new TestNftEndpoint(this.stores, this.offchainStores); + public method = new TestNftMethod(this.stores, this.events); + public mintNftCommand = new MintNftCommand(this.stores, this.events); + public destroyNftCommand = new DestroyNftCommand(this.stores, this.events); + public commands = [this.mintNftCommand, this.destroyNftCommand]; + + private _nftMethod!: NFTMethod; + + public addDependencies(nftMethod: NFTMethod) { + this._nftMethod = nftMethod; + } + + public metadata(): ModuleMetadata { + return { + ...this.baseMetadata(), + endpoints: [], + commands: this.commands.map(command => ({ + name: command.name, + params: command.schema, + })), + events: [], + assets: [], + }; + } + + // 
eslint-disable-next-line @typescript-eslint/require-await + public async init(_args: ModuleInitArgs) { + this.mintNftCommand.init({ + nftMethod: this._nftMethod, + }); + this.destroyNftCommand.init({ + nftMethod: this._nftMethod, + }); + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/schema.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/schema.ts new file mode 100644 index 00000000000..c183e9fb8ff --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/schema.ts @@ -0,0 +1,79 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { + LENGTH_COLLECTION_ID, + LENGTH_NFT_ID, + MAX_LENGTH_MODULE_NAME, + MIN_LENGTH_MODULE_NAME, +} from './constants'; + +export const mintNftParamsSchema = { + $id: '/lisk/nftTransferParams', + type: 'object', + required: ['address', 'collectionID', 'attributesArray'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + collectionID: { + dataType: 'bytes', + minLength: LENGTH_COLLECTION_ID, + maxLength: LENGTH_COLLECTION_ID, + fieldNumber: 2, + }, + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: MIN_LENGTH_MODULE_NAME, + maxLength: MAX_LENGTH_MODULE_NAME, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; + +export const destroyNftParamsSchema = { + $id: '/lisk/nftDestroyParams', + type: 'object', + required: ['address', 'nftID'], + properties: { + address: { + dataType: 'bytes', + format: 'lisk32', + fieldNumber: 1, + }, + nftID: { + dataType: 'bytes', + minLength: LENGTH_NFT_ID, + maxLength: LENGTH_NFT_ID, + fieldNumber: 2, + }, + }, +}; diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/types.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/types.ts new file mode 100644 index 00000000000..8d1af2d969a --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/testNft/types.ts @@ -0,0 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +export interface NFTAttributes { + module: string; + attributes: Buffer; +} diff --git a/examples/pos-mainchain/src/app/modules.ts b/examples/pos-mainchain/src/app/modules.ts index f332892b447..d69352da8ae 100644 --- a/examples/pos-mainchain/src/app/modules.ts +++ b/examples/pos-mainchain/src/app/modules.ts @@ -1,6 +1,4 @@ +/* eslint-disable @typescript-eslint/no-empty-function */ import { Application } from 'lisk-sdk'; -import { TestNftModule } from './modules/testNft/module'; -export const registerModules = (app: Application): void => { - app.registerModule(new TestNftModule()); -}; +export const registerModules = (_app: Application): void => {}; From fb86ace9fbfbe536f78e9e1ecdbcc415ed7a05d1 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 20 Jul 2023 11:03:26 +0200 Subject: [PATCH 093/170] NFTModule's stores registration (#8762) :bug: Fixes indexes for NFTModule's store registration --- framework/src/modules/nft/module.ts | 8 ++++---- framework/test/unit/modules/nft/method.spec.ts | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index cba0c0b2211..5a13661163f 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -116,10 +116,10 @@ export class NFTModule extends BaseInteroperableModule { AllNFTsFromCollectionSupportRemovedEvent, new AllNFTsFromCollectionSupportRemovedEvent(this.name), ); - this.stores.register(NFTStore, new NFTStore(this.name, 1)); - this.stores.register(UserStore, new UserStore(this.name, 2)); - this.stores.register(EscrowStore, new EscrowStore(this.name, 3)); - this.stores.register(SupportedNFTsStore, new SupportedNFTsStore(this.name, 4)); + this.stores.register(NFTStore, new NFTStore(this.name, 0)); + this.stores.register(UserStore, new UserStore(this.name, 1)); + 
this.stores.register(EscrowStore, new EscrowStore(this.name, 2)); + this.stores.register(SupportedNFTsStore, new SupportedNFTsStore(this.name, 3)); } public get name(): string { diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 23c46664d09..4452dcb29d8 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1606,7 +1606,7 @@ describe('NFTMethod', () => { describe('recover', () => { const terminatedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - const substorePrefix = Buffer.from('8000', 'hex'); + const substorePrefix = Buffer.from('0000', 'hex'); const storeKey = utils.getRandomBytes(LENGTH_NFT_ID); const storeValue = codec.encode(nftStoreSchema, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), From 0f1296c62585f26f3291f3e1a01aa757801f93f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 12:12:17 +0200 Subject: [PATCH 094/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20protobufjs?= =?UTF-8?q?=20from=206.11.3=20to=207.2.4=20(#8704)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :arrow_up: Bump protobufjs from 6.11.3 to 7.2.4 Bumps [protobufjs](https://github.com/protobufjs/protobuf.js) from 6.11.3 to 7.2.4. - [Release notes](https://github.com/protobufjs/protobuf.js/releases) - [Changelog](https://github.com/protobufjs/protobuf.js/blob/master/CHANGELOG.md) - [Commits](https://github.com/protobufjs/protobuf.js/compare/v6.11.3...protobufjs-v7.2.4) --- updated-dependencies: - dependency-name: protobufjs dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- protocol-specs/package.json | 2 +- yarn.lock | 37 +++++++++++++------------------------ 2 files changed, 14 insertions(+), 25 deletions(-) diff --git a/protocol-specs/package.json b/protocol-specs/package.json index ceb4ebf9502..9e3dc4406e3 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -23,7 +23,7 @@ "@liskhq/lisk-cryptography": "4.0.0-beta.2", "@liskhq/lisk-passphrase": "4.0.0-beta.2", "@liskhq/lisk-validator": "0.7.0-beta.2", - "protobufjs": "6.11.3" + "protobufjs": "7.2.4" }, "devDependencies": { "eslint": "8.28.0", diff --git a/yarn.lock b/yarn.lock index 38728d2da86..9cf41dd1864 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3656,11 +3656,6 @@ resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.149.tgz#1342d63d948c6062838fbf961012f74d4e638440" integrity sha512-ijGqzZt/b7BfzcK9vTrS6MFljQRPn5BFWOx8oE0GYxribu6uV+aA9zZuXI1zc/etK9E8nrgdoF2+LgUw7+9tJQ== -"@types/long@^4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9" - integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w== - "@types/mem-fs-editor@*": version "7.0.0" resolved "https://registry.yarnpkg.com/@types/mem-fs-editor/-/mem-fs-editor-7.0.0.tgz#e6576e0f66e20055481b2cdbf193457f1a2c4e65" @@ -3701,10 +3696,10 @@ resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.1.tgz#283f669ff76d7b8260df8ab7a4262cc83d988256" integrity sha512-fZQQafSREFyuZcdWFAExYjBiCL7AUCdgsk80iO0q4yihYYdcIiH28CcuPTGFgLOCC8RlW49GSQxdHwZP+I7CNg== -"@types/node@*", "@types/node@>=13.7.0": - version "17.0.38" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.38.tgz#f8bb07c371ccb1903f3752872c89f44006132947" - integrity sha512-5jY9RhV7c0Z4Jy09G+NIDTsCZ5G0L5n+Z+p+Y7t5VJHM30bgwzSjVtlcBxqAj+6L/swIlvtOSzr8rBk/aNyV2g== 
+"@types/node@*", "@types/node@>=13.7.0", "@types/node@^16 || ^18": + version "18.16.16" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.16.16.tgz#3b64862856c7874ccf7439e6bab872d245c86d8e" + integrity sha512-NpaM49IGQQAUlBhHMF82QH80J08os4ZmyF9MkpCzWAGuOHqE4gTEbhzd7L3l5LmWuZ6E0OiC1FweQ4tsiW35+g== "@types/node@11.11.2": version "11.11.2" @@ -3721,11 +3716,6 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.3.tgz#f0b991c32cfc6a4e7f3399d6cb4b8cf9a0315014" integrity sha512-p6ua9zBxz5otCmbpb5D3U4B5Nanw6Pk3PPyX05xnxbB/fRv71N7CPmORg7uAD5P70T0xmx1pzAx/FUfa5X+3cw== -"@types/node@^16 || ^18": - version "18.16.16" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.16.16.tgz#3b64862856c7874ccf7439e6bab872d245c86d8e" - integrity sha512-NpaM49IGQQAUlBhHMF82QH80J08os4ZmyF9MkpCzWAGuOHqE4gTEbhzd7L3l5LmWuZ6E0OiC1FweQ4tsiW35+g== - "@types/normalize-package-data@^2.4.0": version "2.4.0" resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" @@ -11206,10 +11196,10 @@ log-update@^4.0.0: slice-ansi "^4.0.0" wrap-ansi "^6.2.0" -long@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" - integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== +long@^5.0.0: + version "5.2.3" + resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1" + integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q== loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: version "1.4.0" @@ -13266,10 +13256,10 @@ proto-list@~1.2.1: resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= -protobufjs@6.11.3: - version "6.11.3" - resolved 
"https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.3.tgz#637a527205a35caa4f3e2a9a4a13ddffe0e7af74" - integrity sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg== +protobufjs@7.2.4: + version "7.2.4" + resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.4.tgz#3fc1ec0cdc89dd91aef9ba6037ba07408485c3ae" + integrity sha512-AT+RJgD2sH8phPmCf7OUZR8xGdcJRga4+1cOaXJ64hvcSkVhNcRHOwIxUatPH15+nj59WAGTDv3LSGZPEQbJaQ== dependencies: "@protobufjs/aspromise" "^1.1.2" "@protobufjs/base64" "^1.1.2" @@ -13281,9 +13271,8 @@ protobufjs@6.11.3: "@protobufjs/path" "^1.1.2" "@protobufjs/pool" "^1.1.0" "@protobufjs/utf8" "^1.1.0" - "@types/long" "^4.0.1" "@types/node" ">=13.7.0" - long "^4.0.0" + long "^5.0.0" protocols@^2.0.0, protocols@^2.0.1: version "2.0.1" From a8fa079b5a12a45a3153e48c8eaa42f87a81c93f Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 21 Jul 2023 16:18:27 +0200 Subject: [PATCH 095/170] NFTMethod.create uses integer as the index segment of NFT ID (#8725) :bug: NFTMethod.create uses integer as the index segment of NFT ID --- framework/src/modules/nft/method.ts | 10 +++------ .../test/unit/modules/nft/method.spec.ts | 22 +++++++------------ 2 files changed, 11 insertions(+), 21 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index aba6cc6da31..3e7c412b563 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -310,14 +310,10 @@ export class NFTMethod extends BaseMethod { } const index = await this.getNextAvailableIndex(methodContext, collectionID); - const indexBytes = Buffer.from(index.toString()); + const indexBytes = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID); + indexBytes.writeBigInt64BE(BigInt(index)); - const nftID = Buffer.concat([ - this._config.ownChainID, - collectionID, - Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - 
LENGTH_COLLECTION_ID - indexBytes.length, 0), - indexBytes, - ]); + const nftID = Buffer.concat([this._config.ownChainID, collectionID, indexBytes]); this._feeMethod.payFee(methodContext, BigInt(FEE_CREATE_NFT)); const nftStore = this.stores.get(NFTStore); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 4452dcb29d8..c06f98f33c0 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -574,13 +574,10 @@ describe('NFTMethod', () => { }); it('should set data to stores with correct key and emit successfull create event when there is no entry in the nft substore', async () => { - const index = Buffer.from('0'); - const expectedKey = Buffer.concat([ - config.ownChainID, - collectionID, - Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID - index.length, 0), - index, - ]); + const indexBytes = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID); + indexBytes.writeBigInt64BE(BigInt(0)); + + const expectedKey = Buffer.concat([config.ownChainID, collectionID, indexBytes]); await method.create(methodContext, address, collectionID, attributesArray3); const nftStoreData = await nftStore.get(methodContext, expectedKey); @@ -600,7 +597,9 @@ describe('NFTMethod', () => { }); it('should set data to stores with correct key and emit successfull create event when there is some entry in the nft substore', async () => { - const index = Buffer.from('2'); + const indexBytes = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID); + indexBytes.writeBigInt64BE(BigInt(2)); + await nftStore.save(methodContext, nftID, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: attributesArray1, @@ -610,12 +609,7 @@ describe('NFTMethod', () => { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: attributesArray2, }); - const expectedKey = Buffer.concat([ - config.ownChainID, - collectionID, - 
Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID - index.length, 0), - index, - ]); + const expectedKey = Buffer.concat([config.ownChainID, collectionID, indexBytes]); await method.create(methodContext, address, collectionID, attributesArray3); const nftStoreData = await nftStore.get(methodContext, expectedKey); From 9902905c3b61f9e9579c92d7de50c863fba558e4 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 21 Jul 2023 17:52:23 +0200 Subject: [PATCH 096/170] NFTMethod.getNextAvailableIndex returns the highest index incremented by 1 within a collection (#8730) * :bug: NFTMethod.getNextAvailableIndex iterates over collections within ownChain * :recycle: Removes redundant counting logic in NFTStore.getNextAvailableIndex * :necktie: NFTMethod.getNextAvailableIndex increments the largest index within a collection of a chain * :recycle: NFTMethod.getNextAvailableIndex --- framework/src/modules/nft/method.ts | 28 +++-- .../test/unit/modules/nft/method.spec.ts | 103 ++++++++++-------- 2 files changed, 78 insertions(+), 53 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 3e7c412b563..7571db37d66 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -278,21 +278,29 @@ export class NFTMethod extends BaseMethod { public async getNextAvailableIndex( methodContext: MethodContext, collectionID: Buffer, - ): Promise { + ): Promise { + const indexLength = LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID; const nftStore = this.stores.get(NFTStore); + const nftStoreData = await nftStore.iterate(methodContext, { - gte: Buffer.alloc(LENGTH_NFT_ID, 0), - lte: Buffer.alloc(LENGTH_NFT_ID, 255), + gte: Buffer.concat([this._config.ownChainID, collectionID, Buffer.alloc(indexLength, 0)]), + lte: Buffer.concat([this._config.ownChainID, collectionID, Buffer.alloc(indexLength, 255)]), }); - let count = 0; - for (const { key } of 
nftStoreData) { - if (key.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID).equals(collectionID)) { - count += 1; - } + if (nftStoreData.length === 0) { + return BigInt(0); + } + + const latestKey = nftStoreData[nftStoreData.length - 1].key; + const indexBytes = latestKey.slice(LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID, LENGTH_NFT_ID); + const index = indexBytes.readBigUInt64BE(); + const largestIndex = BigInt(BigInt(2 ** 64) - BigInt(1)); + + if (index === largestIndex) { + throw new Error('No more available indexes'); } - return count; + return index + BigInt(1); } public async create( @@ -311,7 +319,7 @@ export class NFTMethod extends BaseMethod { const index = await this.getNextAvailableIndex(methodContext, collectionID); const indexBytes = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID); - indexBytes.writeBigInt64BE(BigInt(index)); + indexBytes.writeBigInt64BE(index); const nftID = Buffer.concat([this._config.ownChainID, collectionID, indexBytes]); this._feeMethod.payFee(methodContext, BigInt(FEE_CREATE_NFT)); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index c06f98f33c0..628ca3902ae 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -96,7 +96,13 @@ describe('NFTMethod', () => { const userStore = module.stores.get(UserStore); const supportedNFTsStore = module.stores.get(SupportedNFTsStore); - const nftID = utils.getRandomBytes(LENGTH_NFT_ID); + const firstIndex = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID, 0); + firstIndex.writeBigUInt64BE(BigInt(0)); + const nftID = Buffer.concat([ + config.ownChainID, + utils.getRandomBytes(LENGTH_CHAIN_ID), + firstIndex, + ]); let owner: Buffer; const checkEventResult = ( @@ -424,14 +430,20 @@ describe('NFTMethod', () => { }); it('should return false if nft chain id does not equal own chain id and nft chain id is supported but corresponding 
supported collection id array does not include collection id for nft id', async () => { - await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { + const foreignNFT = utils.getRandomBytes(LENGTH_NFT_ID); + await nftStore.save(methodContext, foreignNFT, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + + await supportedNFTsStore.set(methodContext, foreignNFT.slice(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, ], }); - const isSupported = await method.isNFTSupported(methodContext, nftID); + const isSupported = await method.isNFTSupported(methodContext, foreignNFT); expect(isSupported).toBe(false); }); }); @@ -494,27 +506,23 @@ describe('NFTMethod', () => { }); describe('getNextAvailableIndex', () => { - const attributesArray1 = [ + const attributesArray = [ { module: 'customMod1', attributes: Buffer.alloc(5) }, { module: 'customMod2', attributes: Buffer.alloc(2) }, ]; - const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); beforeEach(async () => { await nftStore.save(methodContext, nftID, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - attributesArray: attributesArray1, + attributesArray, }); }); - it('should return index count 0 if entry does not exist in the nft substore for the nft id', async () => { + it('should return index count 0 if there is no entry in nft substore', async () => { await nftStore.del(methodContext, nftID); - const returnedIndex = await method.getNextAvailableIndex( - methodContext, - utils.getRandomBytes(LENGTH_COLLECTION_ID), - ); - expect(returnedIndex).toBe(0); + const returnedIndex = await method.getNextAvailableIndex(methodContext, collectionID); + expect(returnedIndex).toBe(BigInt(0)); }); it('should return index count 0 if entry exists 
in the nft substore for the nft id and no key matches the given collection id', async () => { @@ -522,32 +530,40 @@ describe('NFTMethod', () => { methodContext, utils.getRandomBytes(LENGTH_COLLECTION_ID), ); - expect(returnedIndex).toBe(0); + expect(returnedIndex).toBe(BigInt(0)); }); - it('should return index count 1 if entry exists in the nft substore for the nft id and a key matches the given collection id', async () => { + it('should return existing highest index incremented by 1 within the given collection id', async () => { + const highestIndex = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID, 0); + highestIndex.writeBigUInt64BE(BigInt(419)); + const nftIDHighestIndex = Buffer.concat([config.ownChainID, collectionID, highestIndex]); + await nftStore.save(methodContext, nftIDHighestIndex, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray, + }); + const returnedIndex = await method.getNextAvailableIndex(methodContext, collectionID); - expect(returnedIndex).toBe(1); + expect(returnedIndex).toBe(BigInt(420)); }); - it('should return non zero index count if entry exists in the nft substore for the nft id and more than 1 key matches the given collection id', async () => { - const newKey = Buffer.concat([utils.getRandomBytes(LENGTH_CHAIN_ID), collectionID]); - await nftStore.save(methodContext, newKey, { + it('should throw if indexes within a collection are consumed', async () => { + const largestIndex = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID, 0); + largestIndex.writeBigUInt64BE(BigInt(BigInt(2 ** 64) - BigInt(1))); + const nftIDHighestIndex = Buffer.concat([config.ownChainID, collectionID, largestIndex]); + await nftStore.save(methodContext, nftIDHighestIndex, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - attributesArray: attributesArray2, + attributesArray, }); - const returnedIndex = await method.getNextAvailableIndex(methodContext, collectionID); - expect(returnedIndex).toBe(2); + + await 
expect(method.getNextAvailableIndex(methodContext, collectionID)).rejects.toThrow( + 'No more available indexes', + ); }); }); describe('create', () => { - const attributesArray1 = [ - { module: 'customMod1', attributes: Buffer.alloc(5) }, - { module: 'customMod2', attributes: Buffer.alloc(2) }, - ]; - const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; - const attributesArray3 = [{ module: 'customMod3', attributes: Buffer.alloc(9) }]; + const attributesArray1 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; + const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(9) }]; const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); const address = utils.getRandomBytes(LENGTH_ADDRESS); @@ -579,7 +595,7 @@ describe('NFTMethod', () => { const expectedKey = Buffer.concat([config.ownChainID, collectionID, indexBytes]); - await method.create(methodContext, address, collectionID, attributesArray3); + await method.create(methodContext, address, collectionID, attributesArray2); const nftStoreData = await nftStore.get(methodContext, expectedKey); const userStoreData = await userStore.get( methodContext, @@ -587,7 +603,7 @@ describe('NFTMethod', () => { ); expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); expect(nftStoreData.owner).toStrictEqual(address); - expect(nftStoreData.attributesArray).toEqual(attributesArray3); + expect(nftStoreData.attributesArray).toEqual(attributesArray2); expect(userStoreData.lockingModule).toEqual(NFT_NOT_LOCKED); checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { address, @@ -598,20 +614,20 @@ describe('NFTMethod', () => { it('should set data to stores with correct key and emit successfull create event when there is some entry in the nft substore', async () => { const indexBytes = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID); - indexBytes.writeBigInt64BE(BigInt(2)); - - await 
nftStore.save(methodContext, nftID, { - owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - attributesArray: attributesArray1, - }); - const newKey = Buffer.concat([utils.getRandomBytes(LENGTH_CHAIN_ID), collectionID]); + indexBytes.writeBigUint64BE(BigInt(911)); + const newKey = Buffer.concat([config.ownChainID, collectionID, indexBytes]); await nftStore.save(methodContext, newKey, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - attributesArray: attributesArray2, + attributesArray: attributesArray1, }); - const expectedKey = Buffer.concat([config.ownChainID, collectionID, indexBytes]); - await method.create(methodContext, address, collectionID, attributesArray3); + const expectedIndexBytes = Buffer.alloc( + LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID, + ); + expectedIndexBytes.writeBigUint64BE(BigInt(912)); + const expectedKey = Buffer.concat([config.ownChainID, collectionID, expectedIndexBytes]); + + await method.create(methodContext, address, collectionID, attributesArray2); const nftStoreData = await nftStore.get(methodContext, expectedKey); const userStoreData = await userStore.get( methodContext, @@ -619,7 +635,7 @@ describe('NFTMethod', () => { ); expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); expect(nftStoreData.owner).toStrictEqual(address); - expect(nftStoreData.attributesArray).toEqual(attributesArray3); + expect(nftStoreData.attributesArray).toEqual(attributesArray2); expect(userStoreData.lockingModule).toEqual(NFT_NOT_LOCKED); checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { address, @@ -936,13 +952,14 @@ describe('NFTMethod', () => { }); it('should throw and emit error transfer cross chain event if nft does not exist', async () => { - receivingChainID = nftID.slice(0, LENGTH_CHAIN_ID); + const nonExistingNFTID = utils.getRandomBytes(LENGTH_NFT_ID); + receivingChainID = nonExistingNFTID.slice(0, LENGTH_CHAIN_ID); await expect( method.transferCrossChain( methodContext, senderAddress, 
recipientAddress, - nftID, + nonExistingNFTID, receivingChainID, messageFee, data, @@ -958,7 +975,7 @@ describe('NFTMethod', () => { senderAddress, recipientAddress, receivingChainID, - nftID, + nftID: nonExistingNFTID, includeAttributes, }, NftEventResult.RESULT_NFT_DOES_NOT_EXIST, From 6e9d7b4bd42329c138c6e967e4f931d9f6a7bd1b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 09:39:18 +0200 Subject: [PATCH 097/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20word-wrap?= =?UTF-8?q?=20from=201.2.3=20to=201.2.4=20(#8752)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :arrow_up: Bump word-wrap from 1.2.3 to 1.2.4 Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.4. - [Release notes](https://github.com/jonschlinkert/word-wrap/releases) - [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.4) --- updated-dependencies: - dependency-name: word-wrap dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: !shan --- yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn.lock b/yarn.lock index 9cf41dd1864..4417bad78a2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16129,9 +16129,9 @@ widest-line@^3.1.0: string-width "^4.0.0" word-wrap@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + version "1.2.4" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.4.tgz#cb4b50ec9aca570abd1f52f33cd45b6c61739a9f" + integrity sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA== wordwrap@^1.0.0: version "1.0.0" From f5948ed87c346c2ceb2c726619d984a8d234b20b Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Fri, 28 Jul 2023 09:35:02 +0100 Subject: [PATCH 098/170] Fix build on branch feature/6917-implement-nft-module (#8777) * Fix build * Update tests --- framework/src/modules/nft/internal_method.ts | 2 -- framework/src/modules/nft/module.ts | 3 +-- framework/src/modules/nft/types.ts | 1 - framework/test/unit/modules/nft/internal_method.spec.ts | 5 ----- 4 files changed, 1 insertion(+), 10 deletions(-) diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index 6b87cbb7aea..95cd702c0de 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -22,7 +22,6 @@ import { UserStore } from './stores/user'; import { CROSS_CHAIN_COMMAND_NAME_TRANSFER, MODULE_NAME_NFT, NFT_NOT_LOCKED } from './constants'; import { EscrowStore } from './stores/escrow'; import { TransferCrossChainEvent } from './events/transfer_cross_chain'; -import { CCM_STATUS_OK } 
from '../token/constants'; import { crossChainNFTTransferMessageParamsSchema } from './schemas'; export class InternalMethod extends BaseMethod { @@ -166,7 +165,6 @@ export class InternalMethod extends BaseMethod { CROSS_CHAIN_COMMAND_NAME_TRANSFER, receivingChainID, messageFee, - CCM_STATUS_OK, codec.encode(crossChainNFTTransferMessageParamsSchema, { nftID, senderAddress, diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 5a13661163f..a772194a149 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -18,7 +18,7 @@ import { validator } from '@liskhq/lisk-validator'; import { GenesisBlockExecuteContext } from '../../state_machine'; import { ModuleInitArgs, ModuleMetadata } from '../base_module'; import { BaseInteroperableModule } from '../interoperability'; -import { InteroperabilityMethod } from '../token/types'; +import { InteroperabilityMethod, FeeMethod, GenesisNFTStore, TokenMethod } from './types'; import { NFTInteroperableMethod } from './cc_method'; import { NFTEndpoint } from './endpoint'; import { AllNFTsFromChainSupportedEvent } from './events/all_nfts_from_chain_suported'; @@ -59,7 +59,6 @@ import { EscrowStore } from './stores/escrow'; import { NFTStore } from './stores/nft'; import { SupportedNFTsStore } from './stores/supported_nfts'; import { UserStore } from './stores/user'; -import { FeeMethod, GenesisNFTStore, TokenMethod } from './types'; import { CrossChainTransferCommand as CrossChainTransferMessageCommand } from './cc_commands/cc_transfer'; import { TransferCrossChainCommand } from './commands/transfer_cross_chain'; import { TransferCommand } from './commands/transfer'; diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index d56e3343ebb..64ccefa1a54 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -27,7 +27,6 @@ export interface InteroperabilityMethod { crossChainCommand: string, 
receivingChainID: Buffer, fee: bigint, - status: number, parameters: Buffer, timestamp?: number, ): Promise; diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 4817a89d45d..72e158fa376 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -40,7 +40,6 @@ import { TransferCrossChainEventData, } from '../../../../src/modules/nft/events/transfer_cross_chain'; import { DestroyEvent, DestroyEventData } from '../../../../src/modules/nft/events/destroy'; -import { CCM_STATUS_OK } from '../../../../src/modules/token/constants'; import { crossChainNFTTransferMessageParamsSchema } from '../../../../src/modules/nft/schemas'; describe('InternalMethod', () => { @@ -294,7 +293,6 @@ describe('InternalMethod', () => { CROSS_CHAIN_COMMAND_NAME_TRANSFER, receivingChainID, messageFee, - CCM_STATUS_OK, ccmParameters, ); }); @@ -363,7 +361,6 @@ describe('InternalMethod', () => { CROSS_CHAIN_COMMAND_NAME_TRANSFER, receivingChainID, messageFee, - CCM_STATUS_OK, ccmParameters, ); }); @@ -449,7 +446,6 @@ describe('InternalMethod', () => { CROSS_CHAIN_COMMAND_NAME_TRANSFER, receivingChainID, messageFee, - CCM_STATUS_OK, ccmParameters, ); }); @@ -525,7 +521,6 @@ describe('InternalMethod', () => { CROSS_CHAIN_COMMAND_NAME_TRANSFER, receivingChainID, messageFee, - CCM_STATUS_OK, ccmParameters, ); }); From 79113b45ca3f10c6dd2e65b4ea78f0c9309ff381 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 31 Jul 2023 16:22:54 +0200 Subject: [PATCH 099/170] Update lisk-db and add backup feature (#8667) * :seedling: Add backup feature * :bug: Fix schema * :fire: Remove lisk-db and fix tests * :white_check_mark: Fix tests * :bowtie: Add PR CI * :recycle: Apply format * :bug: Update to remove old backup * :arrow_up: Update version --- .github/ISSUE_TEMPLATE/feature-request.md | 4 - .../ISSUE_TEMPLATE/feature_specification.md | 17 + 
.github/actions/prepare-build/action.yml | 22 + .github/actions/prepare-install/action.yml | 14 + .github/codeql/codeql-config.yml | 4 + .github/workflows/branch.yaml | 25 + .github/workflows/code_ql.yaml | 59 +++ .github/workflows/dependency.yaml | 20 + .github/workflows/pr.yaml | 91 ++++ .jenkins/Jenkinsfile.audit | 13 - .jenkins/Jenkinsfile.test | 301 ------------ commander/jest.config.js | 2 +- commander/package.json | 14 +- .../bootstrapping/commands/blockchain/hash.ts | 5 +- commander/src/utils/db.ts | 4 +- elements/lisk-api-client/package.json | 4 +- elements/lisk-bft/package.json | 4 +- elements/lisk-chain/package.json | 4 +- elements/lisk-chain/src/chain.ts | 4 +- .../lisk-chain/src/data_access/data_access.ts | 4 +- .../lisk-chain/src/data_access/storage.ts | 111 +++-- .../src/state_store/account_store.ts | 8 +- .../src/state_store/chain_state_store.ts | 6 +- .../src/state_store/consensus_state_store.ts | 6 +- .../lisk-chain/src/state_store/state_store.ts | 12 +- .../integration/data_access/accounts.spec.ts | 14 +- .../integration/data_access/blocks.spec.ts | 103 +++-- .../data_access/transactions.spec.ts | 12 +- .../integration/state_store/save_diff.spec.ts | 62 +-- elements/lisk-chain/test/unit/chain.spec.ts | 70 ++- .../test/unit/data_access/data_access.spec.ts | 71 +-- elements/lisk-chain/test/unit/process.spec.ts | 10 +- .../test/unit/state_store/account.spec.ts | 26 +- .../test/unit/state_store/chain_state.spec.ts | 18 +- .../unit/state_store/consensus_state.spec.ts | 29 +- elements/lisk-chain/test/utils/state_store.ts | 7 +- elements/lisk-client/package.json | 4 +- elements/lisk-db/.eslintignore | 4 - elements/lisk-db/.eslintrc.js | 7 - elements/lisk-db/.npmignore | 1 - elements/lisk-db/.npmrc | 1 - elements/lisk-db/.prettierignore | 1 - elements/lisk-db/.prettierrc.json | 1 - elements/lisk-db/README.md | 88 ---- elements/lisk-db/benchmark/databases/index.js | 16 - .../lisk-db/benchmark/databases/leveldb.js | 40 -- 
.../lisk-db/benchmark/databases/rocksdb.js | 34 -- elements/lisk-db/benchmark/index.js | 56 --- elements/lisk-db/benchmark/tests/batch.js | 53 --- elements/lisk-db/benchmark/tests/del.js | 40 -- elements/lisk-db/benchmark/tests/fixtures.js | 9 - elements/lisk-db/benchmark/tests/get.js | 40 -- elements/lisk-db/benchmark/tests/put.js | 38 -- elements/lisk-db/jest.config.js | 1 - elements/lisk-db/package.json | 70 --- elements/lisk-db/scripts | 1 - elements/lisk-db/src/errors.ts | 20 - elements/lisk-db/src/index.ts | 17 - elements/lisk-db/src/kv_store.ts | 124 ----- elements/lisk-db/src/utils.ts | 47 -- elements/lisk-db/test/.eslintrc.js | 7 - elements/lisk-db/test/_setup.js | 3 - elements/lisk-db/test/kv_store.spec.ts | 431 ------------------ elements/lisk-db/test/tsconfig.json | 1 - elements/lisk-db/test/utils.spec.ts | 71 --- elements/lisk-db/tsconfig.json | 1 - elements/lisk-elements/package.json | 13 +- elements/lisk-elements/src/index.ts | 1 - .../test/__snapshots__/index.spec.ts.snap | 1 - elements/lisk-genesis/package.json | 4 +- elements/lisk-p2p/package.json | 2 +- elements/lisk-p2p/src/utils/network.ts | 4 +- .../lisk-p2p/test/unit/utils/network.spec.ts | 4 +- .../package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 8 +- .../lisk-framework-forger-plugin/package.json | 12 +- .../src/controllers/forging_info.ts | 4 +- .../src/controllers/voters.ts | 4 +- .../lisk-framework-forger-plugin/src/db.ts | 22 +- .../src/forger_plugin.ts | 7 +- .../package.json | 6 +- .../package.json | 6 +- .../package.json | 10 +- .../src/db.ts | 47 +- .../src/report_misbehavior_plugin.ts | 7 +- framework/package.json | 14 +- framework/src/application.ts | 18 +- framework/src/node/forger/data_access.ts | 35 +- framework/src/node/forger/forger.ts | 6 +- framework/src/node/network/network.ts | 20 +- framework/src/node/node.ts | 39 +- framework/src/node/processor/processor.ts | 2 +- .../src/schema/application_config_schema.ts | 25 +- 
framework/src/testing/block_processing_env.ts | 8 +- framework/src/testing/utils.ts | 6 +- framework/src/types.ts | 3 + .../node/forger/seed_reveal.spec.ts | 4 +- .../node/forger/transaction_pool.spec.ts | 10 +- .../integration/node/genesis_block.spec.ts | 10 +- .../node/processor/delete_block.spec.ts | 11 +- .../__snapshots__/application.spec.ts.snap | 3 + .../test/unit/node/forger/forger.spec.ts | 66 +-- .../test/unit/node/network/network.spec.ts | 12 +- framework/test/unit/node/node.spec.ts | 14 +- .../block_synchronization_mechanism.spec.ts | 4 +- .../fast_chain_switching_mechanism.spec.ts | 4 +- .../node/synchronizer/synchronizer.spec.ts | 4 +- .../application_schema.spec.ts.snap | 17 + framework/test/utils/kv_store.ts | 26 +- framework/test/utils/node/node.ts | 10 +- sdk/package.json | 24 +- yarn.lock | 227 ++------- 112 files changed, 966 insertions(+), 2231 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/feature_specification.md create mode 100644 .github/actions/prepare-build/action.yml create mode 100644 .github/actions/prepare-install/action.yml create mode 100644 .github/codeql/codeql-config.yml create mode 100644 .github/workflows/branch.yaml create mode 100644 .github/workflows/code_ql.yaml create mode 100644 .github/workflows/dependency.yaml create mode 100644 .github/workflows/pr.yaml delete mode 100644 .jenkins/Jenkinsfile.audit delete mode 100644 .jenkins/Jenkinsfile.test delete mode 100644 elements/lisk-db/.eslintignore delete mode 100644 elements/lisk-db/.eslintrc.js delete mode 120000 elements/lisk-db/.npmignore delete mode 120000 elements/lisk-db/.npmrc delete mode 120000 elements/lisk-db/.prettierignore delete mode 120000 elements/lisk-db/.prettierrc.json delete mode 100644 elements/lisk-db/README.md delete mode 100644 elements/lisk-db/benchmark/databases/index.js delete mode 100644 elements/lisk-db/benchmark/databases/leveldb.js delete mode 100644 elements/lisk-db/benchmark/databases/rocksdb.js delete mode 100644 
elements/lisk-db/benchmark/index.js delete mode 100644 elements/lisk-db/benchmark/tests/batch.js delete mode 100644 elements/lisk-db/benchmark/tests/del.js delete mode 100644 elements/lisk-db/benchmark/tests/fixtures.js delete mode 100644 elements/lisk-db/benchmark/tests/get.js delete mode 100644 elements/lisk-db/benchmark/tests/put.js delete mode 120000 elements/lisk-db/jest.config.js delete mode 100644 elements/lisk-db/package.json delete mode 120000 elements/lisk-db/scripts delete mode 100644 elements/lisk-db/src/errors.ts delete mode 100644 elements/lisk-db/src/index.ts delete mode 100644 elements/lisk-db/src/kv_store.ts delete mode 100644 elements/lisk-db/src/utils.ts delete mode 100644 elements/lisk-db/test/.eslintrc.js delete mode 100644 elements/lisk-db/test/_setup.js delete mode 100644 elements/lisk-db/test/kv_store.spec.ts delete mode 120000 elements/lisk-db/test/tsconfig.json delete mode 100644 elements/lisk-db/test/utils.spec.ts delete mode 120000 elements/lisk-db/tsconfig.json diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md index 23cc5120644..813859bd00e 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -14,8 +14,4 @@ Please describe what functionality is needed Please describe why it is needed -### Acceptance Criteria - -Please describe the conditions which must be met for this issue to close - ### Additional Information diff --git a/.github/ISSUE_TEMPLATE/feature_specification.md b/.github/ISSUE_TEMPLATE/feature_specification.md new file mode 100644 index 00000000000..a2bd88afc38 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_specification.md @@ -0,0 +1,17 @@ +--- +name: Feature specification +about: Specification of new features/improvements +title: '' +labels: '' +assignees: '' +--- + +### Description + +Please describe the specification of new features/improvements + +### Acceptance Criteria + +Please describe the conditions which must be 
met for this issue to close + +### Additional Information diff --git a/.github/actions/prepare-build/action.yml b/.github/actions/prepare-build/action.yml new file mode 100644 index 00000000000..bc1b5418e4e --- /dev/null +++ b/.github/actions/prepare-build/action.yml @@ -0,0 +1,22 @@ +name: 'Prepare: Build and cache packages' +description: 'Prepares the repo for a job by building dependencies' +inputs: + cache-key: + description: 'Key of the cache to use' + required: true + +runs: + using: 'composite' + steps: + - uses: actions/cache@v3 + id: nx-cache + with: + path: | + node_modules + **/node_modules/** + key: cache-nx-${{ hashFiles('yarn.lock') }}-${{ inputs.cache-key }}-${{ github.sha }} + restore-keys: | + cache-nx-${{ hashFiles('yarn.lock') }}-${{ inputs.cache-key }}-${{ github.sha }} + - name: Build + shell: bash + run: yarn build diff --git a/.github/actions/prepare-install/action.yml b/.github/actions/prepare-install/action.yml new file mode 100644 index 00000000000..6011880895d --- /dev/null +++ b/.github/actions/prepare-install/action.yml @@ -0,0 +1,14 @@ +name: 'Prepare: Checkout and Install' +description: 'Prepares the repo for a job by checking out and installing dependencies' + +runs: + using: 'composite' + steps: + - name: Use Node.js 16 + uses: actions/setup-node@v3 + with: + node-version: 16 + cache: 'yarn' + - name: Install dependencies + shell: bash + run: yarn --prefer-offline diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml new file mode 100644 index 00000000000..77f53b9b325 --- /dev/null +++ b/.github/codeql/codeql-config.yml @@ -0,0 +1,4 @@ +paths-ignore: + - ./protocol-specs + - ./scripts + - ./templates diff --git a/.github/workflows/branch.yaml b/.github/workflows/branch.yaml new file mode 100644 index 00000000000..a2e02c7dd0b --- /dev/null +++ b/.github/workflows/branch.yaml @@ -0,0 +1,25 @@ +name: "Branch" + +on: + push: + branches: [ "development", feature/*, hotfix/*, main, release/* ] + +jobs: + 
code-cov-canary: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Install + uses: ./.github/actions/prepare-install + - name: Build + uses: ./.github/actions/prepare-build + with: + cache-key: ${{ github.ref }} + - run: npm config set //npm.lisk.com/:_authToken=$NPM_TOKEN + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + - name: Publish + run: npx lerna publish --canary --preid canary --registry https://npm.lisk.com --yes diff --git a/.github/workflows/code_ql.yaml b/.github/workflows/code_ql.yaml new file mode 100644 index 00000000000..95f5c6d62c2 --- /dev/null +++ b/.github/workflows/code_ql.yaml @@ -0,0 +1,59 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "development", feature/*, hotfix/*, main, release/* ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "development", feature/*, hotfix/*, main, release/* ] + schedule: + - cron: '30 3 * * 2' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: true + matrix: + language: [ 'typescript' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. 
+ - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + config-file: ./.github/codeql/codeql-config.yml + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/dependency.yaml b/.github/workflows/dependency.yaml new file mode 100644 index 00000000000..fe461b4243c --- /dev/null +++ b/.github/workflows/dependency.yaml @@ -0,0 +1,20 @@ +# Dependency Review Action +# +# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. 
+# +# Source repository: https://github.com/actions/dependency-review-action +# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement +name: 'Dependency Review' +on: [pull_request] + +permissions: + contents: read + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - name: 'Checkout Repository' + uses: actions/checkout@v3 + - name: 'Dependency Review' + uses: actions/dependency-review-action@v2 diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml new file mode 100644 index 00000000000..5da1e249ca4 --- /dev/null +++ b/.github/workflows/pr.yaml @@ -0,0 +1,91 @@ +name: 'PR' +on: + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Install + uses: ./.github/actions/prepare-install + - name: Build + uses: ./.github/actions/prepare-build + with: + cache-key: ${{ github.event.number }} + + lint: + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: build + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - run: git branch --track ${{ github.base_ref }} origin/${{ github.base_ref }} + - name: Install + uses: ./.github/actions/prepare-install + - name: Build + uses: ./.github/actions/prepare-build + with: + cache-key: ${{ github.event.number }} + - run: yarn format + - name: Check format + run: | + if [ -z "$(git status --untracked-files=no --porcelain)" ]; then + echo "All files formatted" + else + echo "Running format is required" + exit 1 + fi + - run: yarn lint + + unit-test: + runs-on: ubuntu-latest + timeout-minutes: 20 + needs: build + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Install + uses: ./.github/actions/prepare-install + - name: Build + uses: ./.github/actions/prepare-build + with: + cache-key: ${{ github.event.number }} + - run: yarn test:elements + - run: yarn 
test:framework + - run: yarn test:commander + + integration-test: + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: build + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Install + uses: ./.github/actions/prepare-install + - name: Build + uses: ./.github/actions/prepare-build + with: + cache-key: ${{ github.event.number }} + - run: yarn nx run-many --target=test:integration --projects=lisk-framework + + functional-test: + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: build + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Install + uses: ./.github/actions/prepare-install + - name: Build + uses: ./.github/actions/prepare-build + with: + cache-key: ${{ github.event.number }} + - run: yarn nx run-many --target=test:debug --projects=lisk-sdk-test diff --git a/.jenkins/Jenkinsfile.audit b/.jenkins/Jenkinsfile.audit deleted file mode 100644 index 440276d93a2..00000000000 --- a/.jenkins/Jenkinsfile.audit +++ /dev/null @@ -1,13 +0,0 @@ -pipeline { - agent { node { label 'lisk-sdk' } } - stages { - stage('yarn audit') { - steps { - nvm(readFile(".nvmrc").trim()) { - sh 'yarn audit' - } - } - } - } -} -// vim: filetype=groovy diff --git a/.jenkins/Jenkinsfile.test b/.jenkins/Jenkinsfile.test deleted file mode 100644 index 26e43dd2149..00000000000 --- a/.jenkins/Jenkinsfile.test +++ /dev/null @@ -1,301 +0,0 @@ -@Library('lisk-jenkins') _ - -def setup() { - cleanWs() - unstash 'build' -} - -def run_jest(test_name) { - ansiColor('xterm') { - timestamps { - nvm(getNodejsVersion()) { - dir('framework') { - sh 'npm run test:' + "${test_name}" + ' ${JEST_OPTIONS}' - } - } - } - } -} - -def teardown_jest(test_name) { - try { - nvm(getNodejsVersion()) { - sh """ - rm -rf coverage_jest_${test_name}; mkdir -p coverage_jest_${test_name} - cp framework/.coverage/${test_name}/coverage-final.json coverage_jest_${test_name}/ || echo "Jest lcov coverage not found" - """ - } - } catch(err) { - println "Could gather coverage statistics from 
test:\n${err}" - } - stash name: "coverage_jest_${test_name}", includes: "coverage_jest_${test_name}/*", allowEmpty: true - timeout(1) { - sh 'killall --verbose --wait node || true' - } - dir('framework') { - sh """ - mv .app.log lisk_${test_name}.stdout.txt || true - mv logs/devnet/lisk.log lisk_${test_name}.log || true - """ - archiveArtifacts artifacts: 'lisk_*.log', allowEmptyArchive: true - archiveArtifacts artifacts: 'lisk_*.stdout.txt', allowEmptyArchive: true - } - cleanWs() -} - -properties([ - parameters([ - string(name: 'JEST_OPTIONS', defaultValue: '', description: 'Additional jest options that you want to provide to test runner. Example: `-- --config=`'), - // read by the application - string(name: 'LOG_LEVEL', defaultValue: 'error', description: 'To get desired build log output change the log level', ), - string(name: 'FILE_LOG_LEVEL', defaultValue: 'error', description: 'To get desired file log output change the log level', ), - // used by tests - string(name: 'SILENT', defaultValue: 'true', description: 'To turn off test debug logs.', ) - ]) -]) - -pipeline { - agent { node { label 'lisk-sdk' } } - - environment { - MAX_TASK_LIMIT = '20' - } - - stages { - stage('Cancel previous build') { - steps { - script { - if (env.CHANGE_ID) { - // we are build a pull request, try cancelling previous build - cancelPreviousBuild() - } - } - } - } - stage('Build') { - steps { - nvm(getNodejsVersion()) { - sh ''' - npm i -g yarn - yarn - yarn build - # needed by one of the "Functional HTTP GET tests" - git rev-parse HEAD > framework/REVISION - ''' - } - stash name: 'build' - } - } - stage('Lint') { - steps { - nvm(getNodejsVersion()) { - sh ''' - yarn lint - ''' - } - } - } - stage('Format') { - steps { - nvm(getNodejsVersion()) { - sh ''' - npm run format - if [ -z "$(git status --untracked-files=no --porcelain)" ]; then - echo "All files formatted" - else - echo "Running format is required" - exit 1 - fi - ''' - } - } - } - stage('Test') { - parallel { - 
stage('Lisk Elements') { - agent { node { label 'lisk-elements' }} - steps { - setup() - nvm(getNodejsVersion()) { - sh 'npx lerna run test:ci --ignore lisk-{framework,commander,@liskhq/lisk-framework-*} --ignore @liskhq/lisk-p2p' - } - } - post { - cleanup { - // Save all the test results - sh ''' - rm -rf coverage_elements; mkdir -p coverage_elements - for f in $( find elements/*/.coverage/ -type f ); do - cp $f coverage_elements/$( sha1sum $f | cut -d ' ' -f 1 )_$( basename $f ) - done - ''' - stash name: "coverage_elements", includes: "coverage_elements/*", allowEmpty: true - } - } - } - stage('Lisk Elements / @liskhq/lisk-p2p') { - agent { node { label 'lisk-sdk-quirky' }} - steps { - setup() - nvm(getNodejsVersion()) { - sh 'npx lerna run test:ci --scope @liskhq/lisk-p2p' - } - } - } - stage('Lisk Commander') { - agent { node { label 'lisk-commander' }} - steps { - setup() - nvm(getNodejsVersion()) { - sh 'npx lerna run test:ci --scope=lisk-commander' - } - sh 'mv commander/.coverage/ coverage_commander/ || echo "Jest coverage not found"' - stash name: "coverage_commander", includes: "coverage_commander/*", allowEmpty: true - } - } - stage('Lisk Client End-to-End') { - agent { node { label 'lisk-client' }} - steps { - setup() - wrap([$class: 'Xvfb', parallelBuild: true, autoDisplayName: true]) { - nvm(getNodejsVersion()) { - sh 'npx lerna run test:e2e --scope=@liskhq/lisk-client' - } - } - } - } - stage('Lisk Framework Unit') { - agent { node { label 'lisk-framework' }} - steps { - setup() - run_jest('unit') - } - post { - cleanup { - teardown_jest('unit') - } - } - } - stage('Lisk Framework Integration') { - agent { node { label 'lisk-sdk-quirky' }} - steps { - setup() - run_jest('integration') - } - post { - cleanup { - teardown_jest('integration') - } - } - } - stage('Lisk Framework Functional') { - agent { node { label 'lisk-sdk-quirky' }} - steps { - setup() - run_jest('functional') - } - post { - cleanup { - teardown_jest('functional') - } - } - } - 
stage('Lisk Framework Plugins Unit') { - agent { node { label 'lisk-framework' }} - steps { - setup() - nvm(getNodejsVersion()) { - sh 'npx lerna run test:unit --scope=@liskhq/lisk-framework-*' - } - } - post { - cleanup { - // Save all the test results - sh ''' - rm -rf coverage_framework_plugins; mkdir -p coverage_framework_plugins - for f in $( find framework-plugins/*/.coverage/ -type f ); do - cp $f coverage_framework_plugins/$( sha1sum $f | cut -d ' ' -f 1 )_$( basename $f ) - done - ''' - stash name: "coverage_framework_plugins", includes: "coverage_framework_plugins/*", allowEmpty: true - } - } - } - stage('Lisk Framework Plugins Integration') { - agent { node { label 'lisk-framework' }} - steps { - setup() - nvm(getNodejsVersion()) { - sh 'npx lerna run test:integration --scope=@liskhq/lisk-framework-* --concurrency 1' - } - } - } - stage('Lisk Framework Plugins Functional') { - agent { node { label 'lisk-sdk-quirky' }} - steps { - setup() - nvm(getNodejsVersion()) { - sh 'npx lerna run test:functional --scope=@liskhq/lisk-framework-* --concurrency 1' - } - } - } - } - } - } - post { - always { - script { - try { - unstash "coverage_elements" - } catch(err) { - println "Could not unstash elements. Continuing." - } - try { - unstash "coverage_commander" - } catch(err) { - println "Could not unstash commander. Continuing." - } - try { - unstash "coverage_jest_unit" - } catch(err) { - println "Could not unstash jest_unit. Continuing." - } - try { - unstash "coverage_framework_plugins" - } catch(err) { - println "Could not unstash framework_plugins. Continuing." 
- } - sh ''' - rm -rf .nyc_output; mkdir -p .nyc_output - rm -rf ci_coverage; mkdir -p ci_coverage - cp -r coverage_elements/* ./.nyc_output - cp -r coverage_framework_plugins/* ./.nyc_output - cp -r coverage_commander/* ./.nyc_output - cp coverage_jest_unit/coverage-final.json ./.nyc_output/coverage-framework-unit.json - ''' - try { - nvm(getNodejsVersion()) { - sh ''' - npx nyc merge ./.nyc_output ci_coverage/coverage.json - node scripts/map_coverage.js ci_coverage/coverage.json - ''' - } - cobertura coberturaReportFile: 'coverage/cobertura-coverage.xml' - } catch(err) { - println "Could not report coverage statistics:\n${err}" - } - currentBuild.result = 'SUCCESS' - if (env.BRANCH_NAME == 'master') { - step([$class: 'MasterCoverageAction', scmVars: [GIT_URL: env.GIT_URL]]) - } - if (env.CHANGE_ID != null) { - step([$class: 'CompareCoverageAction', publishResultAs: 'statusCheck', scmVars: [GIT_URL: env.GIT_URL]]) - } - } - } - cleanup { - cleanWs() - } - } -} -// vim: filetype=groovy diff --git a/commander/jest.config.js b/commander/jest.config.js index 6dd91839853..a912db81e1b 100644 --- a/commander/jest.config.js +++ b/commander/jest.config.js @@ -10,7 +10,7 @@ module.exports = { '^.+\\.(ts|tsx)$': 'ts-jest', }, moduleNameMapper: { - "^axios$": "axios/dist/axios.js" + '^axios$': 'axios/dist/axios.js', }, verbose: false, collectCoverage: false, diff --git a/commander/package.json b/commander/package.json index eec489c31ef..0011b2fd392 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.1.10", + "version": "5.1.11-alpha.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,13 +91,13 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6", - "@liskhq/lisk-chain": "^0.3.4", - "@liskhq/lisk-client": "^5.2.2", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", + 
"@liskhq/lisk-client": "^5.2.3-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", - "@liskhq/lisk-genesis": "^0.2.4", + "@liskhq/lisk-db": "^0.3.6", + "@liskhq/lisk-genesis": "^0.2.5-alpha.0", "@liskhq/lisk-passphrase": "^3.1.1", "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-utils": "^0.2.1", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.9.2", + "lisk-framework": "^0.9.3-alpha.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git a/commander/src/bootstrapping/commands/blockchain/hash.ts b/commander/src/bootstrapping/commands/blockchain/hash.ts index 98c3ba6d346..2ac78b414be 100644 --- a/commander/src/bootstrapping/commands/blockchain/hash.ts +++ b/commander/src/bootstrapping/commands/blockchain/hash.ts @@ -49,10 +49,7 @@ export class HashCommand extends Command { this.debug(` ${getFullPath(blockchainPath)}`); const db = getBlockchainDB(dataPath); - const stream = db.createReadStream({ - keys: false, - values: true, - }); + const stream = db.createReadStream(); const dbHash = crypto.createHash('sha256'); diff --git a/commander/src/utils/db.ts b/commander/src/utils/db.ts index 423146e3d77..533221bee42 100644 --- a/commander/src/utils/db.ts +++ b/commander/src/utils/db.ts @@ -16,5 +16,5 @@ import * as db from '@liskhq/lisk-db'; import { getBlockchainDBPath } from './path'; -export const getBlockchainDB = (dataPath: string): db.KVStore => - new db.KVStore(getBlockchainDBPath(dataPath)); +export const getBlockchainDB = (dataPath: string): db.Database => + new db.Database(getBlockchainDBPath(dataPath)); diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index a1b1616343a..d5831a0f7d4 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.1.6", + "version": "5.1.7-alpha.0", 
"description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -45,7 +45,7 @@ "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "16.11.26", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index 22fbd27ea70..a0088415da8 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.3.4", + "version": "0.3.5-alpha.0", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-utils": "^0.2.1", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 6e12e6b313f..227effae491 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.3.4", + "version": "0.3.5-alpha.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,7 +38,7 @@ "dependencies": { "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-db": "^0.3.6", "@liskhq/lisk-tree": "^0.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", diff --git a/elements/lisk-chain/src/chain.ts b/elements/lisk-chain/src/chain.ts index 42a79981e3f..960e086f549 100644 --- 
a/elements/lisk-chain/src/chain.ts +++ b/elements/lisk-chain/src/chain.ts @@ -13,7 +13,7 @@ */ import { codec, Schema } from '@liskhq/lisk-codec'; -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import * as createDebug from 'debug'; import { EventEmitter } from 'events'; import { validator, LiskValidationError } from '@liskhq/lisk-validator'; @@ -66,7 +66,7 @@ import { import { Transaction } from './transaction'; interface ChainConstructor { - readonly db: KVStore; + readonly db: Database; // Unique requirements readonly genesisBlock: GenesisBlock; readonly accountSchemas: { [name: string]: AccountSchema }; diff --git a/elements/lisk-chain/src/data_access/data_access.ts b/elements/lisk-chain/src/data_access/data_access.ts index 002b643c898..b1f48186a78 100644 --- a/elements/lisk-chain/src/data_access/data_access.ts +++ b/elements/lisk-chain/src/data_access/data_access.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. */ -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import { codec, Schema } from '@liskhq/lisk-codec'; import { Transaction } from '../transaction'; import { BlockHeader, Block, RawBlock, Account, BlockHeaderAsset } from '../types'; @@ -24,7 +24,7 @@ import { blockHeaderSchema, blockSchema } from '../schema'; import { DB_KEY_ACCOUNTS_ADDRESS } from './constants'; interface DAConstructor { - readonly db: KVStore; + readonly db: Database; readonly registeredBlockHeaders: { readonly [key: number]: Schema; }; diff --git a/elements/lisk-chain/src/data_access/storage.ts b/elements/lisk-chain/src/data_access/storage.ts index a496eda0e46..81b31f02a43 100644 --- a/elements/lisk-chain/src/data_access/storage.ts +++ b/elements/lisk-chain/src/data_access/storage.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore, formatInt, getFirstPrefix, getLastPrefix, NotFoundError } from '@liskhq/lisk-db'; +import { Batch, Database, NotFoundError } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import { getAddressFromPublicKey, hash } from '@liskhq/lisk-cryptography'; import { RawBlock, StateDiff } from '../types'; @@ -31,10 +31,31 @@ import { import { keyString } from '../utils'; import { stateDiffSchema } from '../schema'; +export const formatInt = (num: number | bigint): string => { + let buf: Buffer; + if (typeof num === 'bigint') { + if (num < BigInt(0)) { + throw new Error('Negative number cannot be formatted'); + } + buf = Buffer.alloc(8); + buf.writeBigUInt64BE(num); + } else { + if (num < 0) { + throw new Error('Negative number cannot be formatted'); + } + buf = Buffer.alloc(4); + buf.writeUInt32BE(num, 0); + } + return buf.toString('binary'); +}; + +export const getFirstPrefix = (prefix: string): Buffer => Buffer.from(`${prefix}\x00`); +export const getLastPrefix = (prefix: string): Buffer => Buffer.from(`${prefix}\xFF`); + export class Storage { - private readonly _db: KVStore; + private readonly _db: Database; - public constructor(db: KVStore) { + public constructor(db: Database) { this._db = db; } @@ -42,7 +63,7 @@ export class Storage { Block headers */ public async getBlockHeaderByID(id: Buffer): Promise { - const block = await this._db.get(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`); + const block = await this._db.get(Buffer.from(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`)); return block; } @@ -50,7 +71,7 @@ export class Storage { const blocks = []; for (const id of arrayOfBlockIds) { try { - const block = await this._db.get(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`); + const block = await this._db.get(Buffer.from(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`)); blocks.push(block); } catch (dbError) { if (dbError instanceof NotFoundError) { @@ -64,7 +85,7 @@ export class Storage { public async getBlockHeaderByHeight(height: number): Promise { 
const stringHeight = formatInt(height); - const id = await this._db.get(`${DB_KEY_BLOCKS_HEIGHT}:${stringHeight}`); + const id = await this._db.get(Buffer.from(`${DB_KEY_BLOCKS_HEIGHT}:${stringHeight}`)); return this.getBlockHeaderByID(id); } @@ -73,8 +94,8 @@ export class Storage { toHeight: number, ): Promise { const stream = this._db.createReadStream({ - gte: `${DB_KEY_BLOCKS_HEIGHT}:${formatInt(fromHeight)}`, - lte: `${DB_KEY_BLOCKS_HEIGHT}:${formatInt(toHeight)}`, + gte: Buffer.from(`${DB_KEY_BLOCKS_HEIGHT}:${formatInt(fromHeight)}`), + lte: Buffer.from(`${DB_KEY_BLOCKS_HEIGHT}:${formatInt(toHeight)}`), reverse: true, }); const blockIDs = await new Promise((resolve, reject) => { @@ -112,8 +133,8 @@ export class Storage { public async getLastBlockHeader(): Promise { const stream = this._db.createReadStream({ - gte: getFirstPrefix(DB_KEY_BLOCKS_HEIGHT), - lte: getLastPrefix(DB_KEY_BLOCKS_HEIGHT), + gte: Buffer.from(getFirstPrefix(DB_KEY_BLOCKS_HEIGHT)), + lte: Buffer.from(getLastPrefix(DB_KEY_BLOCKS_HEIGHT)), reverse: true, limit: 1, }); @@ -257,7 +278,7 @@ export class Storage { } public async isBlockPersisted(blockID: Buffer): Promise { - return this._db.exists(`${DB_KEY_BLOCKS_ID}:${keyString(blockID)}`); + return this._db.has(Buffer.from(`${DB_KEY_BLOCKS_ID}:${keyString(blockID)}`)); } /* @@ -265,7 +286,7 @@ export class Storage { */ public async getChainState(key: string): Promise { try { - const value = await this._db.get(`${DB_KEY_CHAIN_STATE}:${key}`); + const value = await this._db.get(Buffer.from(`${DB_KEY_CHAIN_STATE}:${key}`)); return value; } catch (error) { @@ -281,7 +302,7 @@ export class Storage { */ public async getConsensusState(key: string): Promise { try { - const value = await this._db.get(`${DB_KEY_CONSENSUS_STATE}:${key}`); + const value = await this._db.get(Buffer.from(`${DB_KEY_CONSENSUS_STATE}:${key}`)); return value; } catch (error) { @@ -296,14 +317,16 @@ export class Storage { // Warning: This function should never be used. 
This exist only for migration purpose. // Specifically, only to set genesis state between 5.1.2 => 5.1.3 public async setConsensusState(key: string, val: Buffer): Promise { - await this._db.put(`${DB_KEY_CONSENSUS_STATE}:${key}`, val); + await this._db.set(Buffer.from(`${DB_KEY_CONSENSUS_STATE}:${key}`), val); } /* Accounts */ public async getAccountByAddress(address: Buffer): Promise { - const account = await this._db.get(`${DB_KEY_ACCOUNTS_ADDRESS}:${keyString(address)}`); + const account = await this._db.get( + Buffer.from(`${DB_KEY_ACCOUNTS_ADDRESS}:${keyString(address)}`), + ); return account; } @@ -334,7 +357,9 @@ export class Storage { Transactions */ public async getTransactionByID(id: Buffer): Promise { - const transaction = await this._db.get(`${DB_KEY_TRANSACTIONS_ID}:${keyString(id)}`); + const transaction = await this._db.get( + Buffer.from(`${DB_KEY_TRANSACTIONS_ID}:${keyString(id)}`), + ); return transaction; } @@ -359,7 +384,7 @@ export class Storage { } public async isTransactionPersisted(transactionId: Buffer): Promise { - return this._db.exists(`${DB_KEY_TRANSACTIONS_ID}:${keyString(transactionId)}`); + return this._db.has(Buffer.from(`${DB_KEY_TRANSACTIONS_ID}:${keyString(transactionId)}`)); } /* @@ -375,22 +400,25 @@ export class Storage { removeFromTemp = false, ): Promise { const heightStr = formatInt(height); - const batch = this._db.batch(); - batch.put(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`, header); - batch.put(`${DB_KEY_BLOCKS_HEIGHT}:${heightStr}`, id); + const batch = new Batch(); + batch.set(Buffer.from(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`), header); + batch.set(Buffer.from(`${DB_KEY_BLOCKS_HEIGHT}:${heightStr}`), id); if (payload.length > 0) { const ids = []; for (const { id: txID, value } of payload) { ids.push(txID); - batch.put(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`, value); + batch.set(Buffer.from(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`), value); } - batch.put(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(id)}`, 
Buffer.concat(ids)); + batch.set( + Buffer.from(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(id)}`), + Buffer.concat(ids), + ); } if (removeFromTemp) { - batch.del(`${DB_KEY_TEMPBLOCKS_HEIGHT}:${heightStr}`); + batch.del(Buffer.from(`${DB_KEY_TEMPBLOCKS_HEIGHT}:${heightStr}`)); } stateStore.finalize(heightStr, batch); - await batch.write(); + await this._db.write(batch); await this._cleanUntil(finalizedHeight); } @@ -402,24 +430,24 @@ export class Storage { stateStore: StateStore, saveToTemp = false, ): Promise { - const batch = this._db.batch(); + const batch = new Batch(); const heightStr = formatInt(height); - batch.del(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`); - batch.del(`${DB_KEY_BLOCKS_HEIGHT}:${heightStr}`); + batch.del(Buffer.from(`${DB_KEY_BLOCKS_ID}:${keyString(id)}`)); + batch.del(Buffer.from(`${DB_KEY_BLOCKS_HEIGHT}:${heightStr}`)); if (txIDs.length > 0) { for (const txID of txIDs) { - batch.del(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`); + batch.del(Buffer.from(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`)); } - batch.del(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(id)}`); + batch.del(Buffer.from(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(id)}`)); } if (saveToTemp) { - batch.put(`${DB_KEY_TEMPBLOCKS_HEIGHT}:${heightStr}`, fullBlock); + batch.set(Buffer.from(`${DB_KEY_TEMPBLOCKS_HEIGHT}:${heightStr}`), fullBlock); } // Take the diff to revert back states const diffKey = `${DB_KEY_DIFF_STATE}:${heightStr}`; // If there is no diff, the key might not exist - const stateDiff = await this._db.get(diffKey); + const stateDiff = await this._db.get(Buffer.from(diffKey)); const { created: createdStates, @@ -428,22 +456,22 @@ export class Storage { } = codec.decode(stateDiffSchema, stateDiff); // Delete all the newly created states for (const key of createdStates) { - batch.del(key); + batch.del(Buffer.from(key)); } // Revert all deleted values for (const { key, value: previousValue } of deletedStates) { - batch.put(key, previousValue); + 
batch.set(Buffer.from(key), previousValue); } for (const { key, value: previousValue } of updatedStates) { - batch.put(key, previousValue); + batch.set(Buffer.from(key), previousValue); } stateStore.finalize(heightStr, batch); // Delete stored diff at particular height - batch.del(diffKey); + batch.del(Buffer.from(diffKey)); // Persist the whole batch - await batch.write(); + await this._db.write(batch); return { deleted: deletedStates, created: createdStates, @@ -453,16 +481,19 @@ export class Storage { // This function is out of batch, but even if it fails, it will run again next time private async _cleanUntil(height: number): Promise { + const max = Math.max(0, height - 1); await this._db.clear({ - gte: `${DB_KEY_DIFF_STATE}:${formatInt(0)}`, - lt: `${DB_KEY_DIFF_STATE}:${formatInt(height)}`, + gte: Buffer.from(`${DB_KEY_DIFF_STATE}:${formatInt(0)}`), + lte: Buffer.from(`${DB_KEY_DIFF_STATE}:${formatInt(max)}`), }); } private async _getTransactions(blockID: Buffer): Promise { const txIDs: Buffer[] = []; try { - const ids = await this._db.get(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(blockID)}`); + const ids = await this._db.get( + Buffer.from(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${keyString(blockID)}`), + ); const idLength = 32; for (let i = 0; i < ids.length; i += idLength) { txIDs.push(ids.slice(i, i + idLength)); @@ -477,7 +508,7 @@ export class Storage { } const transactions = []; for (const txID of txIDs) { - const tx = await this._db.get(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`); + const tx = await this._db.get(Buffer.from(`${DB_KEY_TRANSACTIONS_ID}:${keyString(txID)}`)); transactions.push(tx); } diff --git a/elements/lisk-chain/src/state_store/account_store.ts b/elements/lisk-chain/src/state_store/account_store.ts index cc309240b95..b93edf23dd4 100644 --- a/elements/lisk-chain/src/state_store/account_store.ts +++ b/elements/lisk-chain/src/state_store/account_store.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is 
prohibited. */ -import { NotFoundError, BatchChain } from '@liskhq/lisk-db'; +import { NotFoundError, Batch } from '@liskhq/lisk-db'; import { objects, dataStructures } from '@liskhq/lisk-utils'; import { DataAccess } from '../data_access'; import { StateDiff, Account, AccountDefaultProps } from '../types'; @@ -142,14 +142,14 @@ export class AccountStore { this._data.delete(address); } - public finalize(batch: BatchChain): StateDiff { + public finalize(batch: Batch): StateDiff { const stateDiff = { updated: [], created: [], deleted: [] } as StateDiff; for (const updatedAccount of this._data.values()) { if (this._updatedKeys.has(updatedAccount.address)) { const encodedAccount = this._dataAccess.encodeAccount(updatedAccount); const dbKey = `${DB_KEY_ACCOUNTS_ADDRESS}:${keyString(updatedAccount.address)}`; - batch.put(dbKey, encodedAccount); + batch.set(Buffer.from(dbKey), encodedAccount); const initialAccount = this._initialAccountValue.get(updatedAccount.address); if (initialAccount !== undefined && !initialAccount.equals(encodedAccount)) { @@ -168,7 +168,7 @@ export class AccountStore { throw new Error('Deleting account should have initial account'); } const dbKey = `${DB_KEY_ACCOUNTS_ADDRESS}:${keyString(deletedAddress)}`; - batch.del(dbKey); + batch.del(Buffer.from(dbKey)); stateDiff.deleted.push({ key: dbKey, value: initialAccount, diff --git a/elements/lisk-chain/src/state_store/chain_state_store.ts b/elements/lisk-chain/src/state_store/chain_state_store.ts index 24b7f4b1fa9..465a2b1b508 100644 --- a/elements/lisk-chain/src/state_store/chain_state_store.ts +++ b/elements/lisk-chain/src/state_store/chain_state_store.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { BatchChain } from '@liskhq/lisk-db'; +import { Batch } from '@liskhq/lisk-db'; import { DataAccess } from '../data_access'; import { BlockHeader, StateDiff } from '../types'; import { DB_KEY_CHAIN_STATE } from '../data_access/constants'; @@ -105,7 +105,7 @@ export class ChainStateStore { this._updatedKeys.add(key); } - public finalize(batch: BatchChain): StateDiff { + public finalize(batch: Batch): StateDiff { const stateDiff = { updated: [], created: [], deleted: [] } as StateDiff; if (this._updatedKeys.size === 0) { @@ -115,7 +115,7 @@ export class ChainStateStore { for (const key of Array.from(this._updatedKeys)) { const dbKey = `${DB_KEY_CHAIN_STATE}:${key}`; const updatedValue = this._data[key] as Buffer; - batch.put(dbKey, updatedValue); + batch.set(Buffer.from(dbKey), updatedValue); const initialValue = this._initialValue[key]; if (initialValue !== undefined && !initialValue.equals(updatedValue)) { diff --git a/elements/lisk-chain/src/state_store/consensus_state_store.ts b/elements/lisk-chain/src/state_store/consensus_state_store.ts index 62fb751e5b5..6fbc2a748ee 100644 --- a/elements/lisk-chain/src/state_store/consensus_state_store.ts +++ b/elements/lisk-chain/src/state_store/consensus_state_store.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { BatchChain } from '@liskhq/lisk-db'; +import { Batch } from '@liskhq/lisk-db'; import { StateDiff } from '../types'; import { DB_KEY_CONSENSUS_STATE } from '../data_access/constants'; import { DataAccess } from '../data_access'; @@ -85,7 +85,7 @@ export class ConsensusStateStore { this._updatedKeys.add(key); } - public finalize(batch: BatchChain): StateDiff { + public finalize(batch: Batch): StateDiff { const stateDiff = { updated: [], created: [], deleted: [] } as StateDiff; if (this._updatedKeys.size === 0) { @@ -95,7 +95,7 @@ export class ConsensusStateStore { for (const key of Array.from(this._updatedKeys)) { const dbKey = `${DB_KEY_CONSENSUS_STATE}:${key}`; const updatedValue = this._data[key] as Buffer; - batch.put(dbKey, updatedValue); + batch.set(Buffer.from(dbKey), updatedValue); // finalized height should never be saved to diff, since it will not changed if (key === CONSENSUS_STATE_FINALIZED_HEIGHT_KEY) { diff --git a/elements/lisk-chain/src/state_store/state_store.ts b/elements/lisk-chain/src/state_store/state_store.ts index ccfe0020744..da8e6f90a1b 100644 --- a/elements/lisk-chain/src/state_store/state_store.ts +++ b/elements/lisk-chain/src/state_store/state_store.ts @@ -13,7 +13,7 @@ */ import { codec } from '@liskhq/lisk-codec'; -import { BatchChain } from '@liskhq/lisk-db'; +import { Batch } from '@liskhq/lisk-db'; import { BlockHeader, StateDiff, UpdatedDiff } from '../types'; import { AccountStore } from './account_store'; import { ChainStateStore } from './chain_state_store'; @@ -29,11 +29,7 @@ interface AdditionalInformation { readonly defaultAccount: Record; } -const saveDiff = ( - height: string, - stateDiffs: Array>, - batch: BatchChain, -): void => { +const saveDiff = (height: string, stateDiffs: Array>, batch: Batch): void => { const diffToEncode: { updated: UpdatedDiff[]; created: string[]; deleted: UpdatedDiff[] } = { updated: [], created: [], @@ -47,7 +43,7 @@ const saveDiff = ( } const encodedDiff = 
codec.encode(stateDiffSchema, diffToEncode); - batch.put(`${DB_KEY_DIFF_STATE}:${height}`, encodedDiff); + batch.set(Buffer.from(`${DB_KEY_DIFF_STATE}:${height}`), encodedDiff); }; export class StateStore { @@ -79,7 +75,7 @@ export class StateStore { this.chain.restoreSnapshot(); } - public finalize(height: string, batch: BatchChain): void { + public finalize(height: string, batch: Batch): void { const accountStateDiff = this.account.finalize(batch); const chainStateDiff = this.chain.finalize(batch); const consensusStateDiff = this.consensus.finalize(batch); diff --git a/elements/lisk-chain/test/integration/data_access/accounts.spec.ts b/elements/lisk-chain/test/integration/data_access/accounts.spec.ts index c08dab62b30..4953c473ce1 100644 --- a/elements/lisk-chain/test/integration/data_access/accounts.spec.ts +++ b/elements/lisk-chain/test/integration/data_access/accounts.spec.ts @@ -14,7 +14,7 @@ /* eslint-disable @typescript-eslint/restrict-template-expressions */ import * as path from 'path'; import * as fs from 'fs-extra'; -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Batch, Database, NotFoundError } from '@liskhq/lisk-db'; import { getRandomBytes } from '@liskhq/lisk-cryptography'; import { Storage } from '../../../src/data_access/storage'; import { @@ -26,7 +26,7 @@ import { DataAccess } from '../../../src/data_access'; import { registeredBlockHeaders } from '../../utils/block'; describe('dataAccess.transactions', () => { - let db: KVStore; + let db: Database; let storage: Storage; let dataAccess: DataAccess; let accounts: any; @@ -34,7 +34,7 @@ describe('dataAccess.transactions', () => { beforeAll(() => { const parentPath = path.join(__dirname, '../../tmp/accounts'); fs.ensureDirSync(parentPath); - db = new KVStore(path.join(parentPath, '/test-accounts.db')); + db = new Database(path.join(parentPath, '/test-accounts.db')); storage = new Storage(db); dataAccess = new DataAccess({ db, @@ -61,14 +61,14 @@ 
describe('dataAccess.transactions', () => { }, }), ]; - const batch = db.batch(); + const batch = new Batch(); for (const account of accounts) { - batch.put( - `accounts:address:${account.address.toString('binary')}`, + batch.set( + Buffer.from(`accounts:address:${account.address.toString('binary')}`), encodeDefaultAccount(account), ); } - await batch.write(); + await db.write(batch); }); afterEach(async () => { diff --git a/elements/lisk-chain/test/integration/data_access/blocks.spec.ts b/elements/lisk-chain/test/integration/data_access/blocks.spec.ts index bbcd552f6d4..c4ff0a28d5f 100644 --- a/elements/lisk-chain/test/integration/data_access/blocks.spec.ts +++ b/elements/lisk-chain/test/integration/data_access/blocks.spec.ts @@ -14,9 +14,9 @@ /* eslint-disable @typescript-eslint/restrict-template-expressions */ import * as path from 'path'; import * as fs from 'fs-extra'; -import { KVStore, formatInt, NotFoundError } from '@liskhq/lisk-db'; +import { Batch, Database, NotFoundError } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; -import { Storage } from '../../../src/data_access/storage'; +import { Storage, formatInt } from '../../../src/data_access/storage'; import { createValidDefaultBlock, encodeDefaultBlockHeader, @@ -36,7 +36,7 @@ describe('dataAccess.blocks', () => { updated: [], deleted: [], }); - let db: KVStore; + let db: Database; let storage: Storage; let dataAccess: DataAccess; let blocks: Block[]; @@ -44,7 +44,7 @@ describe('dataAccess.blocks', () => { beforeAll(() => { const parentPath = path.join(__dirname, '../../tmp/blocks'); fs.ensureDirSync(parentPath); - db = new KVStore(path.join(parentPath, '/test-blocks.db')); + db = new Database(path.join(parentPath, '/test-blocks.db')); storage = new Storage(db); }); @@ -72,36 +72,39 @@ describe('dataAccess.blocks', () => { const block303 = createValidDefaultBlock({ header: { height: 303 } }); blocks = [block300, block301, block302, block303]; - const batch = db.batch(); + const batch 
= new Batch(); for (const block of blocks) { const { payload, header } = block; - batch.put(`blocks:id:${header.id.toString('binary')}`, encodeDefaultBlockHeader(header)); - batch.put(`blocks:height:${formatInt(header.height)}`, header.id); + batch.set( + Buffer.from(`blocks:id:${header.id.toString('binary')}`), + encodeDefaultBlockHeader(header), + ); + batch.set(Buffer.from(`blocks:height:${formatInt(header.height)}`), header.id); if (payload.length) { - batch.put( - `transactions:blockID:${header.id.toString('binary')}`, + batch.set( + Buffer.from(`transactions:blockID:${header.id.toString('binary')}`), Buffer.concat(payload.map(tx => tx.id)), ); for (const tx of payload) { - batch.put(`transactions:id:${tx.id.toString('binary')}`, tx.getBytes()); + batch.set(Buffer.from(`transactions:id:${tx.id.toString('binary')}`), tx.getBytes()); } } - batch.put( - `tempBlocks:height:${formatInt(blocks[2].header.height)}`, + batch.set( + Buffer.from(`tempBlocks:height:${formatInt(blocks[2].header.height)}`), encodedDefaultBlock(blocks[2]), ); - batch.put( - `tempBlocks:height:${formatInt(blocks[3].header.height)}`, + batch.set( + Buffer.from(`tempBlocks:height:${formatInt(blocks[3].header.height)}`), encodedDefaultBlock(blocks[3]), ); - batch.put( + batch.set( // eslint-disable-next-line @typescript-eslint/restrict-plus-operands - `tempBlocks:height:${formatInt(blocks[3].header.height + 1)}`, + Buffer.from(`tempBlocks:height:${formatInt(blocks[3].header.height + 1)}`), encodedDefaultBlock(blocks[3]), ); - batch.put(`diff:${formatInt(block.header.height)}`, emptyEncodedDiff); + batch.set(Buffer.from(`diff:${formatInt(block.header.height)}`), emptyEncodedDiff); } - await batch.write(); + await db.write(batch); dataAccess.resetBlockHeaderCache(); }); @@ -333,22 +336,22 @@ describe('dataAccess.blocks', () => { await dataAccess.saveBlock(block, stateStore as any, 0); await expect( - db.exists(`blocks:id:${block.header.id.toString('binary')}`), + 
db.has(Buffer.from(`blocks:id:${block.header.id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`blocks:height:${formatInt(block.header.height)}`), + db.has(Buffer.from(`blocks:height:${formatInt(block.header.height)}`)), ).resolves.toBeTrue(); await expect( - db.exists(`transactions:blockID:${block.header.id.toString('binary')}`), + db.has(Buffer.from(`transactions:blockID:${block.header.id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`transactions:id:${block.payload[0].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${block.payload[0].id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`transactions:id:${block.payload[1].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${block.payload[1].id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`tempBlocks:height:${formatInt(block.header.height)}`), + db.has(Buffer.from(`tempBlocks:height:${formatInt(block.header.height)}`)), ).resolves.toBeTrue(); const createdBlock = await dataAccess.getBlockByID(block.header.id); expect(createdBlock.header).toStrictEqual(block.header); @@ -362,22 +365,22 @@ describe('dataAccess.blocks', () => { await dataAccess.saveBlock(block, stateStore as any, 0, true); await expect( - db.exists(`blocks:id:${block.header.id.toString('binary')}`), + db.has(Buffer.from(`blocks:id:${block.header.id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`blocks:height:${formatInt(block.header.height)}`), + db.has(Buffer.from(`blocks:height:${formatInt(block.header.height)}`)), ).resolves.toBeTrue(); await expect( - db.exists(`transactions:blockID:${block.header.id.toString('binary')}`), + db.has(Buffer.from(`transactions:blockID:${block.header.id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`transactions:id:${block.payload[0].id.toString('binary')}`), + 
db.has(Buffer.from(`transactions:id:${block.payload[0].id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`transactions:id:${block.payload[1].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${block.payload[1].id.toString('binary')}`)), ).resolves.toBeTrue(); await expect( - db.exists(`tempBlocks:height:${formatInt(block.header.height)}`), + db.has(Buffer.from(`tempBlocks:height:${formatInt(block.header.height)}`)), ).resolves.toBeFalse(); const createdBlock = await dataAccess.getBlockByID(block.header.id); expect(createdBlock.header).toStrictEqual(block.header); @@ -388,12 +391,12 @@ describe('dataAccess.blocks', () => { }); it('should delete diff before the finalized height', async () => { - await db.put(`diff:${formatInt(99)}`, Buffer.from('random diff')); - await db.put(`diff:${formatInt(100)}`, Buffer.from('random diff 2')); + await db.set(Buffer.from(`diff:${formatInt(99)}`), Buffer.from('random diff')); + await db.set(Buffer.from(`diff:${formatInt(100)}`), Buffer.from('random diff 2')); await dataAccess.saveBlock(block, stateStore as any, 100, true); - await expect(db.exists(`diff:${formatInt(100)}`)).resolves.toBeTrue(); - await expect(db.exists(`diff:${formatInt(99)}`)).resolves.toBeFalse(); + await expect(db.has(Buffer.from(`diff:${formatInt(100)}`))).resolves.toBeTrue(); + await expect(db.has(Buffer.from(`diff:${formatInt(99)}`))).resolves.toBeFalse(); }); }); @@ -407,22 +410,22 @@ describe('dataAccess.blocks', () => { await dataAccess.deleteBlock(blocks[2], stateStore as any); await expect( - db.exists(`blocks:id:${blocks[2].header.id.toString('binary')}`), + db.has(Buffer.from(`blocks:id:${blocks[2].header.id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`blocks:height:${formatInt(blocks[2].header.height)}`), + db.has(Buffer.from(`blocks:height:${formatInt(blocks[2].header.height)}`)), ).resolves.toBeFalse(); await expect( - 
db.exists(`transactions:blockID:${blocks[2].header.id.toString('binary')}`), + db.has(Buffer.from(`transactions:blockID:${blocks[2].header.id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`transactions:id:${blocks[2].payload[0].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${blocks[2].payload[0].id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`transactions:id:${blocks[2].payload[1].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${blocks[2].payload[1].id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`tempBlocks:height:${formatInt(blocks[2].header.height)}`), + db.has(Buffer.from(`tempBlocks:height:${formatInt(blocks[2].header.height)}`)), ).resolves.toBeFalse(); }); @@ -439,8 +442,8 @@ describe('dataAccess.blocks', () => { balance: BigInt(100000000), }, }); - await db.put( - `diff:${formatInt(blocks[2].header.height)}`, + await db.set( + Buffer.from(`diff:${formatInt(blocks[2].header.height)}`), codec.encode(stateDiffSchema, { created: [], updated: [ @@ -470,11 +473,11 @@ describe('dataAccess.blocks', () => { it('should throw an error when there is no diff', async () => { // Deleting temp blocks to test the saving - await db.del(`diff:${formatInt(blocks[2].header.height)}`); + await db.del(Buffer.from(`diff:${formatInt(blocks[2].header.height)}`)); await dataAccess.clearTempBlocks(); await expect(dataAccess.deleteBlock(blocks[2], stateStore as any)).rejects.toThrow( - 'Specified key diff:0000012e does not exist', + 'Specified key 646966663a0000012e does not exist', ); }); @@ -484,22 +487,22 @@ describe('dataAccess.blocks', () => { await dataAccess.deleteBlock(blocks[2], stateStore as any, true); await expect( - db.exists(`blocks:id:${blocks[2].header.id.toString('binary')}`), + db.has(Buffer.from(`blocks:id:${blocks[2].header.id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - 
db.exists(`blocks:height:${formatInt(blocks[2].header.height)}`), + db.has(Buffer.from(`blocks:height:${formatInt(blocks[2].header.height)}`)), ).resolves.toBeFalse(); await expect( - db.exists(`transactions:blockID:${blocks[2].header.id.toString('binary')}`), + db.has(Buffer.from(`transactions:blockID:${blocks[2].header.id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`transactions:id:${blocks[2].payload[0].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${blocks[2].payload[0].id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`transactions:id:${blocks[2].payload[1].id.toString('binary')}`), + db.has(Buffer.from(`transactions:id:${blocks[2].payload[1].id.toString('binary')}`)), ).resolves.toBeFalse(); await expect( - db.exists(`tempBlocks:height:${formatInt(blocks[2].header.height)}`), + db.has(Buffer.from(`tempBlocks:height:${formatInt(blocks[2].header.height)}`)), ).resolves.toBeTrue(); const tempBlocks = await dataAccess.getTempBlocks(); diff --git a/elements/lisk-chain/test/integration/data_access/transactions.spec.ts b/elements/lisk-chain/test/integration/data_access/transactions.spec.ts index a01a4af2f26..f1222ccc405 100644 --- a/elements/lisk-chain/test/integration/data_access/transactions.spec.ts +++ b/elements/lisk-chain/test/integration/data_access/transactions.spec.ts @@ -14,21 +14,21 @@ /* eslint-disable @typescript-eslint/restrict-template-expressions */ import * as path from 'path'; import * as fs from 'fs-extra'; -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Batch, Database, NotFoundError } from '@liskhq/lisk-db'; import { DataAccess } from '../../../src/data_access'; import { defaultAccountSchema } from '../../utils/account'; import { registeredBlockHeaders } from '../../utils/block'; import { getTransaction } from '../../utils/transaction'; describe('dataAccess.transactions', () => { - let db: KVStore; + let db: Database; let dataAccess: DataAccess; let 
transactions: any; beforeAll(() => { const parentPath = path.join(__dirname, '../../tmp/transactions'); fs.ensureDirSync(parentPath); - db = new KVStore(path.join(parentPath, '/test-transactions.db')); + db = new Database(path.join(parentPath, '/test-transactions.db')); dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema, @@ -40,11 +40,11 @@ describe('dataAccess.transactions', () => { beforeEach(async () => { transactions = [getTransaction({ nonce: BigInt(1000) }), getTransaction({ nonce: BigInt(0) })]; - const batch = db.batch(); + const batch = new Batch(); for (const tx of transactions) { - batch.put(`transactions:id:${tx.id.toString('binary')}`, tx.getBytes()); + batch.set(Buffer.from(`transactions:id:${tx.id.toString('binary')}`), tx.getBytes()); } - await batch.write(); + await db.write(batch); }); afterEach(async () => { diff --git a/elements/lisk-chain/test/integration/state_store/save_diff.spec.ts b/elements/lisk-chain/test/integration/state_store/save_diff.spec.ts index 50e329e5a1d..e140598a56b 100644 --- a/elements/lisk-chain/test/integration/state_store/save_diff.spec.ts +++ b/elements/lisk-chain/test/integration/state_store/save_diff.spec.ts @@ -15,7 +15,7 @@ import * as path from 'path'; import * as fs from 'fs-extra'; import { codec } from '@liskhq/lisk-codec'; -import { KVStore } from '@liskhq/lisk-db'; +import { Batch, Database } from '@liskhq/lisk-db'; import { createFakeDefaultAccount, defaultAccountSchema, @@ -39,7 +39,7 @@ describe('stateStore.finalize.saveDiff', () => { const defaultNetworkIdentifier = Buffer.from( '93d00fe5be70d90e7ae247936a2e7d83b50809c79b73fa14285f02c842348b3e', ); - let db: KVStore; + let db: Database; let dataAccess: DataAccess; let stateStore: StateStore; let accounts: Account[]; @@ -47,7 +47,7 @@ describe('stateStore.finalize.saveDiff', () => { beforeAll(() => { const parentPath = path.join(__dirname, '../../tmp/diff'); fs.ensureDirSync(parentPath); - db = new KVStore(path.join(parentPath, 
'/test-diff.db')); + db = new Database(path.join(parentPath, '/test-diff.db')); dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema as any, @@ -95,12 +95,12 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.account.set(account.address, account); } const fakeHeight = '1'; - const batch = db.batch(); + const batch = new Batch(); // Act stateStore.finalize(fakeHeight, batch); - await batch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${fakeHeight}`); + await db.write(batch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${fakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert @@ -120,12 +120,12 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.account.set(account.address, account); } const fakeHeight = '1'; - const batch = db.batch(); + const batch = new Batch(); // Act await stateStore.account.del(accounts[0].address); stateStore.finalize(fakeHeight, batch); - await batch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${fakeHeight}`); + await db.write(batch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${fakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert @@ -142,9 +142,9 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.account.set(account.address, account); } const fakeHeight = '1'; - const batch = db.batch(); + const batch = new Batch(); stateStore.finalize(fakeHeight, batch); - await batch.write(); + await db.write(batch); // Act const nextHeight = '2'; @@ -160,12 +160,12 @@ describe('stateStore.finalize.saveDiff', () => { const originalBuffer = encodeDefaultAccount(originalAccount); originalAccount.token.balance = BigInt(777); await newStateStore.account.set(accounts[0].address, originalAccount); - const nextBatch = db.batch(); + const nextBatch = new Batch(); await newStateStore.account.del(accounts[0].address); newStateStore.finalize(nextHeight, nextBatch); - await 
nextBatch.write(); + await db.write(nextBatch); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${nextHeight}`); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${nextHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert @@ -186,12 +186,12 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.chain.set('key1', Buffer.from('value1')); await stateStore.chain.set('key2', Buffer.from('value2')); const fakeHeight = '2'; - const batch = db.batch(); + const batch = new Batch(); // Act stateStore.finalize(fakeHeight, batch); - await batch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${fakeHeight}`); + await db.write(batch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${fakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert @@ -207,12 +207,12 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.consensus.set('key3', Buffer.from('value3')); await stateStore.consensus.set('key4', Buffer.from('value4')); const fakeHeight = '3'; - const batch = db.batch(); + const batch = new Batch(); // Act stateStore.finalize(fakeHeight, batch); - await batch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${fakeHeight}`); + await db.write(batch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${fakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert @@ -233,12 +233,12 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.consensus.set('key3', Buffer.from('value3')); await stateStore.consensus.set('key4', Buffer.from('value4')); const fakeHeight = '4'; - const batch = db.batch(); + const batch = new Batch(); // Act stateStore.finalize(fakeHeight, batch); - await batch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${fakeHeight}`); + await db.write(batch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${fakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, 
diff); // Assert @@ -262,9 +262,9 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.consensus.set('key1', Buffer.from('value1')); await stateStore.consensus.set('key2', Buffer.from('value2')); const fakeHeight = '5'; - const batch = db.batch(); + const batch = new Batch(); stateStore.finalize(fakeHeight, batch); - await batch.write(); + await db.write(batch); // Update stateStore = new StateStore(dataAccess, { @@ -282,10 +282,10 @@ describe('stateStore.finalize.saveDiff', () => { await stateStore.consensus.set('key1', updatedVal1); await stateStore.consensus.set('key2', updatedVal2); const updatedFakeHeight = '6'; - const updateBatch = db.batch(); + const updateBatch = new Batch(); stateStore.finalize(updatedFakeHeight, updateBatch); - await updateBatch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${updatedFakeHeight}`); + await db.write(updateBatch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${updatedFakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert @@ -295,12 +295,12 @@ describe('stateStore.finalize.saveDiff', () => { it('should save empty diff if state was not changed', async () => { // Arrange const fakeHeight = '3'; - const batch = db.batch(); + const batch = new Batch(); // Act stateStore.finalize(fakeHeight, batch); - await batch.write(); - const diff = await db.get(`${DB_KEY_DIFF_STATE}:${fakeHeight}`); + await db.write(batch); + const diff = await db.get(Buffer.from(`${DB_KEY_DIFF_STATE}:${fakeHeight}`)); const decodedDiff = codec.decode(stateDiffSchema, diff); // Assert diff --git a/elements/lisk-chain/test/unit/chain.spec.ts b/elements/lisk-chain/test/unit/chain.spec.ts index d41d80a7335..db1df030b23 100644 --- a/elements/lisk-chain/test/unit/chain.spec.ts +++ b/elements/lisk-chain/test/unit/chain.spec.ts @@ -14,7 +14,7 @@ import { Readable } from 'stream'; import { when } from 'jest-when'; -import { KVStore, NotFoundError, formatInt } from '@liskhq/lisk-db'; +import { 
Batch, Database, NotFoundError } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import { getRandomBytes } from '@liskhq/lisk-cryptography'; import { Chain } from '../../src/chain'; @@ -31,6 +31,7 @@ import { Block, Validator } from '../../src/types'; import { createFakeDefaultAccount, defaultAccountModules } from '../utils/account'; import { getTransaction } from '../utils/transaction'; import { genesisInfoSchema, stateDiffSchema, validatorsSchema } from '../../src/schema'; +import { formatInt } from '../../src/data_access/storage'; import { createStateStore } from '../utils/state_store'; import { CONSENSUS_STATE_GENESIS_INFO, CONSENSUS_STATE_VALIDATORS_KEY } from '../../src/constants'; @@ -65,8 +66,7 @@ describe('chain', () => { beforeEach(() => { // Arrange - db = new KVStore('temp'); - (formatInt as jest.Mock).mockImplementation(n => n); + db = new Database('temp'); (db.createReadStream as jest.Mock).mockReturnValue(Readable.from([])); chainInstance = new Chain({ @@ -106,11 +106,11 @@ describe('chain', () => { ); when(db.get) .mockRejectedValue(new NotFoundError('Data not found') as never) - .calledWith(`blocks:height:${formatInt(1)}`) + .calledWith(Buffer.from(`blocks:height:${formatInt(1)}`)) .mockResolvedValue(genesisBlock.header.id as never) - .calledWith(`blocks:id:${genesisBlock.header.id.toString('binary')}`) + .calledWith(Buffer.from(`blocks:id:${genesisBlock.header.id.toString('binary')}`)) .mockResolvedValue(encodeGenesisBlockHeader(genesisBlock.header) as never) - .calledWith(`blocks:id:${lastBlock.header.id.toString('binary')}`) + .calledWith(Buffer.from(`blocks:id:${lastBlock.header.id.toString('binary')}`)) .mockResolvedValue(encodeDefaultBlockHeader(lastBlock.header) as never); jest.spyOn(chainInstance.dataAccess, 'getBlockHeadersByHeightBetween').mockResolvedValue([]); }); @@ -179,19 +179,14 @@ describe('chain', () => { describe('saveBlock', () => { let stateStoreStub: StateStore; - let batchMock: any; let savingBlock: Block; 
const fakeAccounts = [createFakeDefaultAccount(), createFakeDefaultAccount()]; beforeEach(() => { savingBlock = createValidDefaultBlock({ header: { height: 300 } }); - batchMock = { - put: jest.fn(), - del: jest.fn(), - write: jest.fn(), - }; - (db.batch as jest.Mock).mockReturnValue(batchMock); + jest.spyOn(Batch.prototype, 'set'); + jest.spyOn(Batch.prototype, 'del'); stateStoreStub = { finalize: jest.fn(), account: { @@ -205,8 +200,8 @@ describe('chain', () => { removeFromTempTable: true, }); expect(db.clear).toHaveBeenCalledWith({ - gte: `diff:${formatInt(0)}`, - lt: `diff:${formatInt(100)}`, + gte: Buffer.from(`diff:${formatInt(0)}`), + lte: Buffer.from(`diff:${formatInt(99)}`), }); }); @@ -214,20 +209,20 @@ describe('chain', () => { await chainInstance.saveBlock(savingBlock, stateStoreStub, 0, { removeFromTempTable: true, }); - expect(batchMock.del).toHaveBeenCalledWith( - `tempBlocks:height:${formatInt(savingBlock.header.height)}`, + expect(Batch.prototype.del).toHaveBeenCalledWith( + Buffer.from(`tempBlocks:height:${formatInt(savingBlock.header.height)}`), ); expect(stateStoreStub.finalize).toHaveBeenCalledTimes(1); }); it('should save block', async () => { await chainInstance.saveBlock(savingBlock, stateStoreStub, 0); - expect(batchMock.put).toHaveBeenCalledWith( - `blocks:id:${savingBlock.header.id.toString('binary')}`, + expect(Batch.prototype.set).toHaveBeenCalledWith( + Buffer.from(`blocks:id:${savingBlock.header.id.toString('binary')}`), expect.anything(), ); - expect(batchMock.put).toHaveBeenCalledWith( - `blocks:height:${formatInt(savingBlock.header.height)}`, + expect(Batch.prototype.set).toHaveBeenCalledWith( + Buffer.from(`blocks:height:${formatInt(savingBlock.header.height)}`), expect.anything(), ); expect(stateStoreStub.finalize).toHaveBeenCalledTimes(1); @@ -253,15 +248,9 @@ describe('chain', () => { const fakeAccounts = [createFakeDefaultAccount(), createFakeDefaultAccount()]; let stateStoreStub: StateStore; - let batchMock: any; 
beforeEach(() => { - batchMock = { - put: jest.fn(), - del: jest.fn(), - write: jest.fn(), - }; - (db.batch as jest.Mock).mockReturnValue(batchMock); + jest.spyOn(Batch.prototype, 'set'); stateStoreStub = { finalize: jest.fn(), account: { @@ -293,11 +282,12 @@ describe('chain', () => { const block = createValidDefaultBlock(); when(db.get) - .calledWith(`diff:${formatInt(block.header.height)}`) + .calledWith(Buffer.from(`diff:${formatInt(block.header.height)}`)) .mockResolvedValue(emptyEncodedDiff as never); const deleteBlockError = new Error('Delete block failed'); - batchMock.write.mockRejectedValue(deleteBlockError); + + db.write.mockRejectedValue(deleteBlockError); // Act & Assert await expect(chainInstance.removeBlock(block, stateStoreStub)).rejects.toEqual( @@ -310,13 +300,13 @@ describe('chain', () => { jest.spyOn(chainInstance.dataAccess, 'getBlockByID').mockResolvedValue(genesisBlock as never); const block = createValidDefaultBlock(); when(db.get) - .calledWith(`diff:${formatInt(block.header.height)}`) + .calledWith(Buffer.from(`diff:${formatInt(block.header.height)}`)) .mockResolvedValue(emptyEncodedDiff as never); // Act await chainInstance.removeBlock(block, stateStoreStub); // Assert - expect(batchMock.put).not.toHaveBeenCalledWith( - `tempBlocks:height:${formatInt(block.header.height)}`, + expect(Batch.prototype.set).not.toHaveBeenCalledWith( + Buffer.from(`tempBlocks:height:${formatInt(block.header.height)}`), block, ); }); @@ -327,15 +317,15 @@ describe('chain', () => { const tx = getTransaction(); const block = createValidDefaultBlock({ payload: [tx] }); when(db.get) - .calledWith(`diff:${formatInt(block.header.height)}`) + .calledWith(Buffer.from(`diff:${formatInt(block.header.height)}`)) .mockResolvedValue(emptyEncodedDiff as never); // Act await chainInstance.removeBlock(block, stateStoreStub, { saveTempBlock: true, }); // Assert - expect(batchMock.put).toHaveBeenCalledWith( - `tempBlocks:height:${formatInt(block.header.height)}`, + 
expect(Batch.prototype.set).toHaveBeenCalledWith( + Buffer.from(`tempBlocks:height:${formatInt(block.header.height)}`), encodedDefaultBlock(block), ); }); @@ -374,7 +364,7 @@ describe('chain', () => { validators, }); when(db.get) - .calledWith(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`) + .calledWith(Buffer.from(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`)) .mockResolvedValue(validatorBuffer as never); }); @@ -402,7 +392,7 @@ describe('chain', () => { validators, }); when(db.get) - .calledWith(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`) + .calledWith(Buffer.from(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`)) .mockResolvedValue(validatorBuffer as never); }); @@ -440,9 +430,9 @@ describe('chain', () => { }); when(db.get) - .calledWith(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`) + .calledWith(Buffer.from(`consensus:${CONSENSUS_STATE_VALIDATORS_KEY}`)) .mockResolvedValue(validatorBuffer as never) - .calledWith(`consensus:${CONSENSUS_STATE_GENESIS_INFO}`) + .calledWith(Buffer.from(`consensus:${CONSENSUS_STATE_GENESIS_INFO}`)) .mockResolvedValue(genesisInfoBufer as never); }); diff --git a/elements/lisk-chain/test/unit/data_access/data_access.spec.ts b/elements/lisk-chain/test/unit/data_access/data_access.spec.ts index 8a5457e5f5c..cfd4afd7978 100644 --- a/elements/lisk-chain/test/unit/data_access/data_access.spec.ts +++ b/elements/lisk-chain/test/unit/data_access/data_access.spec.ts @@ -13,7 +13,7 @@ */ import { Readable } from 'stream'; import { when } from 'jest-when'; -import { KVStore, formatInt, NotFoundError, getFirstPrefix, getLastPrefix } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import { DataAccess } from '../../../src/data_access'; import { createFakeBlockHeader, @@ -29,6 +29,7 @@ import { defaultAccountSchema, } from '../../utils/account'; import { getGenesisBlockHeaderAssetSchema, blockHeaderAssetSchema } from '../../../src/schema'; +import { formatInt } from '../../../src/data_access/storage'; 
jest.mock('@liskhq/lisk-db'); @@ -38,11 +39,9 @@ describe('data_access', () => { let block: Block; beforeEach(() => { - db = new KVStore('temp'); + db = new Database('temp'); (db.createReadStream as jest.Mock).mockReturnValue(Readable.from([])); - (formatInt as jest.Mock).mockImplementation(num => num); - (getFirstPrefix as jest.Mock).mockImplementation(str => str); - (getLastPrefix as jest.Mock).mockImplementation(str => str); + dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema, @@ -121,17 +120,21 @@ describe('data_access', () => { ]), ); when(db.get) - .calledWith(`blocks:height:${formatInt(block.header.height)}`) + .calledWith(Buffer.from(`blocks:height:${formatInt(block.header.height)}`)) .mockResolvedValue(block.header.id as never) - .calledWith(`blocks:id:${block.header.id.toString('binary')}`) + .calledWith(Buffer.from(`blocks:id:${block.header.id.toString('binary')}`)) .mockResolvedValue(encodeDefaultBlockHeader(block.header) as never); // Act await dataAccess.getBlockHeaderByHeight(1); // Assert expect(db.get).toHaveBeenCalledTimes(2); - expect(db.get).toHaveBeenCalledWith(`blocks:height:${formatInt(block.header.height)}`); - expect(db.get).toHaveBeenCalledWith(`blocks:id:${block.header.id.toString('binary')}`); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`blocks:height:${formatInt(block.header.height)}`), + ); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`blocks:id:${block.header.id.toString('binary')}`), + ); }); }); @@ -184,17 +187,21 @@ describe('data_access', () => { it('should return persisted blocks if cache does not exist', async () => { // Arrange when(db.get) - .calledWith(`blocks:height:${formatInt(block.header.height)}`) + .calledWith(Buffer.from(`blocks:height:${formatInt(block.header.height)}`)) .mockResolvedValue(block.header.id as never) - .calledWith(`blocks:id:${block.header.id.toString('binary')}`) + .calledWith(Buffer.from(`blocks:id:${block.header.id.toString('binary')}`)) 
.mockResolvedValue(encodeDefaultBlockHeader(block.header) as never); // Act await dataAccess.getBlockHeadersWithHeights([1]); // Assert expect(db.get).toHaveBeenCalledTimes(2); - expect(db.get).toHaveBeenCalledWith(`blocks:height:${formatInt(block.header.height)}`); - expect(db.get).toHaveBeenCalledWith(`blocks:id:${block.header.id.toString('binary')}`); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`blocks:height:${formatInt(block.header.height)}`), + ); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`blocks:id:${block.header.id.toString('binary')}`), + ); }); }); @@ -226,7 +233,9 @@ describe('data_access', () => { // Assert expect(db.get).toHaveBeenCalledTimes(1); expect(db.createReadStream).toHaveBeenCalledTimes(1); - expect(db.get).toHaveBeenCalledWith(`blocks:id:${block.header.id.toString('binary')}`); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`blocks:id:${block.header.id.toString('binary')}`), + ); }); }); @@ -317,13 +326,13 @@ describe('data_access', () => { // Arrange when(db.get) .mockRejectedValue(new NotFoundError('Data not found') as never) - .calledWith('blocks:id:1') + .calledWith(Buffer.from('blocks:id:1')) .mockResolvedValue(encodeDefaultBlockHeader(block.header) as never); // Act await dataAccess.getBlocksByIDs([Buffer.from('1')]); // Assert - expect(db.get).toHaveBeenCalledWith('blocks:id:1'); + expect(db.get).toHaveBeenCalledWith(Buffer.from('blocks:id:1')); }); }); @@ -339,7 +348,7 @@ describe('data_access', () => { ); when(db.get) .mockRejectedValue(new NotFoundError('Data not found') as never) - .calledWith(`blocks:id:${block.header.id.toString('binary')}`) + .calledWith(Buffer.from(`blocks:id:${block.header.id.toString('binary')}`)) .mockResolvedValue(encodeDefaultBlockHeader(block.header) as never); // Act await dataAccess.getBlocksByHeightBetween(1, 2); @@ -362,7 +371,7 @@ describe('data_access', () => { ); when(db.get) .mockRejectedValue(new NotFoundError('Data not found') as never) - 
.calledWith(`blocks:id:${block.header.id.toString('binary')}`) + .calledWith(Buffer.from(`blocks:id:${block.header.id.toString('binary')}`)) .mockResolvedValue(encodeDefaultBlockHeader(block.header) as never); // Act await dataAccess.getLastBlock(); @@ -379,7 +388,9 @@ describe('data_access', () => { await dataAccess.isBlockPersisted(block.header.id); // Assert - expect(db.exists).toHaveBeenCalledWith(`blocks:id:${block.header.id.toString('binary')}`); + expect(db.has).toHaveBeenCalledWith( + Buffer.from(`blocks:id:${block.header.id.toString('binary')}`), + ); }); }); @@ -439,8 +450,8 @@ describe('data_access', () => { // Assert expect(db.clear).toHaveBeenCalledTimes(1); expect(db.clear).toHaveBeenCalledWith({ - gte: expect.stringContaining('tempBlocks:height'), - lte: expect.stringContaining('tempBlocks:height'), + gte: Buffer.from('tempBlocks:height\x00'), + lte: Buffer.from('tempBlocks:height\xff'), }); }); }); @@ -458,7 +469,7 @@ describe('data_access', () => { }, }); when(db.get) - .calledWith(`accounts:address:${account.address.toString('binary')}`) + .calledWith(Buffer.from(`accounts:address:${account.address.toString('binary')}`)) .mockResolvedValue(encodeDefaultAccount(account) as never); // Act const result = await dataAccess.getAccountByAddress<{ token: { balance: bigint } }>( @@ -466,7 +477,9 @@ describe('data_access', () => { ); // Assert - expect(db.get).toHaveBeenCalledWith(`accounts:address:${account.address.toString('binary')}`); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`accounts:address:${account.address.toString('binary')}`), + ); expect(typeof result.token.balance).toEqual('bigint'); }); }); @@ -495,9 +508,9 @@ describe('data_access', () => { }), ]; when(db.get) - .calledWith(`accounts:address:${accounts[0].address.toString('binary')}`) + .calledWith(Buffer.from(`accounts:address:${accounts[0].address.toString('binary')}`)) .mockResolvedValue(encodeDefaultAccount(accounts[0]) as never) - 
.calledWith(`accounts:address:${accounts[1].address.toString('binary')}`) + .calledWith(Buffer.from(`accounts:address:${accounts[1].address.toString('binary')}`)) .mockResolvedValue(encodeDefaultAccount(accounts[1]) as never); // Act const result = await dataAccess.getAccountsByAddress<{ token: { balance: bigint } }>( @@ -531,13 +544,15 @@ describe('data_access', () => { }); // Arrange when(db.get) - .calledWith(`transactions:id:${tx.id.toString('binary')}`) + .calledWith(Buffer.from(`transactions:id:${tx.id.toString('binary')}`)) .mockResolvedValue(tx.getBytes() as never); // Act const [result] = await dataAccess.getTransactionsByIDs([tx.id]); // Assert - expect(db.get).toHaveBeenCalledWith(`transactions:id:${tx.id.toString('binary')}`); + expect(db.get).toHaveBeenCalledWith( + Buffer.from(`transactions:id:${tx.id.toString('binary')}`), + ); expect(typeof result.fee).toBe('bigint'); }); }); @@ -548,7 +563,7 @@ describe('data_access', () => { await dataAccess.isTransactionPersisted(Buffer.from('1')); // Assert - expect(db.exists).toHaveBeenCalledWith('transactions:id:1'); + expect(db.has).toHaveBeenCalledWith(Buffer.from('transactions:id:1')); }); }); diff --git a/elements/lisk-chain/test/unit/process.spec.ts b/elements/lisk-chain/test/unit/process.spec.ts index e701905ff0a..f37afdd0072 100644 --- a/elements/lisk-chain/test/unit/process.spec.ts +++ b/elements/lisk-chain/test/unit/process.spec.ts @@ -13,7 +13,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { getRandomBytes, getAddressFromPublicKey, hash } from '@liskhq/lisk-cryptography'; import { when } from 'jest-when'; import { codec } from '@liskhq/lisk-codec'; @@ -70,7 +70,7 @@ describe('chain/process block', () => { let block: Block; beforeEach(() => { - db = new KVStore('temp'); + db = new Database('temp'); chainInstance = new Chain({ db, genesisBlock, @@ -408,7 +408,7 @@ describe('chain/process block', () => { validators: [{ address: getRandomBytes(20) }], }); when(db.get) - .calledWith('consensus:validators') + .calledWith(Buffer.from('consensus:validators')) .mockResolvedValue(validatorBuffer as never); // Act & assert @@ -429,7 +429,7 @@ describe('chain/process block', () => { ], }); when(db.get) - .calledWith('consensus:validators') + .calledWith(Buffer.from('consensus:validators')) .mockResolvedValue(validatorBuffer as never); // Act & assert @@ -504,7 +504,7 @@ describe('chain/process block', () => { validators: [{ address: getAddressFromPublicKey(block.header.generatorPublicKey) }], }); when(db.get) - .calledWith('consensus:validators') + .calledWith(Buffer.from('consensus:validators')) .mockResolvedValue(validatorBuffer as never); // Act & assert diff --git a/elements/lisk-chain/test/unit/state_store/account.spec.ts b/elements/lisk-chain/test/unit/state_store/account.spec.ts index 20da6240682..94f8550dc4d 100644 --- a/elements/lisk-chain/test/unit/state_store/account.spec.ts +++ b/elements/lisk-chain/test/unit/state_store/account.spec.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore, BatchChain, NotFoundError } from '@liskhq/lisk-db'; +import { Database, Batch, NotFoundError } from '@liskhq/lisk-db'; import { when } from 'jest-when'; import { objects } from '@liskhq/lisk-utils'; import { StateStore } from '../../../src/state_store'; @@ -54,7 +54,7 @@ describe('state store / account', () => { let db: any; beforeEach(async () => { - db = new KVStore('temp'); + db = new Database('temp'); const dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema, @@ -70,11 +70,11 @@ describe('state store / account', () => { }); // Setting this as default behavior throws UnhandledPromiseRejection, so it is specifying the non-existing account const dbGetMock = when(db.get) - .calledWith(`accounts:address:${Buffer.from('123L', 'utf8').toString('binary')}`) + .calledWith(Buffer.from(`accounts:address:${Buffer.from('123L', 'utf8').toString('binary')}`)) .mockRejectedValue(new NotFoundError('Data not found') as never); for (const data of accountInDB) { dbGetMock - .calledWith(`accounts:address:${data.key.toString('binary')}`) + .calledWith(Buffer.from(`accounts:address:${data.key.toString('binary')}`)) .mockResolvedValue(data.value as never); } for (const account of stateStoreAccounts) { @@ -100,7 +100,7 @@ describe('state store / account', () => { await stateStore.account.get(accountInDB[2].key); // Assert expect(db.get).toHaveBeenCalledWith( - `accounts:address:${accountInDB[2].key.toString('binary')}`, + Buffer.from(`accounts:address:${accountInDB[2].key.toString('binary')}`), ); }); @@ -129,7 +129,7 @@ describe('state store / account', () => { await stateStore.account.get(accountInDB[2].key); // Assert expect(db.get).toHaveBeenCalledWith( - `accounts:address:${accountInDB[2].key.toString('binary')}`, + Buffer.from(`accounts:address:${accountInDB[2].key.toString('binary')}`), ); }); @@ -181,7 +181,7 @@ describe('state store / account', () => { // Arrange const inmemoryAccount = createFakeDefaultAccount({ token: { balance: 
BigInt(200000000) } }); when(db.get) - .calledWith(`accounts:address:${inmemoryAccount.address.toString('binary')}`) + .calledWith(Buffer.from(`accounts:address:${inmemoryAccount.address.toString('binary')}`)) .mockRejectedValue(new NotFoundError('Data not found') as never); await stateStore.account.set(inmemoryAccount.address, inmemoryAccount); // Act @@ -216,10 +216,10 @@ describe('state store / account', () => { describe('finalize', () => { let existingAccount; let updatedAccount: Account; - let batchStub: BatchChain; + let batchStub: Batch; beforeEach(async () => { - batchStub = { put: jest.fn() } as any; + batchStub = { set: jest.fn() } as any; existingAccount = await stateStore.account.get(accountInDB[0].key); updatedAccount = objects.cloneDeep(existingAccount); @@ -231,19 +231,19 @@ describe('state store / account', () => { it('should save the account state in the database', () => { stateStore.account.finalize(batchStub); - expect(batchStub.put).toHaveBeenCalledWith( - `accounts:address:${updatedAccount.address.toString('binary')}`, + expect(batchStub.set).toHaveBeenCalledWith( + Buffer.from(`accounts:address:${updatedAccount.address.toString('binary')}`), expect.any(Buffer), ); }); }); describe('diff', () => { - let batchStub: BatchChain; + let batchStub: Batch; let stateDiff: StateDiff; beforeEach(() => { - batchStub = { put: jest.fn(), del: jest.fn() } as any; + batchStub = { set: jest.fn(), del: jest.fn() } as any; }); it('should have updated with initial values', async () => { diff --git a/elements/lisk-chain/test/unit/state_store/chain_state.spec.ts b/elements/lisk-chain/test/unit/state_store/chain_state.spec.ts index 1336a89d413..e861b2658a9 100644 --- a/elements/lisk-chain/test/unit/state_store/chain_state.spec.ts +++ b/elements/lisk-chain/test/unit/state_store/chain_state.spec.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore, BatchChain } from '@liskhq/lisk-db'; +import { Database, Batch } from '@liskhq/lisk-db'; import { when } from 'jest-when'; import { StateStore } from '../../../src/state_store'; import { DataAccess } from '../../../src/data_access'; @@ -31,7 +31,7 @@ describe('state store / chain_state', () => { ] as unknown) as ReadonlyArray; beforeEach(() => { - db = new KVStore('temp'); + db = new Database('temp'); const dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema, @@ -70,7 +70,7 @@ describe('state store / chain_state', () => { // Arrange await stateStore.chain.set('key1', Buffer.from('value1')); when(db.get) - .calledWith('chain:key1') + .calledWith(Buffer.from('chain:key1')) .mockResolvedValue('value5' as never); // Act & Assert expect(await stateStore.chain.get('key1')).toEqual(Buffer.from('value1')); @@ -79,7 +79,7 @@ describe('state store / chain_state', () => { it('should try to get value from database if not in cache', async () => { // Arrange when(db.get) - .calledWith('chain:key1') + .calledWith(Buffer.from('chain:key1')) .mockResolvedValue('value5' as never); // Act & Assert expect(await stateStore.chain.get('key1')).toEqual('value5'); @@ -106,18 +106,18 @@ describe('state store / chain_state', () => { }); describe('finalize', () => { - let batchStub: BatchChain; + let batchStub: Batch; let stateDiff: StateDiff; beforeEach(() => { - batchStub = { put: jest.fn() } as any; + batchStub = { set: jest.fn() } as any; }); it('should not call storage if nothing is set', () => { // Act stateStore.chain.finalize(batchStub); // Assert - expect(batchStub.put).not.toHaveBeenCalled(); + expect(batchStub.set).not.toHaveBeenCalled(); }); it('should call storage for all the updated keys', async () => { @@ -127,8 +127,8 @@ describe('state store / chain_state', () => { await stateStore.chain.set('key4', Buffer.from('value5')); stateDiff = stateStore.chain.finalize(batchStub); // Assert - 
expect(batchStub.put).toHaveBeenCalledWith('chain:key3', Buffer.from('value4')); - expect(batchStub.put).toHaveBeenCalledWith('chain:key4', Buffer.from('value5')); + expect(batchStub.set).toHaveBeenCalledWith(Buffer.from('chain:key3'), Buffer.from('value4')); + expect(batchStub.set).toHaveBeenCalledWith(Buffer.from('chain:key4'), Buffer.from('value5')); }); it('should return state diff with created and updated values after finalize', async () => { diff --git a/elements/lisk-chain/test/unit/state_store/consensus_state.spec.ts b/elements/lisk-chain/test/unit/state_store/consensus_state.spec.ts index 8250eb8033e..aaae4cc3bc9 100644 --- a/elements/lisk-chain/test/unit/state_store/consensus_state.spec.ts +++ b/elements/lisk-chain/test/unit/state_store/consensus_state.spec.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. */ -import { KVStore, BatchChain } from '@liskhq/lisk-db'; +import { Database, Batch } from '@liskhq/lisk-db'; import { when } from 'jest-when'; import { StateStore } from '../../../src/state_store'; import { BlockHeader, StateDiff } from '../../../src/types'; @@ -31,7 +31,7 @@ describe('state store / chain_state', () => { ] as unknown) as ReadonlyArray; beforeEach(() => { - db = new KVStore('temp'); + db = new Database('temp'); const dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema, @@ -52,7 +52,7 @@ describe('state store / chain_state', () => { // Arrange await stateStore.consensus.set('key1', Buffer.from('value1')); when(db.get) - .calledWith('consensus:key1') + .calledWith(Buffer.from('consensus:key1')) .mockResolvedValue(Buffer.from('value5') as never); // Act & Assert expect(await stateStore.consensus.get('key1')).toEqual(Buffer.from('value1')); @@ -61,7 +61,7 @@ describe('state store / chain_state', () => { it('should try to get value from database if not in cache', async () => { // Arrange when(db.get) - .calledWith('consensus:key1') + .calledWith(Buffer.from('consensus:key1')) 
.mockResolvedValue(Buffer.from('value5') as never); // Act & Assert expect(await stateStore.consensus.get('key1')).toEqual(Buffer.from('value5')); @@ -88,18 +88,18 @@ describe('state store / chain_state', () => { }); describe('finalize', () => { - let batchStub: BatchChain; + let batchStub: Batch; let stateDiff: StateDiff; beforeEach(() => { - batchStub = { put: jest.fn() } as any; + batchStub = { set: jest.fn() } as any; }); it('should not call storage if nothing is set', () => { // Act stateStore.consensus.finalize(batchStub); // Assert - expect(batchStub.put).not.toHaveBeenCalled(); + expect(batchStub.set).not.toHaveBeenCalled(); }); it('should call storage for all the updated keys', async () => { @@ -110,9 +110,18 @@ describe('state store / chain_state', () => { await stateStore.consensus.set('key4', Buffer.from('value5')); stateDiff = stateStore.consensus.finalize(batchStub); // Assert - expect(batchStub.put).toHaveBeenCalledWith('consensus:key3', Buffer.from('value4')); - expect(batchStub.put).toHaveBeenCalledWith('consensus:key4', Buffer.from('value5')); - expect(batchStub.put).toHaveBeenCalledWith('consensus:finalizedHeight', Buffer.from('3')); + expect(batchStub.set).toHaveBeenCalledWith( + Buffer.from('consensus:key3'), + Buffer.from('value4'), + ); + expect(batchStub.set).toHaveBeenCalledWith( + Buffer.from('consensus:key4'), + Buffer.from('value5'), + ); + expect(batchStub.set).toHaveBeenCalledWith( + Buffer.from('consensus:finalizedHeight'), + Buffer.from('3'), + ); }); it('should return state diff with created and updated values after finalize', async () => { diff --git a/elements/lisk-chain/test/utils/state_store.ts b/elements/lisk-chain/test/utils/state_store.ts index 8fa6a2b8894..8e284a8a5c8 100644 --- a/elements/lisk-chain/test/utils/state_store.ts +++ b/elements/lisk-chain/test/utils/state_store.ts @@ -12,14 +12,17 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { StateStore } from '../../src/state_store'; import { DataAccess } from '../../src/data_access'; import { defaultAccountSchema, defaultAccount } from './account'; import { registeredBlockHeaders, defaultNetworkIdentifier } from './block'; import { BlockHeader } from '../../src'; -export const createStateStore = (db: KVStore, lastBlockHeaders: BlockHeader[] = []): StateStore => { +export const createStateStore = ( + db: Database, + lastBlockHeaders: BlockHeader[] = [], +): StateStore => { const dataAccess = new DataAccess({ db, accountSchema: defaultAccountSchema, diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 3483ef88168..9f3905bc0ef 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.2.2", + "version": "5.2.3-alpha.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -55,7 +55,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-passphrase": "^3.1.1", diff --git a/elements/lisk-db/.eslintignore b/elements/lisk-db/.eslintignore deleted file mode 100644 index 726191f1ee8..00000000000 --- a/elements/lisk-db/.eslintignore +++ /dev/null @@ -1,4 +0,0 @@ -dist-node -jest.config.js -benchmark -.eslintrc.js diff --git a/elements/lisk-db/.eslintrc.js b/elements/lisk-db/.eslintrc.js deleted file mode 100644 index 8b7fd410b8d..00000000000 --- a/elements/lisk-db/.eslintrc.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = { - extends: '../../.eslintrc.js', - parserOptions: { - project: './tsconfig.json', - 
tsconfigRootDir: __dirname, - }, -}; diff --git a/elements/lisk-db/.npmignore b/elements/lisk-db/.npmignore deleted file mode 120000 index 8a0be70f3ed..00000000000 --- a/elements/lisk-db/.npmignore +++ /dev/null @@ -1 +0,0 @@ -../../templates/.npmignore.tmpl \ No newline at end of file diff --git a/elements/lisk-db/.npmrc b/elements/lisk-db/.npmrc deleted file mode 120000 index 5cc817c4313..00000000000 --- a/elements/lisk-db/.npmrc +++ /dev/null @@ -1 +0,0 @@ -../../templates/.npmrc.tmpl \ No newline at end of file diff --git a/elements/lisk-db/.prettierignore b/elements/lisk-db/.prettierignore deleted file mode 120000 index 044e4a3df69..00000000000 --- a/elements/lisk-db/.prettierignore +++ /dev/null @@ -1 +0,0 @@ -../../templates/.prettierignore.tmpl \ No newline at end of file diff --git a/elements/lisk-db/.prettierrc.json b/elements/lisk-db/.prettierrc.json deleted file mode 120000 index 00ecd510aaf..00000000000 --- a/elements/lisk-db/.prettierrc.json +++ /dev/null @@ -1 +0,0 @@ -../../templates/.prettierrc.json.tmpl \ No newline at end of file diff --git a/elements/lisk-db/README.md b/elements/lisk-db/README.md deleted file mode 100644 index 23b92f07752..00000000000 --- a/elements/lisk-db/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# @liskhq/lisk-db - -@liskhq/lisk-db is a database access implementation for use with Lisk-related software. 
- -## Installation - -```sh -$ npm install --save @liskhq/lisk-db -``` - -## Benchmarking - -System configuration: - -```sh -Ubuntu: 18.04.3 (LTS) x64 -8 GB / 4 CPUs -160 GB SSD disk -``` - -Install dependencies: - -```sh -$ npm i -``` - -Benchmark embedded databases: - -Default payload size is 1024, pass payload size according to benchmarking - -```sh -$ node ./benchmark/index.js 15000 -``` - -Benchmark results leveldb vs rocksdb (15,000 bytes): - -| Operation | Leveldb | Rocksdb | Winner | % change | -| :-------: | :----------------------: | :-----------------------: | :-----: | :------: | -| get | x 61,274 ops/sec ±13.94% | x 47,785 ops/sec ±14.31% | Leveldb | 28 | -| put | x 22,668 ops/sec ±19.58% | x 32,666 ops/sec ±13.56% | Rocksdb | 44 | -| del | x 51,503 ops/sec ±18.72% | x 51,415 ops/sec ±21.31% | Leveldb | 0.17 | -| batch | x 2,427 ops/sec ±11.34% | x 105,386 ops/sec ±66.37% | Rocksdb | 4242 | - -Benchmark results leveldb vs rocksdb (50,000 bytes): - -| Operation | Leveldb | Rocksdb | Winner | % change | -| :-------: | :----------------------: | :-----------------------: | :-----: | :------: | -| get | x 50,070 ops/sec ±19.63% | x 46,941 ops/sec ±29.65% | Leveldb | 6.6 | -| put | x 14,355 ops/sec ±20.95% | x 4,483 ops/sec ±24.78% | Leveldb | 220 | -| del | x 50,609 ops/sec ±25.45% | x 39,479 ops/sec ±32.27% | Leveldb | 28 | -| batch | x 674 ops/sec ±14.39% | x 133,690 ops/sec ±12.28% | Rocksdb | 19735 | - -Benchmark results leveldb vs rocksdb (100,000 bytes): - -| Operation | Leveldb | Rocksdb | Winner | % change | -| :-------: | :----------------------: | :-----------------------: | :-----: | :------: | -| get | x 41,040 ops/sec ±20.37% | x 48,913 ops/sec ±14.79% | Rocksdb | 19.18 | -| put | x 5,446 ops/sec ±19.04% | x 11,592 ops/sec ±16.66% | Rocksdb | 112.8 | -| del | x 53,184 ops/sec ±31.21% | x 48,948 ops/sec ±10.19% | Rocksdb | 8.65 | -| batch | x 679 ops/sec ±5.71% | x 146,248 ops/sec ±20.69% | Rocksdb | 21438 | - -Benchmark results leveldb vs 
rocksdb (150,000 bytes): - -| Operation | Leveldb | Rocksdb | Winner | % change | -| :-------: | :----------------------: | :----------------------: | :-----: | :------: | -| get | x 44,966 ops/sec ±19.13% | x 39,282 ops/sec ±15.83% | Leveldb | 14.46 | -| put | x 5,508 ops/sec ±22.79% | x 8,674 ops/sec ±10.63% | Rocksdb | 57.48 | -| del | x 70,292 ops/sec ±13.37% | x 38,684 ops/sec ±19.42% | Leveldb | 81.70 | -| batch | x 389 ops/sec ±10.68% | x 81,421 ops/sec ±23.89% | Rocksdb | 20830 | - -## License - -Copyright 2016-2020 Lisk Foundation - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -[lisk core github]: https://github.com/LiskHQ/lisk -[lisk documentation site]: https://lisk.io/documentation/lisk-elements diff --git a/elements/lisk-db/benchmark/databases/index.js b/elements/lisk-db/benchmark/databases/index.js deleted file mode 100644 index 5b7a25a2935..00000000000 --- a/elements/lisk-db/benchmark/databases/index.js +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -module.exports.LevelDB = require('./leveldb'); -module.exports.RocksDB = require('./rocksdb'); diff --git a/elements/lisk-db/benchmark/databases/leveldb.js b/elements/lisk-db/benchmark/databases/leveldb.js deleted file mode 100644 index f098544de4d..00000000000 --- a/elements/lisk-db/benchmark/databases/leveldb.js +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. - */ - -const path = require('path'); -const fs = require('fs'); -const levelup = require('levelup'); -const leveldown = require('leveldown'); - -const createDb = (name, location = (process.env.DB_PATH = '/tmp')) => { - const filePath = `${location}/leveldb/${name}`; - fs.mkdirSync(filePath, { recursive: true }); - const parentDir = path.resolve(path.join(filePath, '../')); - if (!fs.existsSync(parentDir)) { - throw new Error(`${parentDir} does not exist`); - } - const db = levelup(leveldown(location)); - return db; -}; - -const closeDb = (db, name, location = (process.env.DB_PATH = '/tmp')) => { - const filePath = `${location}/leveldb/${name}`; - db.close(); - fs.rmdirSync(filePath, { recursive: true }); -}; - -module.exports = { - createDb: createDb, - closeDb: closeDb, -}; diff --git a/elements/lisk-db/benchmark/databases/rocksdb.js b/elements/lisk-db/benchmark/databases/rocksdb.js deleted file mode 100644 index 5fe84fafcb9..00000000000 --- a/elements/lisk-db/benchmark/databases/rocksdb.js +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * 
for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. - */ - -const fs = require('fs'); -const { KVStore } = require('../../dist-node/kv_store'); - -const createDb = (name, location = (process.env.DB_PATH = '/tmp')) => { - const filePath = `${location}/rocksdb/${name}`; - fs.mkdirSync(filePath, { recursive: true }); - const db = new KVStore(filePath); - return db; -}; - -const closeDb = (db, name, location = (process.env.DB_PATH = '/tmp')) => { - const filePath = `${location}/rocksdb/${name}`; - db.close(); - fs.rmdirSync(filePath, { recursive: true }); -}; - -module.exports = { - createDb: createDb, - closeDb: closeDb, -}; diff --git a/elements/lisk-db/benchmark/index.js b/elements/lisk-db/benchmark/index.js deleted file mode 100644 index 1fd881fde62..00000000000 --- a/elements/lisk-db/benchmark/index.js +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -const { getFakeBlock } = require('./tests/fixtures'); -const { LevelDB, RocksDB } = require('./databases'); -const { getSuite } = require('./tests/get'); -const { putSuite } = require('./tests/put'); -const { delSuite } = require('./tests/del'); -const { batchSuite } = require('./tests/batch'); - -let ldb = LevelDB.createDb('leveldb_bench'); -let rdb = RocksDB.createDb('rocksdb_bench'); - -// 15,000 (15Kb) payload -const benchDB = async payload_size => { - await getSuite(ldb, rdb, getFakeBlock(payload_size)); - await putSuite(ldb, rdb, getFakeBlock(payload_size)); - await delSuite(ldb, rdb, getFakeBlock(payload_size)); - await batchSuite(ldb, rdb, payload_size); -}; - -const cliArgs = process.argv.slice(2); -let payload_size = 1024; - -switch (cliArgs[0]) { - case '15000': - payload_size = parseInt(cliArgs[0], 10); - break; - case '50000': - payload_size = parseInt(cliArgs[0], 10); - break; - case '100000': - payload_size = parseInt(cliArgs[0], 10); - break; - case '150000': - payload_size = parseInt(cliArgs[0], 10); - break; -} - -benchDB(payload_size) - .then(console.log(`Start benchmarking for payload ${payload_size}!!!`)) - .catch(err => { - console.log(err); - process.exit(1); - }); diff --git a/elements/lisk-db/benchmark/tests/batch.js b/elements/lisk-db/benchmark/tests/batch.js deleted file mode 100644 index fd255910481..00000000000 --- a/elements/lisk-db/benchmark/tests/batch.js +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -const { Suite } = require('benchmark'); -const { getFakeBlock } = require('./fixtures'); - -const batchSuite = async (ldb, rdb, payload_size) => { - const suite = new Suite(); - const data = prepare(payload_size); - - suite - .add(`LevelDB: batch([key:string, val: Buffer]) x ${payload_size}(bytes)`, async () => { - await ldb.batch(data); - }) - .add(`RocksDB: batch([key:string, val: Buffer]) x ${payload_size}(bytes)`, async () => { - await rdb.batch(data); - }) - .on('cycle', event => { - console.log(String(event.target)); - }) - .on('complete', async function () { - console.log('Fastest is ' + this.filter('fastest').map('name')); - await ldb.clear(); - await rdb.clear(); - }) - .run({ async: true }); -}; - -const prepare = payload_size => { - const data = []; - - for (let i = 0; i < 10; i++) { - data.push({ - type: 'put', - ...getFakeBlock(payload_size), - }); - } - - return data; -}; - -module.exports.batchSuite = batchSuite; diff --git a/elements/lisk-db/benchmark/tests/del.js b/elements/lisk-db/benchmark/tests/del.js deleted file mode 100644 index 3192cb6c835..00000000000 --- a/elements/lisk-db/benchmark/tests/del.js +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -const { Suite } = require('benchmark'); - -const delSuite = async (ldb, rdb, { key, value }) => { - const suite = new Suite(); - await ldb.put(key, value); - await rdb.put(key, value); - - suite - .add(`LevelDB: del(key: string) x ${value.length}(bytes)`, async () => { - await ldb.del(key); - }) - .add(`RocksDB: del(key: string) x ${value.length}(bytes)`, async () => { - await rdb.del(key); - }) - .on('cycle', event => { - console.log(String(event.target)); - }) - .on('complete', async function () { - console.log('Fastest is ' + this.filter('fastest').map('name')); - await ldb.clear(); - await rdb.clear(); - }) - .run({ async: true }); -}; - -module.exports.delSuite = delSuite; diff --git a/elements/lisk-db/benchmark/tests/fixtures.js b/elements/lisk-db/benchmark/tests/fixtures.js deleted file mode 100644 index f4010e62626..00000000000 --- a/elements/lisk-db/benchmark/tests/fixtures.js +++ /dev/null @@ -1,9 +0,0 @@ -const crypto = require('crypto'); -const randomize = require('randomatic'); - -const getFakeBlock = payload_size => ({ - key: `blocks:id:${randomize('0', 10)}`, - value: crypto.randomBytes(payload_size), -}); - -module.exports.getFakeBlock = getFakeBlock; diff --git a/elements/lisk-db/benchmark/tests/get.js b/elements/lisk-db/benchmark/tests/get.js deleted file mode 100644 index cbbced8be11..00000000000 --- a/elements/lisk-db/benchmark/tests/get.js +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -const { Suite } = require('benchmark'); - -const getSuite = async (ldb, rdb, { key, value }) => { - const suite = new Suite(); - await ldb.put(key, value); - await rdb.put(key, value); - - suite - .add(`LevelDB: get(key: string):Buffer x ${value.length}(bytes)`, async () => { - await ldb.get(key); - }) - .add(`RocksDB: get(key: string):Buffer x ${value.length}(bytes)`, async () => { - await rdb.get(key); - }) - .on('cycle', event => { - console.log(String(event.target)); - }) - .on('complete', async function () { - console.log('Fastest is ' + this.filter('fastest').map('name')); - await ldb.clear(); - await rdb.clear(); - }) - .run({ async: true }); -}; - -module.exports.getSuite = getSuite; diff --git a/elements/lisk-db/benchmark/tests/put.js b/elements/lisk-db/benchmark/tests/put.js deleted file mode 100644 index 98de8879b8a..00000000000 --- a/elements/lisk-db/benchmark/tests/put.js +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -const { Suite } = require('benchmark'); - -const putSuite = async (ldb, rdb, { key, value }) => { - const suite = new Suite(); - - suite - .add(`LevelDB: put(key: string):Buffer x ${value.length}(bytes)`, async () => { - await ldb.put(key, value); - }) - .add(`RocksDB: put(key: string):Buffer x ${value.length}(bytes)`, async () => { - await rdb.put(key, value); - }) - .on('cycle', event => { - console.log(String(event.target)); - }) - .on('complete', async function () { - console.log('Fastest is ' + this.filter('fastest').map('name')); - await ldb.clear(); - await rdb.clear(); - }) - .run({ async: true }); -}; - -module.exports.putSuite = putSuite; diff --git a/elements/lisk-db/jest.config.js b/elements/lisk-db/jest.config.js deleted file mode 120000 index 475ff02b18a..00000000000 --- a/elements/lisk-db/jest.config.js +++ /dev/null @@ -1 +0,0 @@ -../../templates/jest.config.js.tmpl \ No newline at end of file diff --git a/elements/lisk-db/package.json b/elements/lisk-db/package.json deleted file mode 100644 index 3a0db898ccd..00000000000 --- a/elements/lisk-db/package.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "name": "@liskhq/lisk-db", - "version": "0.2.1", - "description": "A database access implementation for use with Lisk-related software", - "author": "Lisk Foundation , lightcurve GmbH ", - "license": "Apache-2.0", - "keywords": [ - "lisk", - "blockchain" - ], - "homepage": "https://github.com/LiskHQ/lisk-sdk/tree/master/elements/lisk-db#readme", - "repository": { - "type": "git", - "url": "git+https://github.com/LiskHQ/lisk-sdk.git" - }, - "bugs": { - "url": "https://github.com/LiskHQ/lisk-sdk/issues" - }, - "engines": { - "node": ">=16.14.1 <=16", - "npm": ">=8.1.0" - }, - "main": "dist-node/index.js", - "scripts": { - "clean": "./scripts/clean.sh", - "format": "prettier --write '**/*'", - "lint": "eslint --ext .js,.ts .", - "lint:fix": "eslint --fix --ext .js,.ts .", - "test": "jest", - "test:coverage": "jest --coverage=true 
--coverage-reporters=text", - "test:ci": "jest --coverage=true --coverage-reporters=json --verbose", - "test:watch": "npm test -- --watch", - "prebuild": "rm -r dist-node/* || mkdir dist-node || true", - "build": "tsc", - "build:check": "node -e \"require('./dist-node')\"", - "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" - }, - "dependencies": { - "debug": "4.3.4", - "levelup": "4.4.0", - "rocksdb": "5.1.1" - }, - "devDependencies": { - "@types/debug": "4.1.7", - "@types/encoding-down": "5.0.0", - "@types/jest": "26.0.21", - "@types/jest-when": "2.7.2", - "@types/levelup": "4.3.0", - "@types/node": "16.11.26", - "@types/rocksdb": "3.0.1", - "@typescript-eslint/eslint-plugin": "4.19.0", - "@typescript-eslint/parser": "4.19.0", - "benchmark": "2.1.4", - "eslint": "7.22.0", - "eslint-config-lisk-base": "2.0.1", - "eslint-plugin-import": "2.22.1", - "eslint-plugin-jest": "24.3.2", - "jest": "26.6.3", - "jest-extended": "0.11.5", - "jest-when": "3.2.1", - "leveldown": "6.0.0", - "prettier": "2.2.1", - "randomatic": "3.1.1", - "source-map-support": "0.5.19", - "ts-jest": "26.5.4", - "ts-node": "9.1.1", - "tsconfig-paths": "3.9.0", - "typescript": "4.2.3" - } -} diff --git a/elements/lisk-db/scripts b/elements/lisk-db/scripts deleted file mode 120000 index f81ccd0a763..00000000000 --- a/elements/lisk-db/scripts +++ /dev/null @@ -1 +0,0 @@ -../../templates/scripts.tmpl \ No newline at end of file diff --git a/elements/lisk-db/src/errors.ts b/elements/lisk-db/src/errors.ts deleted file mode 100644 index c67c08f6683..00000000000 --- a/elements/lisk-db/src/errors.ts +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. 
- * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. - */ -import { smartConvert } from './utils'; - -export class NotFoundError extends Error { - public constructor(key: string) { - super(`Specified key ${smartConvert(key, ':', 'hex')} does not exist`); - } -} diff --git a/elements/lisk-db/src/index.ts b/elements/lisk-db/src/index.ts deleted file mode 100644 index 859fbdbb2e7..00000000000 --- a/elements/lisk-db/src/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. - */ - -export * from './kv_store'; -export * from './errors'; -export * from './utils'; diff --git a/elements/lisk-db/src/kv_store.ts b/elements/lisk-db/src/kv_store.ts deleted file mode 100644 index b9e1d8aaadd..00000000000 --- a/elements/lisk-db/src/kv_store.ts +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. 
- * - * Removal or modification of this copyright notice is prohibited. - */ -import * as fs from 'fs'; -import * as path from 'path'; -import { debug } from 'debug'; -import levelup, { LevelUp } from 'levelup'; -import { NotFoundError } from './errors'; - -// rocksdb removed the default export. However, @types/rocksdb still only exposes default. -// Therefore, temporarily require with below syntax. -// eslint-disable-next-line import/order -import rocksDB = require('rocksdb'); - -const logger = debug('db'); - -export interface Options { - readonly gt?: string; - readonly gte?: string; - readonly lt?: string; - readonly lte?: string; - readonly reverse?: boolean; - readonly limit?: number; -} - -export interface BatchChain { - put: (key: string, value: Buffer) => this; - del: (key: string) => this; - clear: () => this; - write: () => Promise; - readonly length: number; -} - -export interface ReadStreamOptions extends Options { - readonly keys?: boolean; - readonly values?: boolean; - keyAsBuffer?: boolean; -} - -export class KVStore { - private readonly _db: LevelUp; - - public constructor(filePath: string) { - logger('opening file', { filePath }); - const parentDir = path.resolve(path.join(filePath, '../')); - if (!fs.existsSync(parentDir)) { - throw new Error(`${parentDir} does not exist`); - } - // eslint-disable-next-line @typescript-eslint/no-unsafe-call,@typescript-eslint/no-explicit-any - this._db = levelup((rocksDB as any)(filePath)); - } - - public async close(): Promise { - await this._db.close(); - } - - public async get(key: string): Promise { - logger('get', { key }); - try { - const result = (await this._db.get(key)) as Buffer; - return result; - } catch (error) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - if (error.notFound) { - throw new NotFoundError(key); - } - throw error; - } - } - - public async exists(key: string): Promise { - try { - logger('exists', { key }); - await this._db.get(key); - - return true; - } 
catch (error) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - if (error.notFound) { - return false; - } - throw error; - } - } - - public async clear(options?: Options): Promise { - await this._db.clear(options); - } - - public async put(key: string, val: Buffer): Promise { - logger('put', { key }); - - await this._db.put(key, val); - } - - public async del(key: string): Promise { - logger('del', { key }); - - await this._db.del(key); - } - - public createReadStream(options?: ReadStreamOptions): NodeJS.ReadableStream { - logger('readStream', { options }); - - // Treat key as string - const updatedOption = options ? { ...options, keyAsBuffer: false } : { keyAsBuffer: false }; - - return this._db.createReadStream(updatedOption); - } - - public batch(): BatchChain { - return this._db.batch(); - } -} diff --git a/elements/lisk-db/src/utils.ts b/elements/lisk-db/src/utils.ts deleted file mode 100644 index 47694841466..00000000000 --- a/elements/lisk-db/src/utils.ts +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -export const formatInt = (num: number | bigint): string => { - let buf: Buffer; - if (typeof num === 'bigint') { - if (num < BigInt(0)) { - throw new Error('Negative number cannot be formatted'); - } - buf = Buffer.alloc(8); - buf.writeBigUInt64BE(num); - } else { - if (num < 0) { - throw new Error('Negative number cannot be formatted'); - } - buf = Buffer.alloc(4); - buf.writeUInt32BE(num, 0); - } - return buf.toString('binary'); -}; - -export const getFirstPrefix = (prefix: string): string => `${prefix}\x00`; -export const getLastPrefix = (prefix: string): string => `${prefix}\xFF`; - -export const isASCIIChar = (val: string): boolean => /^[\x21-\x7F]*$/.test(val); - -export const smartConvert = (message: string, delimiter: string, format: string): string => - message - .split(delimiter) - .map(s => { - if (isASCIIChar(s)) { - return s; - } - return Buffer.from(s, 'binary').toString(format as BufferEncoding); - }) - .join(delimiter); diff --git a/elements/lisk-db/test/.eslintrc.js b/elements/lisk-db/test/.eslintrc.js deleted file mode 100644 index a98dfb6d823..00000000000 --- a/elements/lisk-db/test/.eslintrc.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = { - extends: '../../../.eslintrc.test.js', - parserOptions: { - project: './tsconfig.json', - tsconfigRootDir: __dirname, - }, -}; diff --git a/elements/lisk-db/test/_setup.js b/elements/lisk-db/test/_setup.js deleted file mode 100644 index e2a8f3c32b8..00000000000 --- a/elements/lisk-db/test/_setup.js +++ /dev/null @@ -1,3 +0,0 @@ -require('jest-extended'); - -process.env.NODE_ENV = 'test'; diff --git a/elements/lisk-db/test/kv_store.spec.ts b/elements/lisk-db/test/kv_store.spec.ts deleted file mode 100644 index 8bd39a6b482..00000000000 --- a/elements/lisk-db/test/kv_store.spec.ts +++ /dev/null @@ -1,431 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. 
- * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. - */ -import * as path from 'path'; -import * as fs from 'fs'; -import { KVStore } from '../src/kv_store'; -import { NotFoundError } from '../src/errors'; - -interface KeyValuePair { - key: string; - value: Buffer; -} - -describe('KVStore', () => { - let db: KVStore; - beforeAll(async () => { - const parentPath = path.join(__dirname, '../tmp'); - if (!fs.existsSync(parentPath)) { - await fs.promises.mkdir(parentPath); - } - db = new KVStore(path.join(parentPath, '/test.db')); - }); - - afterEach(async () => { - await db.clear(); - }); - - describe('constructor', () => { - it('should throw error if the parent folder does not exist', () => { - expect(() => new KVStore('./random-folder/sample.db')).toThrow( - 'random-folder does not exist', - ); - }); - }); - - describe('get', () => { - it('should reject with NotFoundError if the key does not exist', async () => { - await expect(db.get('Random value')).rejects.toThrow(NotFoundError); - }); - - it('should return JSON object if exists', async () => { - const defaultKey = 'random'; - const defaultValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 'binary', - ); - await db['_db'].put(defaultKey, defaultValue); - - const value = await db.get(defaultKey); - expect(value).toEqual(defaultValue); - }); - }); - - describe('exists', () => { - it('should return false if key does not exist', async () => { - await expect(db.exists('Random value')).resolves.toBeFalse(); - }); - - it('should return true if key exists', async () => { - const defaultKey = 'random'; - const defaultValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 
'binary', - ); - await db['_db'].put(defaultKey, defaultValue); - - await expect(db.exists(defaultKey)).resolves.toBeTrue(); - }); - }); - - describe('put', () => { - it('should put the JSON object to the database', async () => { - const defaultKey = 'random'; - const defaultValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 'binary', - ); - await db.put(defaultKey, defaultValue); - - const value = await db['_db'].get(defaultKey); - expect(value).toEqual(defaultValue); - }); - }); - - describe('del', () => { - it('should delete the key if exists', async () => { - const defaultKey = 'random'; - const defaultValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 'binary', - ); - await db['_db'].put(defaultKey, defaultValue); - - await db.del(defaultKey); - await expect(db.get(defaultKey)).rejects.toThrow(NotFoundError); - }); - - it('should not throw error if key does not exist', async () => { - const defaultKey = 'random'; - await expect(db.del(defaultKey)).not.toReject(); - }); - }); - - describe('createReadStream', () => { - let expectedValues: KeyValuePair[]; - - beforeEach(async () => { - expectedValues = [ - { - key: '001', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - { - key: '103', - value: Buffer.from(JSON.stringify(3), 'binary'), - }, - { - key: '010', - value: Buffer.from(JSON.stringify([19, 5, 6]), 'binary'), - }, - { - key: '321', - value: Buffer.from(JSON.stringify('string'), 'binary'), - }, - ]; - const batch = db.batch(); - for (const expected of expectedValues) { - batch.put(expected.key, expected.value); - } - await batch.write(); - }); - - it('should return all the entries in lexicographical order', async () => { - const stream = db.createReadStream(); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - 
reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - expect(result).toHaveLength(expectedValues.length); - expect(result[0].key).toEqual(expectedValues[0].key); - expect(result[1].key).toEqual(expectedValues[2].key); - expect(result[2].key).toEqual(expectedValues[1].key); - }); - - it('should return all the entries in reverse lexicographical order when reverse is specified', async () => { - const stream = db.createReadStream({ reverse: true }); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - expect(result).toHaveLength(expectedValues.length); - expect(result[0].key).toEqual(expectedValues[3].key); - expect(result[1].key).toEqual(expectedValues[1].key); - expect(result[2].key).toEqual(expectedValues[2].key); - }); - - it('should return limited number of entries when limit is specified', async () => { - const stream = db.createReadStream({ limit: 2 }); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - expect(result).toHaveLength(2); - expect(result[0].key).toEqual(expectedValues[0].key); - expect(result[1].key).toEqual(expectedValues[2].key); - }); - - it('should return limited number of entries in reverse order when limit and reverse are specified', async () => { - const stream = db.createReadStream({ limit: 2, reverse: true }); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - 
expect(result).toHaveLength(2); - expect(result[0].key).toEqual(expectedValues[3].key); - expect(result[1].key).toEqual(expectedValues[1].key); - }); - - it('should return ranged value if gte and lte is specified', async () => { - const stream = db.createReadStream({ gte: '001', lte: '010' }); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - expect(result).toHaveLength(2); - expect(result[0].key).toEqual(expectedValues[0].key); - expect(result[1].key).toEqual(expectedValues[2].key); - }); - - it('should return ranged value if gte and lte is specified in reverse order', async () => { - const stream = db.createReadStream({ - gte: '001', - lte: '010', - reverse: true, - }); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - expect(result).toHaveLength(2); - expect(result[0].key).toEqual(expectedValues[2].key); - expect(result[1].key).toEqual(expectedValues[0].key); - }); - - it('should return ranged value if gt and lt is specified', async () => { - const stream = db.createReadStream({ gte: '000', lt: '010' }); - const result = await new Promise((resolve, reject) => { - const data: KeyValuePair[] = []; - stream - .on('data', ({ key, value }) => { - data.push({ key, value }); - }) - .on('error', error => { - reject(error); - }) - .on('end', () => { - resolve(data); - }); - }); - - expect(result).toHaveLength(1); - expect(result[0].key).toEqual(expectedValues[0].key); - }); - }); - - describe('batch', () => { - it('should put the batched operation', async () => { - const expectedValues = [ - { - key: '1', - value: 
Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - { - key: '3', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - { - key: '2', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - ]; - const batch = db.batch(); - for (const expected of expectedValues) { - batch.put(expected.key, expected.value); - } - await batch.write(); - - expect.assertions(expectedValues.length); - for (const expected of expectedValues) { - const result = await db['_db'].get(expected.key); - expect(result).toEqual(expected.value); - } - }); - - it('should update and delete in the same batch', async () => { - const deletingKey = 'random'; - const deletingValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 'binary', - ); - await db['_db'].put(deletingKey, deletingValue); - const updatingKey = '1'; - const updatingValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 'binary', - ); - await db['_db'].put(updatingKey, updatingValue); - - const expectedValues = [ - { - key: '1', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - { - key: '3', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - { - key: '2', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - ]; - const batch = db.batch(); - for (const expected of expectedValues) { - batch.put(expected.key, expected.value); - } - batch.del(deletingKey); - await batch.write(); - - expect.assertions(expectedValues.length + 1); - for (const expected of expectedValues) { - const result = await db['_db'].get(expected.key); - expect(result).toEqual(expected.value); - } - await expect(db.get(deletingKey)).rejects.toThrow(NotFoundError); - }); - }); - - describe('clear', () => { - it('should remove all data existed', async () => { - const defaultKey = 'random'; - const defaultValue = Buffer.from( - JSON.stringify({ - key: 'something', - balance: 1000000, - }), - 'binary', - ); - await db['_db'].put(defaultKey, 
defaultValue); - - await db.clear(); - - await expect(db.get(defaultKey)).rejects.toThrow(NotFoundError); - }); - - it('should only remove specified data', async () => { - const expectedValues = [ - { - key: '001', - value: Buffer.from(JSON.stringify([4, 5, 6]), 'binary'), - }, - { - key: '103', - value: Buffer.from(JSON.stringify(3), 'binary'), - }, - { - key: '010', - value: Buffer.from(JSON.stringify([19, 5, 6]), 'binary'), - }, - ]; - const batch = db.batch(); - for (const expected of expectedValues) { - batch.put(expected.key, expected.value); - } - await batch.write(); - await db.clear({ gt: '001', lt: '103', limit: 2 }); - - await expect(db.get(expectedValues[0].key)).toResolve(); - await expect(db.get(expectedValues[1].key)).toResolve(); - await expect(db.get(expectedValues[2].key)).rejects.toThrow(NotFoundError); - }); - }); -}); diff --git a/elements/lisk-db/test/tsconfig.json b/elements/lisk-db/test/tsconfig.json deleted file mode 120000 index c73c54e77b4..00000000000 --- a/elements/lisk-db/test/tsconfig.json +++ /dev/null @@ -1 +0,0 @@ -../../../templates/test/tsconfig.json.tmpl \ No newline at end of file diff --git a/elements/lisk-db/test/utils.spec.ts b/elements/lisk-db/test/utils.spec.ts deleted file mode 100644 index afc997f0a25..00000000000 --- a/elements/lisk-db/test/utils.spec.ts +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright © 2020 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -import { formatInt, getFirstPrefix, getLastPrefix } from '../src/utils'; - -describe('utils', () => { - describe('formatInt', () => { - describe('when bigint is provided', () => { - it('should return string which can be sorted lexicographically', () => { - const str1 = formatInt(BigInt(100)); - const str2 = formatInt(BigInt(10)); - const str3 = formatInt(BigInt(11)); - const stringArray = [str1, str2, str3]; - // eslint-disable-next-line @typescript-eslint/require-array-sort-compare - stringArray.sort(); - expect(Buffer.from(stringArray[0], 'binary').toString('hex')).toEqual('000000000000000a'); - expect(Buffer.from(stringArray[1], 'binary').toString('hex')).toEqual('000000000000000b'); - expect(Buffer.from(stringArray[2], 'binary').toString('hex')).toEqual('0000000000000064'); - }); - }); - - describe('when number is provided', () => { - it('should return string which can be sorted lexicographically', () => { - const str1 = formatInt(100); - const str2 = formatInt(10); - const str3 = formatInt(11); - const stringArray = [str1, str2, str3]; - // eslint-disable-next-line @typescript-eslint/require-array-sort-compare - stringArray.sort(); - expect(Buffer.from(stringArray[0], 'binary').toString('hex')).toEqual('0000000a'); - expect(Buffer.from(stringArray[1], 'binary').toString('hex')).toEqual('0000000b'); - expect(Buffer.from(stringArray[2], 'binary').toString('hex')).toEqual('00000064'); - }); - }); - }); - - describe('getFirstPrefix', () => { - it('should return string which is the next ascii string by binary', () => { - const prefix = 'block:id'; - const defaultKey = '0000000000000000000000000000000000000000000000000000000000000000'; - const startPrefix = getFirstPrefix(prefix); - // start prefix should come before the expected value - expect(`${prefix}:${defaultKey}`.localeCompare(startPrefix, 'en')).toEqual(1); - // start prefix should come after the expected value - expect(`block:ic:${defaultKey}`.localeCompare(startPrefix, 'en')).toEqual(-1); - }); - 
}); - - describe('getLastPrefix', () => { - it('should return next ascii string by binary', () => { - const prefix = 'block:id'; - const defaultKey = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'; - const endPrefix = getLastPrefix(prefix); - // end prefix should come after the expected value - expect(`${prefix}:${defaultKey}`.localeCompare(endPrefix, 'en')).toEqual(-1); - // end prefix should come before the expected value - expect(`block:iz:${defaultKey}`.localeCompare(endPrefix, 'en')).toEqual(1); - }); - }); -}); diff --git a/elements/lisk-db/tsconfig.json b/elements/lisk-db/tsconfig.json deleted file mode 120000 index 900bb05c680..00000000000 --- a/elements/lisk-db/tsconfig.json +++ /dev/null @@ -1 +0,0 @@ -../../templates/tsconfig.json.tmpl \ No newline at end of file diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 456a0da71a8..87b9716a8fd 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.2.2", + "version": "5.2.3-alpha.0", "description": "Libraries to support building blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,14 +36,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6", - "@liskhq/lisk-bft": "^0.3.4", - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", + "@liskhq/lisk-bft": "^0.3.5-alpha.0", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", - "@liskhq/lisk-genesis": "^0.2.4", - "@liskhq/lisk-p2p": "^0.7.3", + "@liskhq/lisk-genesis": "^0.2.5-alpha.0", + "@liskhq/lisk-p2p": "^0.7.4-alpha.0", "@liskhq/lisk-passphrase": "^3.1.1", "@liskhq/lisk-transaction-pool": "^0.5.3", 
"@liskhq/lisk-transactions": "^5.2.2", diff --git a/elements/lisk-elements/src/index.ts b/elements/lisk-elements/src/index.ts index 422a171142b..6a9a9f60cdb 100644 --- a/elements/lisk-elements/src/index.ts +++ b/elements/lisk-elements/src/index.ts @@ -22,7 +22,6 @@ export * as utils from '@liskhq/lisk-utils'; export * as tree from '@liskhq/lisk-tree'; export * as validator from '@liskhq/lisk-validator'; export * as codec from '@liskhq/lisk-codec'; -export * as db from '@liskhq/lisk-db'; export * as chain from '@liskhq/lisk-chain'; export * as bft from '@liskhq/lisk-bft'; export * as genesis from '@liskhq/lisk-genesis'; diff --git a/elements/lisk-elements/test/__snapshots__/index.spec.ts.snap b/elements/lisk-elements/test/__snapshots__/index.spec.ts.snap index e89206489ca..dea8d90b499 100644 --- a/elements/lisk-elements/test/__snapshots__/index.spec.ts.snap +++ b/elements/lisk-elements/test/__snapshots__/index.spec.ts.snap @@ -12,7 +12,6 @@ Array [ "tree", "validator", "codec", - "db", "chain", "bft", "genesis", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 6ce6e5be5b3..4bdbbffeb0d 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.2.4", + "version": "0.2.5-alpha.0", "description": "Library containing genesis block creation functions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-utils": "^0.2.1", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 75d39342a21..276598b4e34 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json 
@@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.7.3", + "version": "0.7.4-alpha.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-p2p/src/utils/network.ts b/elements/lisk-p2p/src/utils/network.ts index a5d02f2de97..a970ff0d8f5 100644 --- a/elements/lisk-p2p/src/utils/network.ts +++ b/elements/lisk-p2p/src/utils/network.ts @@ -143,9 +143,7 @@ export const expirePeerFromBucket = ( ): P2PEnhancedPeerInfo | undefined => { for (const [peerId, peer] of bucket) { if (peer.dateAdded) { - const timeDifference = Math.round( - Math.abs(new Date().getTime() - peer.dateAdded.getTime()), - ); + const timeDifference = Math.round(Math.abs(new Date().getTime() - peer.dateAdded.getTime())); if (timeDifference >= thresholdTime) { bucket.delete(peerId); diff --git a/elements/lisk-p2p/test/unit/utils/network.spec.ts b/elements/lisk-p2p/test/unit/utils/network.spec.ts index ec484ffb598..b643439f8c7 100644 --- a/elements/lisk-p2p/test/unit/utils/network.spec.ts +++ b/elements/lisk-p2p/test/unit/utils/network.spec.ts @@ -181,8 +181,8 @@ describe('utils/network', () => { it("should return undefined when peers don't have dateAdded field", () => { const peerBucketWithoutDateAdded = new Map(); - const peers = initPeerInfoList(); - for (const p of peers) { + const peers2 = initPeerInfoList(); + for (const p of peers2) { peerBucketWithoutDateAdded.set(p?.peerId, p); } expect( diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index f4ca5ce4397..3dff8831d1e 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.1.7", + "version": "0.1.8-alpha.0", "description": "A plugin for interacting with 
a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.2.2", + "@liskhq/lisk-client": "^5.2.3-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-utils": "^0.2.1", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.9.2", + "lisk-framework": "^0.9.3-alpha.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index c904bac7fa7..07af2442043 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.1.7", + "version": "0.1.8-alpha.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6", - "@liskhq/lisk-client": "^5.2.2", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", + "@liskhq/lisk-client": "^5.2.3-alpha.0", "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", "axios": "1.3.2", "express": "4.17.3", - "lisk-framework": "^0.9.2", + "lisk-framework": "^0.9.3-alpha.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 0f58c96fbee..bf80faf9bab 100644 --- 
a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.2.7", + "version": "0.2.8-alpha.0", "description": "A plugin for lisk-framework that monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,10 +38,10 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-db": "^0.3.6", "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", @@ -52,11 +52,11 @@ "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.9.2" + "lisk-framework": "^0.9.3-alpha.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^5.1.6", - "@liskhq/lisk-genesis": "^0.2.4", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", + "@liskhq/lisk-genesis": "^0.2.5-alpha.0", "@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-forger-plugin/src/controllers/forging_info.ts b/framework-plugins/lisk-framework-forger-plugin/src/controllers/forging_info.ts index 84713cfa9bb..05f6937d87b 100644 --- a/framework-plugins/lisk-framework-forger-plugin/src/controllers/forging_info.ts +++ b/framework-plugins/lisk-framework-forger-plugin/src/controllers/forging_info.ts @@ -13,7 +13,7 @@ */ import { BaseChannel, PluginCodec } from 'lisk-framework'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { getForgerInfo } from '../db'; import { Forger, DPoSAccountJSON } from '../types'; @@ -29,7 +29,7 @@ interface ForgerInfo extends Forger { 
export const getForgingInfo = async ( channel: BaseChannel, codec: PluginCodec, - db: KVStore, + db: Database, ): Promise => { const forgingDelegates = await channel.invoke>('app:getForgingStatus'); const encodedAccounts = await channel.invoke('app:getAccounts', { diff --git a/framework-plugins/lisk-framework-forger-plugin/src/controllers/voters.ts b/framework-plugins/lisk-framework-forger-plugin/src/controllers/voters.ts index 05faaa2c52b..79c974a2eb3 100644 --- a/framework-plugins/lisk-framework-forger-plugin/src/controllers/voters.ts +++ b/framework-plugins/lisk-framework-forger-plugin/src/controllers/voters.ts @@ -13,7 +13,7 @@ */ import { BaseChannel, PluginCodec } from 'lisk-framework'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { Forger, DPoSAccountJSON } from '../types'; import { getForgerInfo } from '../db'; @@ -30,7 +30,7 @@ interface Voter { export const getVoters = async ( channel: BaseChannel, codec: PluginCodec, - db: KVStore, + db: Database, ): Promise => { const forgersList = await channel.invoke('app:getForgingStatus'); const forgerAccounts = ( diff --git a/framework-plugins/lisk-framework-forger-plugin/src/db.ts b/framework-plugins/lisk-framework-forger-plugin/src/db.ts index df31b24e860..56a62b0a6fd 100644 --- a/framework-plugins/lisk-framework-forger-plugin/src/db.ts +++ b/framework-plugins/lisk-framework-forger-plugin/src/db.ts @@ -13,7 +13,7 @@ */ import * as createDebug from 'debug'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import * as os from 'os'; import { join } from 'path'; @@ -27,16 +27,16 @@ const debug = createDebug('plugin:forger:db'); export const getDBInstance = async ( dataPath: string, dbName = 'lisk-framework-forger-plugin.db', -): Promise => { +): Promise => { const dirPath = join(dataPath.replace('~', os.homedir()), 'plugins/data', dbName); await ensureDir(dirPath); - return new 
KVStore(dirPath); + return new Database(dirPath); }; -export const getForgerSyncInfo = async (db: KVStore): Promise => { +export const getForgerSyncInfo = async (db: Database): Promise => { try { - const encodedSyncInfo = await db.get(DB_KEY_FORGER_SYNC_INFO); + const encodedSyncInfo = await db.get(Buffer.from(DB_KEY_FORGER_SYNC_INFO)); return codec.decode(forgerSyncSchema, encodedSyncInfo); } catch (error) { debug('Forger sync info does not exists'); @@ -46,24 +46,24 @@ export const getForgerSyncInfo = async (db: KVStore): Promise => } }; -export const setForgerSyncInfo = async (db: KVStore, blockHeight: number): Promise => { +export const setForgerSyncInfo = async (db: Database, blockHeight: number): Promise => { const encodedSyncInfo = codec.encode(forgerSyncSchema, { syncUptoHeight: blockHeight }); - await db.put(DB_KEY_FORGER_SYNC_INFO, encodedSyncInfo); + await db.set(Buffer.from(DB_KEY_FORGER_SYNC_INFO), encodedSyncInfo); }; export const setForgerInfo = async ( - db: KVStore, + db: Database, forgerAddress: string, forgerInfo: ForgerInfo, ): Promise => { const encodedForgerInfo = codec.encode(forgerInfoSchema, forgerInfo); - await db.put(`${DB_KEY_FORGER_INFO}:${forgerAddress}`, encodedForgerInfo); + await db.set(Buffer.from(`${DB_KEY_FORGER_INFO}:${forgerAddress}`), encodedForgerInfo); }; -export const getForgerInfo = async (db: KVStore, forgerAddress: string): Promise => { +export const getForgerInfo = async (db: Database, forgerAddress: string): Promise => { let forgerInfo; try { - forgerInfo = await db.get(`${DB_KEY_FORGER_INFO}:${forgerAddress}`); + forgerInfo = await db.get(Buffer.from(`${DB_KEY_FORGER_INFO}:${forgerAddress}`)); } catch (error) { debug(`Forger info does not exists for delegate: ${forgerAddress}`); return { diff --git a/framework-plugins/lisk-framework-forger-plugin/src/forger_plugin.ts b/framework-plugins/lisk-framework-forger-plugin/src/forger_plugin.ts index 07968d607e1..b15ddb3db35 100644 --- 
a/framework-plugins/lisk-framework-forger-plugin/src/forger_plugin.ts +++ b/framework-plugins/lisk-framework-forger-plugin/src/forger_plugin.ts @@ -13,7 +13,7 @@ */ import { getAddressFromPublicKey } from '@liskhq/lisk-cryptography'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { ActionsDefinition, BasePlugin, @@ -79,7 +79,7 @@ const getBinaryAddress = (hexAddressStr: string) => const getAddressBuffer = (hexAddressStr: string) => Buffer.from(hexAddressStr, 'hex'); export class ForgerPlugin extends BasePlugin { - private _forgerPluginDB!: KVStore; + private _forgerPluginDB!: Database; private _channel!: BaseChannel; private _forgersList!: dataStructures.BufferMap; private _transactionFees!: TransactionFees; @@ -146,8 +146,9 @@ export class ForgerPlugin extends BasePlugin { }); } + // eslint-disable-next-line @typescript-eslint/require-await public async unload(): Promise { - await this._forgerPluginDB.close(); + this._forgerPluginDB.close(); } private async _setForgersList(): Promise { diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 6296f16a5f2..47ca6cc03ae 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.2.7", + "version": "0.2.8-alpha.0", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,14 +37,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", "cors": "2.8.5", "express": "4.17.3", 
"express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.2" + "lisk-framework": "^0.9.3-alpha.0" }, "devDependencies": { "@liskhq/lisk-cryptography": "^3.2.1", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 5b8575bf37c..9b538a37b49 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.2.7", + "version": "0.2.8-alpha.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-utils": "^0.2.1", @@ -46,7 +46,7 @@ "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.2" + "lisk-framework": "^0.9.3-alpha.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index d3465ea7445..e46d79d6a48 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.2.7", + "version": "0.2.8-alpha.0", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": 
"Apache-2.0", @@ -38,17 +38,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.3.4", - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-bft": "^0.3.5-alpha.0", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", + "@liskhq/lisk-db": "^0.3.6", "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.9.2" + "lisk-framework": "^0.9.3-alpha.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/src/db.ts b/framework-plugins/lisk-framework-report-misbehavior-plugin/src/db.ts index 4fadef559c6..fe961091e21 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/src/db.ts +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/src/db.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { formatInt, KVStore, getFirstPrefix, getLastPrefix } from '@liskhq/lisk-db'; +import { Batch, Database } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import { RawBlockHeader, BlockHeader } from '@liskhq/lisk-chain'; import { areHeadersContradicting } from '@liskhq/lisk-bft'; @@ -47,22 +47,43 @@ interface BlockHeaders { readonly blockHeaders: Buffer[]; } +const formatInt = (num: number | bigint): string => { + let buf: Buffer; + if (typeof num === 'bigint') { + if (num < BigInt(0)) { + throw new Error('Negative number cannot be formatted'); + } + buf = Buffer.alloc(8); + buf.writeBigUInt64BE(num); + } else { + if (num < 0) { + throw new Error('Negative number cannot be formatted'); + } + buf = Buffer.alloc(4); + buf.writeUInt32BE(num, 0); + } + return buf.toString('binary'); +}; + +const getFirstPrefix = (prefix: string): Buffer => Buffer.from(`${prefix}\x00`); +const getLastPrefix = (prefix: string): Buffer => Buffer.from(`${prefix}\xFF`); + export const getDBInstance = async ( dataPath: string, dbName = 'lisk-framework-report-misbehavior-plugin.db', -): Promise => { +): Promise => { const dirPath = join(dataPath.replace('~', os.homedir()), 'plugins/data', dbName); await ensureDir(dirPath); - return new KVStore(dirPath); + return new Database(dirPath); }; export const getBlockHeaders = async ( - db: KVStore, + db: Database, dbKeyBlockHeader: string, ): Promise => { try { - const encodedBlockHeaders = await db.get(dbKeyBlockHeader); + const encodedBlockHeaders = await db.get(Buffer.from(dbKeyBlockHeader)); return codec.decode(blockHeadersSchema, encodedBlockHeaders); } catch (error) { return { blockHeaders: [] as Buffer[] }; @@ -82,7 +103,7 @@ export const decodeBlockHeader = (encodedHeader: Buffer, schema: RegisteredSchem }; export const saveBlockHeaders = async ( - db: KVStore, + db: Database, schemas: RegisteredSchema, header: Buffer, ): Promise => { @@ -92,8 +113,8 @@ export const saveBlockHeaders = async ( const { blockHeaders 
} = await getBlockHeaders(db, dbKey); if (!blockHeaders.find(blockHeader => hash(blockHeader).equals(blockId))) { - await db.put( - dbKey, + await db.set( + Buffer.from(dbKey), codec.encode(blockHeadersSchema, { blockHeaders: [...blockHeaders, header], }), @@ -106,7 +127,7 @@ export const saveBlockHeaders = async ( type IteratableStream = NodeJS.ReadableStream & { destroy: (err?: Error) => void }; export const getContradictingBlockHeader = async ( - db: KVStore, + db: Database, blockHeader: BlockHeader, schemas: RegisteredSchema, ): Promise => @@ -135,7 +156,7 @@ export const getContradictingBlockHeader = async ( }); export const clearBlockHeaders = async ( - db: KVStore, + db: Database, schemas: RegisteredSchema, currentHeight: number, ): Promise => { @@ -159,9 +180,9 @@ export const clearBlockHeaders = async ( resolve(res); }); }); - const batch = db.batch(); + const batch = new Batch(); for (const k of keys) { - batch.del(k); + batch.del(Buffer.from(k)); } - await batch.write(); + await db.write(batch); }; diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/src/report_misbehavior_plugin.ts b/framework-plugins/lisk-framework-report-misbehavior-plugin/src/report_misbehavior_plugin.ts index 2f4815d396e..9dbe3996b4b 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/src/report_misbehavior_plugin.ts +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/src/report_misbehavior_plugin.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ import { validator, LiskValidationError } from '@liskhq/lisk-validator'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import { BlockHeader, RawBlock, Transaction } from '@liskhq/lisk-chain'; import { @@ -59,7 +59,7 @@ const actionParamsSchema = { }; export class ReportMisbehaviorPlugin extends BasePlugin { - private _pluginDB!: KVStore; + private _pluginDB!: Database; private _options!: Options; private readonly _state: State = { currentHeight: 0 }; private _channel!: BaseChannel; @@ -153,10 +153,11 @@ export class ReportMisbehaviorPlugin extends BasePlugin { }, this._clearBlockHeadersInterval); } + // eslint-disable-next-line @typescript-eslint/require-await public async unload(): Promise { clearInterval(this._clearBlockHeadersIntervalId as NodeJS.Timer); - await this._pluginDB.close(); + this._pluginDB.close(); } private _subscribeToChannel(): void { diff --git a/framework/package.json b/framework/package.json index df200a9f236..c270f137272 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.9.2", + "version": "0.9.3-alpha.0", "description": "Framework to build blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,14 +40,14 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6", - "@liskhq/lisk-bft": "^0.3.4", - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", + "@liskhq/lisk-bft": "^0.3.5-alpha.0", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", - "@liskhq/lisk-genesis": "^0.2.4", - "@liskhq/lisk-p2p": "^0.7.3", + "@liskhq/lisk-db": "^0.3.6", + "@liskhq/lisk-genesis": "^0.2.5-alpha.0", + "@liskhq/lisk-p2p": 
"^0.7.4-alpha.0", "@liskhq/lisk-transaction-pool": "^0.5.3", "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-tree": "^0.2.2", diff --git a/framework/src/application.ts b/framework/src/application.ts index 8e12e28269c..7a2b1c1d9cc 100644 --- a/framework/src/application.ts +++ b/framework/src/application.ts @@ -17,7 +17,7 @@ import * as path from 'path'; import * as psList from 'ps-list'; import * as assert from 'assert'; import { promisify } from 'util'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { validator, LiskValidationError } from '@liskhq/lisk-validator'; import { objects, jobHandlers } from '@liskhq/lisk-utils'; import { @@ -125,9 +125,9 @@ export class Application { private _channel!: InMemoryChannel; private _genesisBlock!: Record | undefined; - private _blockchainDB!: KVStore; - private _nodeDB!: KVStore; - private _forgerDB!: KVStore; + private _blockchainDB!: Database; + private _nodeDB!: Database; + private _forgerDB!: Database; private readonly _mutex = new jobHandlers.Mutex(); @@ -304,9 +304,9 @@ export class Application { this._channel.publish(APP_EVENT_SHUTDOWN); await this._node.cleanup(); await this._controller.cleanup(errorCode, message); - await this._blockchainDB.close(); - await this._forgerDB.close(); - await this._nodeDB.close(); + this._blockchainDB.close(); + this._forgerDB.close(); + this._nodeDB.close(); await this._emptySocketsDirectory(); this._clearControllerPidFile(); this.logger.info({ errorCode, message }, 'Application shutdown completed'); @@ -531,10 +531,10 @@ export class Application { fs.unlinkSync(path.join(dirs.pids, 'controller.pid')); } - private _getDBInstance(options: ApplicationConfig, dbName: string): KVStore { + private _getDBInstance(options: ApplicationConfig, dbName: string): Database { const dirs = systemDirs(options.label, options.rootPath); const dbPath = `${dirs.data}/${dbName}`; this.logger.debug({ dbName, dbPath }, 'Create database instance.'); - return 
new KVStore(dbPath); + return new Database(dbPath); } } diff --git a/framework/src/node/forger/data_access.ts b/framework/src/node/forger/data_access.ts index cc76c592496..45a3d48fc51 100644 --- a/framework/src/node/forger/data_access.ts +++ b/framework/src/node/forger/data_access.ts @@ -15,7 +15,7 @@ import { BlockHeader } from '@liskhq/lisk-chain'; import { codec } from '@liskhq/lisk-codec'; import { getAddressFromPublicKey } from '@liskhq/lisk-cryptography'; -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import { dataStructures } from '@liskhq/lisk-utils'; import { DB_KEY_FORGER_PREVIOUSLY_FORGED, @@ -159,12 +159,12 @@ export interface PreviouslyForgedInfoStoreObject { } export const getRegisteredHashOnionSeeds = async ( - db: KVStore, + db: Database, ): Promise> => { try { const registeredHashes = codec.decode( registeredHashOnionsStoreSchema, - await db.get(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS), + await db.get(Buffer.from(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS)), ); const result = new dataStructures.BufferMap(); @@ -179,7 +179,7 @@ export const getRegisteredHashOnionSeeds = async ( }; export const setRegisteredHashOnionSeeds = async ( - db: KVStore, + db: Database, registeredHashOnionSeeds: dataStructures.BufferMap, ): Promise => { const savingData: RegisteredHashOnionStoreObject = { @@ -194,14 +194,17 @@ export const setRegisteredHashOnionSeeds = async ( } const registeredHashOnionSeedsBuffer = codec.encode(registeredHashOnionsStoreSchema, savingData); - await db.put(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS, registeredHashOnionSeedsBuffer); + await db.set( + Buffer.from(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS), + registeredHashOnionSeedsBuffer, + ); }; -export const getUsedHashOnions = async (db: KVStore): Promise => { +export const getUsedHashOnions = async (db: Database): Promise => { try { return codec.decode( usedHashOnionsStoreSchema, - await 
db.get(DB_KEY_FORGER_USED_HASH_ONION), + await db.get(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)), ).usedHashOnions; } catch (error) { return []; @@ -209,22 +212,22 @@ export const getUsedHashOnions = async (db: KVStore): Promise = }; export const setUsedHashOnions = async ( - db: KVStore, + db: Database, usedHashOnions: UsedHashOnion[], ): Promise => { const usedHashOnionObject: UsedHashOnionStoreObject = { usedHashOnions }; - await db.put( - DB_KEY_FORGER_USED_HASH_ONION, + await db.set( + Buffer.from(DB_KEY_FORGER_USED_HASH_ONION), codec.encode(usedHashOnionsStoreSchema, usedHashOnionObject), ); }; export const getPreviouslyForgedMap = async ( - db: KVStore, + db: Database, ): Promise> => { try { - const previouslyForgedBuffer = await db.get(DB_KEY_FORGER_PREVIOUSLY_FORGED); + const previouslyForgedBuffer = await db.get(Buffer.from(DB_KEY_FORGER_PREVIOUSLY_FORGED)); const parsedMap = codec.decode( previouslyForgedInfoSchema, previouslyForgedBuffer, @@ -244,7 +247,7 @@ export const getPreviouslyForgedMap = async ( }; export const setPreviouslyForgedMap = async ( - db: KVStore, + db: Database, previouslyForgedMap: dataStructures.BufferMap, ): Promise => { const previouslyForgedStoreObject: PreviouslyForgedInfoStoreObject = { previouslyForgedInfo: [] }; @@ -256,8 +259,8 @@ export const setPreviouslyForgedMap = async ( a.generatorAddress.compare(b.generatorAddress), ); - await db.put( - DB_KEY_FORGER_PREVIOUSLY_FORGED, + await db.set( + Buffer.from(DB_KEY_FORGER_PREVIOUSLY_FORGED), codec.encode(previouslyForgedInfoSchema, previouslyForgedStoreObject), ); }; @@ -267,7 +270,7 @@ export const setPreviouslyForgedMap = async ( * so it needs to be outside of the DB transaction */ export const saveMaxHeightPreviouslyForged = async ( - db: KVStore, + db: Database, header: BlockHeader, previouslyForgedMap: dataStructures.BufferMap, ): Promise => { diff --git a/framework/src/node/forger/forger.ts b/framework/src/node/forger/forger.ts index 5f1c11de129..153c2cd077b 100644 --- 
a/framework/src/node/forger/forger.ts +++ b/framework/src/node/forger/forger.ts @@ -28,7 +28,7 @@ import { BFT } from '@liskhq/lisk-bft'; import { MerkleTree } from '@liskhq/lisk-tree'; import { dataStructures } from '@liskhq/lisk-utils'; import { TransactionPool } from '@liskhq/lisk-transaction-pool'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { HighFeeForgingStrategy } from './strategies'; import { Processor } from '../processor'; import { Logger } from '../../logger'; @@ -69,7 +69,7 @@ export interface RegisteredDelegate { interface ForgerConstructor { readonly forgingStrategy?: HighFeeForgingStrategy; readonly logger: Logger; - readonly db: KVStore; + readonly db: Database; readonly processorModule: Processor; readonly bftModule: BFT; readonly transactionPoolModule: TransactionPool; @@ -120,7 +120,7 @@ const IsEqualForgingInfo = (info1: ForgingInfo, info2: ForgingInfo): boolean => export class Forger { private readonly _logger: Logger; - private readonly _db: KVStore; + private readonly _db: Database; private readonly _processorModule: Processor; private readonly _bftModule: BFT; private readonly _transactionPoolModule: TransactionPool; diff --git a/framework/src/node/network/network.ts b/framework/src/node/network/network.ts index 0d46cf92b6a..10dcd2faa38 100644 --- a/framework/src/node/network/network.ts +++ b/framework/src/node/network/network.ts @@ -14,7 +14,7 @@ import { codec } from '@liskhq/lisk-codec'; import { getRandomBytes } from '@liskhq/lisk-cryptography'; -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import { EventEmitter } from 'events'; import * as liskP2P from '@liskhq/lisk-p2p'; @@ -74,7 +74,7 @@ interface NetworkConstructor { readonly options: NetworkConfig; readonly channel: InMemoryChannel; readonly logger: Logger; - readonly nodeDB: KVStore; + readonly nodeDB: Database; readonly networkVersion: string; } @@ -106,7 
+106,7 @@ export class Network { private readonly _options: NetworkConfig; private readonly _channel: InMemoryChannel; private readonly _logger: Logger; - private readonly _nodeDB: KVStore; + private readonly _nodeDB: Database; private readonly _networkVersion: string; private _networkID!: string; private _secret: number | undefined; @@ -129,7 +129,9 @@ export class Network { let previousPeers: ReadonlyArray = []; try { // Load peers from the database that were tried or connected the last time node was running - const previousPeersBuffer = await this._nodeDB.get(DB_KEY_NETWORK_TRIED_PEERS_LIST); + const previousPeersBuffer = await this._nodeDB.get( + Buffer.from(DB_KEY_NETWORK_TRIED_PEERS_LIST), + ); // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment previousPeers = JSON.parse(previousPeersBuffer.toString('utf8')); } catch (error) { @@ -141,7 +143,7 @@ export class Network { // Get previous secret if exists let secret: Buffer | undefined; try { - secret = await this._nodeDB.get(DB_KEY_NETWORK_NODE_SECRET); + secret = await this._nodeDB.get(Buffer.from(DB_KEY_NETWORK_NODE_SECRET)); } catch (error) { if (!(error instanceof NotFoundError)) { this._logger.error({ err: error as Error }, 'Error while querying nodeDB'); @@ -150,7 +152,7 @@ export class Network { if (!secret) { secret = getRandomBytes(4); - await this._nodeDB.put(DB_KEY_NETWORK_NODE_SECRET, secret); + await this._nodeDB.set(Buffer.from(DB_KEY_NETWORK_NODE_SECRET), secret); } this._secret = secret?.readUInt32BE(0); @@ -353,7 +355,7 @@ export class Network { { err: error as Error, procedure: request.procedure }, 'Peer request not fulfilled event: Could not respond to peer request', ); - request.error(error); // Send an error back to the peer. + request.error(error as Error); // Send an error back to the peer. 
} }); @@ -394,8 +396,8 @@ export class Network { setInterval(async () => { const triedPeers = this._p2p.getTriedPeers(); if (triedPeers.length) { - await this._nodeDB.put( - DB_KEY_NETWORK_TRIED_PEERS_LIST, + await this._nodeDB.set( + Buffer.from(DB_KEY_NETWORK_TRIED_PEERS_LIST), Buffer.from(JSON.stringify(triedPeers), 'utf8'), ); } diff --git a/framework/src/node/node.ts b/framework/src/node/node.ts index cffb05e1849..cd9e4d7ed14 100644 --- a/framework/src/node/node.ts +++ b/framework/src/node/node.ts @@ -33,7 +33,7 @@ import { import { EVENT_BFT_BLOCK_FINALIZED, BFT } from '@liskhq/lisk-bft'; import { getNetworkIdentifier, hash } from '@liskhq/lisk-cryptography'; import { TransactionPool, events as txPoolEvents } from '@liskhq/lisk-transaction-pool'; -import { KVStore, NotFoundError } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import { jobHandlers } from '@liskhq/lisk-utils'; import { codec } from '@liskhq/lisk-codec'; import { @@ -88,9 +88,9 @@ interface NodeInitInput { readonly genesisBlockJSON: Record; readonly logger: Logger; readonly channel: InMemoryChannel; - readonly forgerDB: KVStore; - readonly blockchainDB: KVStore; - readonly nodeDB: KVStore; + readonly forgerDB: Database; + readonly blockchainDB: Database; + readonly nodeDB: Database; readonly bus: Bus; } @@ -104,11 +104,12 @@ export class Node { private readonly _options: NodeOptions; private readonly _registeredModules: BaseModule[] = []; private _bus!: Bus; + private _dataPath!: string; private _channel!: InMemoryChannel; private _logger!: Logger; - private _nodeDB!: KVStore; - private _forgerDB!: KVStore; - private _blockchainDB!: KVStore; + private _nodeDB!: Database; + private _forgerDB!: Database; + private _blockchainDB!: Database; private _networkIdentifier!: Buffer; private _registeredAccountSchemas: { [moduleName: string]: AccountSchema } = {}; private _networkModule!: Network; @@ -240,6 +241,7 @@ export class Node { this._forgerDB = forgerDB; 
this._nodeDB = nodeDB; this._bus = bus; + this._dataPath = configPath; // read from compiled genesis block if exist const genesisBlock = this._readGenesisBlock(genesisBlockJSON, configPath); @@ -589,9 +591,9 @@ export class Node { return this._processor.verifyTransactions(transactions, stateStore); }, ...this._options.transactionPool, - minEntranceFeePriority: BigInt(this._options.transactionPool.minEntranceFeePriority), + minEntranceFeePriority: BigInt(this._options.transactionPool.minEntranceFeePriority ?? 0), minReplacementFeeDifference: BigInt( - this._options.transactionPool.minReplacementFeeDifference, + this._options.transactionPool.minReplacementFeeDifference ?? 0, ), maxPayloadLength: this._options.genesisConfig.maxPayloadLength, }); @@ -724,6 +726,25 @@ export class Node { ), }); + if ( + this._options.backup.height > 0 && + this._options.backup.height === block.header.height + ) { + const backupPath = path.resolve(this._dataPath, 'backup'); + // if backup already exist, it should remove the directory and create a new checkpoint + if (fs.existsSync(backupPath)) { + fs.removeSync(backupPath); + } + this._blockchainDB + .checkpoint(backupPath) + .catch(err => + this._logger.fatal( + { err: err as Error, height: this._options.backup.height, path: backupPath }, + 'Fail to create backup', + ), + ); + } + // Remove any transactions from the pool on new block if (block.payload.length) { for (const transaction of block.payload) { diff --git a/framework/src/node/processor/processor.ts b/framework/src/node/processor/processor.ts index 0bf92036046..f0826028058 100644 --- a/framework/src/node/processor/processor.ts +++ b/framework/src/node/processor/processor.ts @@ -349,7 +349,7 @@ export class Processor { throw new TransactionApplyError( (err as Error).message ?? 
'Transaction verification failed', transaction.id, - err, + err as Error, ); } } diff --git a/framework/src/schema/application_config_schema.ts b/framework/src/schema/application_config_schema.ts index 2074c14d386..10e9782ce34 100644 --- a/framework/src/schema/application_config_schema.ts +++ b/framework/src/schema/application_config_schema.ts @@ -15,7 +15,16 @@ export const applicationConfigSchema = { $id: '#/config', type: 'object', - required: ['version', 'networkVersion', 'rpc', 'genesisConfig', 'forging', 'network', 'plugins'], + required: [ + 'version', + 'networkVersion', + 'backup', + 'rpc', + 'genesisConfig', + 'forging', + 'network', + 'plugins', + ], properties: { label: { type: 'string', @@ -41,6 +50,17 @@ export const applicationConfigSchema = { description: 'The root path for storing temporary pid and socket file and data. Restricted length due to unix domain socket path length limitations.', }, + backup: { + type: 'object', + required: ['height'], + properties: { + height: { + type: 'integer', + minimum: 0, + description: 'Height at which the DB backup should be taken.', + }, + }, + }, logger: { type: 'object', required: ['fileLogLevel', 'logFileName', 'consoleLogLevel'], @@ -380,6 +400,9 @@ export const applicationConfigSchema = { port: 8080, host: '127.0.0.1', }, + backup: { + height: 0, + }, genesisConfig: { blockTime: 10, communityIdentifier: 'sdk', diff --git a/framework/src/testing/block_processing_env.ts b/framework/src/testing/block_processing_env.ts index b8d829b5120..dc78a29f1ad 100644 --- a/framework/src/testing/block_processing_env.ts +++ b/framework/src/testing/block_processing_env.ts @@ -30,7 +30,7 @@ import { getPrivateAndPublicKeyFromPassphrase, getAddressAndPublicKeyFromPassphrase, } from '@liskhq/lisk-cryptography'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { objects } from '@liskhq/lisk-utils'; import { TokenModule, SequenceModule, KeysModule, DPoSModule } from '../modules'; @@ 
-68,7 +68,7 @@ export interface BlockProcessingEnv { createBlock: (payload?: Transaction[], timestamp?: number) => Promise; getProcessor: () => Processor; getChain: () => Chain; - getBlockchainDB: () => KVStore; + getBlockchainDB: () => Database; process: (block: Block) => Promise; processUntilHeight: (height: number) => Promise; getLastBlock: () => Block; @@ -97,7 +97,7 @@ const getAppConfig = (genesisConfig?: GenesisConfig): ApplicationConfig => { }; const getProcessor = ( - db: KVStore, + db: Database, appConfig: ApplicationConfig, genesisBlock: GenesisBlock, networkIdentifier: Buffer, @@ -290,7 +290,7 @@ export const getBlockProcessingEnv = async ( getDataAccess: () => processor['_chain'].dataAccess, cleanup: async ({ databasePath }): Promise => { await processor.stop(); - await db.close(); + db.close(); removeDB(databasePath); }, }; diff --git a/framework/src/testing/utils.ts b/framework/src/testing/utils.ts index 280da0a6ef4..7ab23c98e83 100644 --- a/framework/src/testing/utils.ts +++ b/framework/src/testing/utils.ts @@ -15,7 +15,7 @@ import * as fs from 'fs-extra'; import { AccountDefaultProps, AccountSchema, Block, BlockHeaderAsset } from '@liskhq/lisk-chain'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { Logger } from '../logger'; import { BaseModule, BaseModuleChannel } from '../modules'; @@ -226,10 +226,10 @@ const defaultDatabasePath = '/tmp/lisk-framework/test'; export const getDBPath = (name: string, dbPath = defaultDatabasePath): string => `${dbPath}/${name}.db`; -export const createDB = (name: string, dbPath = defaultDatabasePath): KVStore => { +export const createDB = (name: string, dbPath = defaultDatabasePath): Database => { fs.ensureDirSync(dbPath); const filePath = getDBPath(name, dbPath); - return new KVStore(filePath); + return new Database(filePath); }; export const removeDB = (dbPath = defaultDatabasePath): void => diff --git a/framework/src/types.ts b/framework/src/types.ts index 
af89faaeb7f..d805f5efce9 100644 --- a/framework/src/types.ts +++ b/framework/src/types.ts @@ -172,6 +172,9 @@ export interface ApplicationConfig { force?: boolean; defaultPassword?: string; }; + backup: { + height: number; + }; network: NetworkConfig; logger: { logFileName: string; diff --git a/framework/test/integration/node/forger/seed_reveal.spec.ts b/framework/test/integration/node/forger/seed_reveal.spec.ts index 4501be86ecd..7f4241fec99 100644 --- a/framework/test/integration/node/forger/seed_reveal.spec.ts +++ b/framework/test/integration/node/forger/seed_reveal.spec.ts @@ -41,11 +41,11 @@ describe('seed reveal', () => { afterEach(async () => { await forgerDB.forgerDB.clear(); - await forgerDB.forgerDB.close(); + forgerDB.forgerDB.close(); removeDB(dbName); }); - it('should pass for selecting the next seed reveal', async () => { + it('should pass for selecting the next seed reveal', () => { const address = '9cabee3d27426676b852ce6b804cb2fdff7cd0b5'; // 2nd and 3rd Hash onion from config file for address 9cabee3d27426676b852ce6b804cb2fdff7cd0b5 const secondCheckpointStart = Buffer.from('f7a3fb976e50d882c709edb63bde4d9c', 'hex'); diff --git a/framework/test/integration/node/forger/transaction_pool.spec.ts b/framework/test/integration/node/forger/transaction_pool.spec.ts index 67f1262846c..561755ce54d 100644 --- a/framework/test/integration/node/forger/transaction_pool.spec.ts +++ b/framework/test/integration/node/forger/transaction_pool.spec.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { Transaction } from '@liskhq/lisk-chain'; import { nodeUtils } from '../../../utils'; import { createDB, removeDB } from '../../../utils/kv_store'; @@ -22,8 +22,8 @@ import { createTransferTransaction } from '../../../utils/node/transaction'; describe('Transaction pool', () => { const dbName = 'transaction_pool'; let node: any; - let blockchainDB: KVStore; - let forgerDB: KVStore; + let blockchainDB: Database; + let forgerDB: Database; beforeAll(async () => { ({ blockchainDB, forgerDB } = createDB(dbName)); @@ -32,8 +32,8 @@ describe('Transaction pool', () => { afterAll(async () => { await node.cleanup(); - await blockchainDB.close(); - await forgerDB.close(); + blockchainDB.close(); + forgerDB.close(); removeDB(dbName); }); diff --git a/framework/test/integration/node/genesis_block.spec.ts b/framework/test/integration/node/genesis_block.spec.ts index 339f8032ace..372de0c9752 100644 --- a/framework/test/integration/node/genesis_block.spec.ts +++ b/framework/test/integration/node/genesis_block.spec.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { Account } from '@liskhq/lisk-chain'; import { validator } from '@liskhq/lisk-validator'; import { nodeUtils } from '../../utils'; @@ -25,8 +25,8 @@ describe('genesis block', () => { const dbName = 'genesis_block'; const genesisBlock = getGenesisBlock(); let node: Node; - let blockchainDB: KVStore; - let forgerDB: KVStore; + let blockchainDB: Database; + let forgerDB: Database; beforeAll(async () => { ({ blockchainDB, forgerDB } = createDB(dbName)); @@ -38,8 +38,8 @@ describe('genesis block', () => { afterAll(async () => { await node.cleanup(); - await blockchainDB.close(); - await forgerDB.close(); + blockchainDB.close(); + forgerDB.close(); removeDB(dbName); }); diff --git a/framework/test/integration/node/processor/delete_block.spec.ts b/framework/test/integration/node/processor/delete_block.spec.ts index 89354423375..d5dbcbedf16 100644 --- a/framework/test/integration/node/processor/delete_block.spec.ts +++ b/framework/test/integration/node/processor/delete_block.spec.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { formatInt, NotFoundError } from '@liskhq/lisk-db'; +import { NotFoundError } from '@liskhq/lisk-db'; import { Block, stateDiffSchema, @@ -27,6 +27,7 @@ import { genesis, DefaultAccountProps } from '../../../fixtures'; import { createTransferTransaction } from '../../../utils/node/transaction'; import * as testing from '../../../../src/testing'; import { Processor } from '../../../../src/node/processor'; +import { formatInt } from '../../../utils/kv_store'; describe('Delete block', () => { let processEnv: testing.BlockProcessingEnv; @@ -89,7 +90,7 @@ describe('Delete block', () => { newBlock = await processEnv.createBlock([transaction]); await processEnv .getBlockchainDB() - .put(`diff:${formatInt(newBlock.header.height)}`, emptyDiffState); + .set(Buffer.from(`diff:${formatInt(newBlock.header.height)}`), emptyDiffState); await processEnv.process(newBlock); await processor.deleteLastBlock(); }); @@ -123,7 +124,9 @@ describe('Delete block', () => { it('should not persist the state diff for that block height', async () => { await expect( - processEnv.getBlockchainDB().get(`diff:${formatInt(newBlock.header.height)}`), + processEnv + .getBlockchainDB() + .get(Buffer.from(`diff:${formatInt(newBlock.header.height)}`)), ).rejects.toBeInstanceOf(NotFoundError); }); }); @@ -151,7 +154,7 @@ describe('Delete block', () => { // Assert await expect( processEnv.getDataAccess().getAccountByAddress(recipientAccount.address), - ).rejects.toThrow('Specified key accounts:address'); + ).rejects.toThrow('Specified key 6163636f756e74733a61646472657373'); const revertedGenesisAccount = await processEnv .getDataAccess() .getAccountByAddress(genesisAccount.address); diff --git a/framework/test/unit/__snapshots__/application.spec.ts.snap b/framework/test/unit/__snapshots__/application.spec.ts.snap index 2235c193001..d74f5d92fb3 100644 --- a/framework/test/unit/__snapshots__/application.spec.ts.snap +++ b/framework/test/unit/__snapshots__/application.spec.ts.snap @@ -2,6 +2,9 @@ 
exports[`Application #constructor should set internal variables 1`] = ` Object { + "backup": Object { + "height": 0, + }, "forging": Object { "defaultPassword": "elephant tree paris dragon chair galaxy", "delegates": Array [ diff --git a/framework/test/unit/node/forger/forger.spec.ts b/framework/test/unit/node/forger/forger.spec.ts index 31f2132e8bb..48612f37823 100644 --- a/framework/test/unit/node/forger/forger.spec.ts +++ b/framework/test/unit/node/forger/forger.spec.ts @@ -71,7 +71,7 @@ describe('forger', () => { }; dbStub = { get: jest.fn(), - put: jest.fn(), + set: jest.fn(), }; bftModuleStub = { getMaxHeightPrevoted: jest.fn().mockReturnValue(5), @@ -151,7 +151,7 @@ describe('forger', () => { maxHeightPrevoted: 10, }); when(dbStub.get) - .calledWith(DB_KEY_FORGER_PREVIOUSLY_FORGED) + .calledWith(Buffer.from(DB_KEY_FORGER_PREVIOUSLY_FORGED)) .mockResolvedValue( codec.encode(previouslyForgedInfoSchema, previouslyForgedStoreObject) as never, ); @@ -253,7 +253,7 @@ describe('forger', () => { describe('overwrite=false', () => { it('should fail when forger info does not exist', async () => { when(dbStub.get) - .calledWith(DB_KEY_FORGER_PREVIOUSLY_FORGED) + .calledWith(Buffer.from(DB_KEY_FORGER_PREVIOUSLY_FORGED)) .mockRejectedValue(new NotFoundError('data not found') as never); await expect( @@ -348,8 +348,8 @@ describe('forger', () => { true, ); - expect(dbStub.put).toHaveBeenCalledWith( - DB_KEY_FORGER_PREVIOUSLY_FORGED, + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from(DB_KEY_FORGER_PREVIOUSLY_FORGED), codec.encode(previouslyForgedInfoSchema, previouslyForgedStoreObject), ); @@ -832,7 +832,7 @@ describe('forger', () => { (forgeModule as any)._config.forging.delegates = delegates; when(dbStub.get) - .calledWith(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS) + .calledWith(Buffer.from(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS)) .mockResolvedValue(registeredHashOnionsBuffer as never); // Act @@ -851,8 +851,8 @@ describe('forger', () => { 
expect(loggerStub.warn).toHaveBeenCalledWith( expect.stringContaining('Overwriting with new hash onion'), ); - expect(dbStub.put).toHaveBeenCalledWith( - DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS, + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from(DB_KEY_FORGER_REGISTERED_HASH_ONION_SEEDS), codec.encode(registeredHashOnionsStoreSchema, originalKey), ); }); @@ -869,7 +869,7 @@ describe('forger', () => { ], }; when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) .mockResolvedValue(codec.encode(usedHashOnionsStoreSchema, usedHashOnions) as never); // Act @@ -895,7 +895,7 @@ describe('forger', () => { ], }; when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) .mockResolvedValue(codec.encode(usedHashOnionsStoreSchema, usedHashOnion) as never); // Act @@ -931,7 +931,7 @@ describe('forger', () => { getSlotNumberStub = chainModuleStub.slots.getSlotNumber; when(dbStub.get) - .calledWith(DB_KEY_FORGER_PREVIOUSLY_FORGED) + .calledWith(Buffer.from(DB_KEY_FORGER_PREVIOUSLY_FORGED)) .mockRejectedValue(new NotFoundError('not found') as never); when(getSlotNumberStub).calledWith().mockReturnValue(currentSlot); when(getSlotNumberStub) @@ -979,7 +979,7 @@ describe('forger', () => { chainModuleStub.slots.getSlotTime.mockReturnValue(futureSlotTime); await expect(forgeModule.forge()).toResolve(); - expect(dbStub.put).not.toHaveBeenCalled(); + expect(dbStub.set).not.toHaveBeenCalled(); dateNowMockFn.mockRestore(); }); @@ -1096,7 +1096,7 @@ describe('forger', () => { }; when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) .mockResolvedValue(codec.encode(usedHashOnionsStoreSchema, usedHashOnion) as never); // Act @@ -1164,14 +1164,14 @@ describe('forger', () => { ); when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) 
.mockResolvedValue(usedHashOnionInputBuffer as never); // Act await forgeModule.forge(); // Assert - expect(dbStub.put).toHaveBeenCalledWith( - DB_KEY_FORGER_USED_HASH_ONION, + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from(DB_KEY_FORGER_USED_HASH_ONION), usedHashOnionOutputBuffer, ); }); @@ -1233,14 +1233,14 @@ describe('forger', () => { ); when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) .mockResolvedValue(usedHashOnionInputBuffer as never); // Act await forgeModule.forge(); // Assert - expect(dbStub.put).toHaveBeenCalledWith( - DB_KEY_FORGER_USED_HASH_ONION, + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from(DB_KEY_FORGER_USED_HASH_ONION), usedHashOnionOutputBuffer, ); }); @@ -1294,15 +1294,15 @@ describe('forger', () => { ); when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) .mockResolvedValue(usedHashOnionInputBuffer as never); (forgeModule as any)._bftModule.finalizedHeight = 318; // Act await forgeModule.forge(); // Assert - expect(dbStub.put).toHaveBeenCalledWith( - DB_KEY_FORGER_USED_HASH_ONION, + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from(DB_KEY_FORGER_USED_HASH_ONION), usedHashOnionOutputBuffer, ); }); @@ -1353,7 +1353,7 @@ describe('forger', () => { ); when(dbStub.get) - .calledWith(DB_KEY_FORGER_USED_HASH_ONION) + .calledWith(Buffer.from(DB_KEY_FORGER_USED_HASH_ONION)) .mockResolvedValue(usedHashOnionInputBuffer as never); // Act @@ -1362,8 +1362,8 @@ describe('forger', () => { expect(loggerStub.warn).toHaveBeenCalledWith( 'All of the hash onion has been used already. 
Please update to the new hash onion.', ); - expect(dbStub.put).toHaveBeenCalledWith( - DB_KEY_FORGER_USED_HASH_ONION, + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from(DB_KEY_FORGER_USED_HASH_ONION), usedHashOnionOutputBuffer, ); }); @@ -1406,7 +1406,7 @@ describe('forger', () => { } as unknown) as Block, }); // Assert - expect(dbStub.get).toHaveBeenCalledWith('forger:previouslyForged'); + expect(dbStub.get).toHaveBeenCalledWith(Buffer.from('forger:previouslyForged')); // previousBlock.height + 1 expect(block.header.asset.maxHeightPreviouslyForged).toBe(0); }); @@ -1429,7 +1429,7 @@ describe('forger', () => { } as Block, }); // Assert - expect(dbStub.get).toHaveBeenCalledWith('forger:previouslyForged'); + expect(dbStub.get).toHaveBeenCalledWith(Buffer.from('forger:previouslyForged')); expect(block.header.asset.maxHeightPreviouslyForged).toBe(previouslyForgedHeight); }); @@ -1493,7 +1493,10 @@ describe('forger', () => { }, ], }); - expect(dbStub.put).toHaveBeenCalledWith('forger:previouslyForged', maxHeightResult); + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from('forger:previouslyForged'), + maxHeightResult, + ); }); it('should set maxPreviouslyForgedHeight to forging height', async () => { @@ -1518,7 +1521,10 @@ describe('forger', () => { }, ], }); - expect(dbStub.put).toHaveBeenCalledWith('forger:previouslyForged', maxHeightResult); + expect(dbStub.set).toHaveBeenCalledWith( + Buffer.from('forger:previouslyForged'), + maxHeightResult, + ); }); it('should not set maxPreviouslyForgedHeight to next height if lower', async () => { @@ -1545,7 +1551,7 @@ describe('forger', () => { header: { height: 10 }, } as Block, }); - expect(dbStub.put).not.toHaveBeenCalled(); + expect(dbStub.set).not.toHaveBeenCalled(); }); it('should include seed reveal as specified in the block', async () => { diff --git a/framework/test/unit/node/network/network.spec.ts b/framework/test/unit/node/network/network.spec.ts index c2b21ba800d..30eb42fc7b1 100644 --- 
a/framework/test/unit/node/network/network.spec.ts +++ b/framework/test/unit/node/network/network.spec.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { P2P } from '@liskhq/lisk-p2p'; import { Network } from '../../../../src/node/network'; import { Logger } from '../../../../src/logger'; @@ -26,7 +26,7 @@ describe('network', () => { let network: Network; jest.useFakeTimers(); beforeEach(() => { - const db = new KVStore('~/.lisk/stubbed'); + const db = new Database('~/.lisk/stubbed'); network = new Network({ nodeDB: db, networkVersion: '2.0', @@ -165,7 +165,7 @@ describe('network', () => { describe('previousPeers', () => { const getDBStub = jest.fn(); - const putDBStub = jest.fn(); + const setDBStub = jest.fn(); const previousPeers = [ { @@ -181,10 +181,10 @@ describe('network', () => { const previousPeersBuffer = Buffer.from(JSON.stringify(previousPeers), 'utf8'); beforeEach(() => { - const db = new KVStore('~/.lisk/stubbed'); + const db = new Database('~/.lisk/stubbed'); db.get = getDBStub; - db.put = putDBStub; + db.set = setDBStub; getDBStub.mockResolvedValue(previousPeersBuffer); @@ -218,7 +218,7 @@ describe('network', () => { } as any; jest.advanceTimersByTime(600000); - expect(putDBStub).toHaveBeenCalledTimes(1); + expect(setDBStub).toHaveBeenCalledTimes(1); }); }); }); diff --git a/framework/test/unit/node/node.spec.ts b/framework/test/unit/node/node.spec.ts index 8fbe0989d72..1cb5eab8cee 100644 --- a/framework/test/unit/node/node.spec.ts +++ b/framework/test/unit/node/node.spec.ts @@ -14,7 +14,7 @@ import { BFT } from '@liskhq/lisk-bft'; import { codec } from '@liskhq/lisk-codec'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { TransactionPool } from '@liskhq/lisk-transaction-pool'; import { when } from 'jest-when'; import { InMemoryChannel } from 
'../../../src/controller/channels'; @@ -35,9 +35,9 @@ describe('Node', () => { let node: Node; let subscribedEvents: any; const stubs: any = {}; - let blockchainDB: KVStore; - let forgerDB: KVStore; - let nodeDB: KVStore; + let blockchainDB: Database; + let forgerDB: Database; + let nodeDB: Database; let tokenModule: BaseModule; let dposModule: BaseModule; @@ -53,9 +53,9 @@ describe('Node', () => { jest.spyOn(Processor.prototype, 'init').mockResolvedValue(undefined); jest.spyOn(Synchronizer.prototype, 'init').mockResolvedValue(undefined); - blockchainDB = new KVStore('blockchain.db'); - forgerDB = new KVStore('forger.db'); - nodeDB = new KVStore('node.db'); + blockchainDB = new Database('blockchain.db'); + forgerDB = new Database('forger.db'); + nodeDB = new Database('node.db'); tokenModule = new TokenModule(nodeOptions.genesisConfig); dposModule = new DPoSModule(nodeOptions.genesisConfig); diff --git a/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts b/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts index c45df3f3c3b..fe169a87620 100644 --- a/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts +++ b/framework/test/unit/node/synchronizer/block_synchronization_mechanism/block_synchronization_mechanism.spec.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { when } from 'jest-when'; import { codec } from '@liskhq/lisk-codec'; import { Block, Chain, BlockHeader } from '@liskhq/lisk-chain'; @@ -81,7 +81,7 @@ describe('block_synchronization_mechanism', () => { channelMock = new ChannelMock(); - const blockchainDB = new KVStore('blockchain.db'); + const blockchainDB = new Database('blockchain.db'); networkMock = { requestFromPeer: jest.fn(), diff --git a/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts b/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts index 371bfd95636..3a4b2a2cbd0 100644 --- a/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts +++ b/framework/test/unit/node/synchronizer/fast_chain_switching_mechanism/fast_chain_switching_mechanism.spec.ts @@ -13,7 +13,7 @@ */ import { when } from 'jest-when'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import { Block, Chain } from '@liskhq/lisk-chain'; import { BFT } from '@liskhq/lisk-bft'; @@ -73,7 +73,7 @@ describe('fast_chain_switching_mechanism', () => { channelMock = new ChannelMock(); - const blockchainDB = new KVStore('blockchain.db'); + const blockchainDB = new Database('blockchain.db'); chainModule = new Chain({ networkIdentifier: defaultNetworkIdentifier, diff --git a/framework/test/unit/node/synchronizer/synchronizer.spec.ts b/framework/test/unit/node/synchronizer/synchronizer.spec.ts index a0c90834dd9..0ecc4c1fdba 100644 --- a/framework/test/unit/node/synchronizer/synchronizer.spec.ts +++ b/framework/test/unit/node/synchronizer/synchronizer.spec.ts @@ -16,7 +16,7 @@ import { when } from 'jest-when'; import { codec } from '@liskhq/lisk-codec'; import { Block, Chain, Transaction } from 
'@liskhq/lisk-chain'; import { BFT } from '@liskhq/lisk-bft'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { getAddressAndPublicKeyFromPassphrase, signDataWithPassphrase, @@ -73,7 +73,7 @@ describe('Synchronizer', () => { }; channelMock = new ChannelMock(); - const blockchainDB = new KVStore('blockchain.db'); + const blockchainDB = new Database('blockchain.db'); chainModule = new Chain({ networkIdentifier: defaultNetworkIdentifier, diff --git a/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap b/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap index abe0fe65e2e..5c3ff51ec9d 100644 --- a/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap +++ b/framework/test/unit/schema/__snapshots__/application_schema.spec.ts.snap @@ -6,6 +6,9 @@ Object { "$id": "#/config", "additionalProperties": false, "default": Object { + "backup": Object { + "height": 0, + }, "forging": Object { "delegates": Array [], "force": false, @@ -60,6 +63,19 @@ Object { "version": "0.0.0", }, "properties": Object { + "backup": Object { + "properties": Object { + "height": Object { + "description": "Height at which the DB backup should be taken.", + "minimum": 0, + "type": "integer", + }, + }, + "required": Array [ + "height", + ], + "type": "object", + }, "forging": Object { "properties": Object { "defaultPassword": Object { @@ -451,6 +467,7 @@ Object { "required": Array [ "version", "networkVersion", + "backup", "rpc", "genesisConfig", "forging", diff --git a/framework/test/utils/kv_store.ts b/framework/test/utils/kv_store.ts index 4ee0dda2e2f..550e21ecf4e 100644 --- a/framework/test/utils/kv_store.ts +++ b/framework/test/utils/kv_store.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ import * as fs from 'fs-extra'; -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; export const defaultPath = '/tmp/lisk-framework/test'; @@ -24,11 +24,29 @@ export const createDB = (name: string) => { const forgerDBPath = getPath(name); return { path, - nodeDB: new KVStore(`${path}/node.db`), - blockchainDB: new KVStore(`${path}/blockchain.db`), + nodeDB: new Database(`${path}/node.db`), + blockchainDB: new Database(`${path}/blockchain.db`), forgerDBPath, - forgerDB: new KVStore(`${path}/forger.db`), + forgerDB: new Database(`${path}/forger.db`), }; }; +export const formatInt = (num: number | bigint): string => { + let buf: Buffer; + if (typeof num === 'bigint') { + if (num < BigInt(0)) { + throw new Error('Negative number cannot be formatted'); + } + buf = Buffer.alloc(8); + buf.writeBigUInt64BE(num); + } else { + if (num < 0) { + throw new Error('Negative number cannot be formatted'); + } + buf = Buffer.alloc(4); + buf.writeUInt32BE(num, 0); + } + return buf.toString('binary'); +}; + export const removeDB = (name: string): void => fs.removeSync(getPath(name)); diff --git a/framework/test/utils/node/node.ts b/framework/test/utils/node/node.ts index 95458c3dde0..60357d674b2 100644 --- a/framework/test/utils/node/node.ts +++ b/framework/test/utils/node/node.ts @@ -13,7 +13,7 @@ * */ -import { KVStore } from '@liskhq/lisk-db'; +import { Database } from '@liskhq/lisk-db'; import { objects } from '@liskhq/lisk-utils'; import { nodeConfig } from '../configs'; import { createMockChannel, createMockBus } from '../channel'; @@ -53,8 +53,8 @@ export const fakeLogger = createLogger({ /* eslint-enable @typescript-eslint/no-empty-function, @typescript-eslint/explicit-module-boundary-types */ export const createAndLoadNode = async ( - blockchainDB: KVStore, - forgerDB: KVStore, + blockchainDB: Database, + forgerDB: Database, logger: Logger = fakeLogger, channel?: InMemoryChannel, options?: NodeOptions, @@ -64,8 +64,8 @@ export const 
createAndLoadNode = async ( }); const nodeDB = ({ get: jest.fn(), - put: jest.fn(), - } as unknown) as KVStore; + set: jest.fn(), + } as unknown) as Database; await chainModule.init({ genesisBlockJSON, dataPath: defaultPath, diff --git a/sdk/package.json b/sdk/package.json index 5e4aa532cc7..5a261fbdf82 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.2.2", + "version": "5.2.3-alpha.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,25 +29,25 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.6", - "@liskhq/lisk-bft": "^0.3.4", - "@liskhq/lisk-chain": "^0.3.4", + "@liskhq/lisk-api-client": "^5.1.7-alpha.0", + "@liskhq/lisk-bft": "^0.3.5-alpha.0", + "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@liskhq/lisk-codec": "^0.2.2", "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-db": "^0.2.1", - "@liskhq/lisk-framework-forger-plugin": "^0.2.7", - "@liskhq/lisk-framework-http-api-plugin": "^0.2.7", - "@liskhq/lisk-framework-monitor-plugin": "^0.2.7", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.7", - "@liskhq/lisk-genesis": "^0.2.4", - "@liskhq/lisk-p2p": "^0.7.3", + "@liskhq/lisk-db": "^0.3.6", + "@liskhq/lisk-framework-forger-plugin": "^0.2.8-alpha.0", + "@liskhq/lisk-framework-http-api-plugin": "^0.2.8-alpha.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.2.8-alpha.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.8-alpha.0", + "@liskhq/lisk-genesis": "^0.2.5-alpha.0", + "@liskhq/lisk-p2p": "^0.7.4-alpha.0", "@liskhq/lisk-passphrase": "^3.1.1", "@liskhq/lisk-transaction-pool": "^0.5.3", "@liskhq/lisk-transactions": "^5.2.2", "@liskhq/lisk-tree": "^0.2.2", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", - "lisk-framework": "^0.9.2" + "lisk-framework": "^0.9.3-alpha.0" }, "devDependencies": { "eslint": "7.22.0", diff --git 
a/yarn.lock b/yarn.lock index 8df229cde39..7bd678628de 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2539,6 +2539,31 @@ dependencies: "@types/node" "11.11.2" +"@liskhq/lisk-db@^0.3.6": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@liskhq/lisk-db/-/lisk-db-0.3.6.tgz#d8e3b0c548ae54dfd98b371738ce4dc259303108" + integrity sha512-x1NtivGhfIrgzusDhOTqiz7eNIpQbNrPnEbEcbK32Iz1KWagZpdaJ6kOAXTTNA4QBwM6brbmxjFagFPvIMkgnw== + dependencies: + "@mapbox/node-pre-gyp" "^1.0.9" + "@types/node" "^16 || ^18" + cargo-cp-artifact "^0.1" + shelljs "^0.8.5" + +"@mapbox/node-pre-gyp@^1.0.9": + version "1.0.10" + resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz#8e6735ccebbb1581e5a7e652244cadc8a844d03c" + integrity sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA== + dependencies: + detect-libc "^2.0.0" + https-proxy-agent "^5.0.0" + make-dir "^3.1.0" + node-fetch "^2.6.7" + nopt "^5.0.0" + npmlog "^5.0.1" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.11" + "@nodelib/fs.scandir@2.1.3": version "2.1.3" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.3.tgz#3a582bdb53804c6ba6d146579c46e52130cf4a3b" @@ -3555,11 +3580,6 @@ multimatch "^5.0.0" typescript "~4.1.2" -"@types/abstract-leveldown@*": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@types/abstract-leveldown/-/abstract-leveldown-5.0.1.tgz#3c7750d0186b954c7f2d2f6acc8c3c7ba0c3412e" - integrity sha512-wYxU3kp5zItbxKmeRYCEplS2MW7DzyBnxPGj+GJVHZEUZiK/nn5Ei1sUFgURDh+X051+zsGe28iud3oHjrYWQQ== - "@types/anymatch@*": version "1.3.1" resolved "https://registry.yarnpkg.com/@types/anymatch/-/anymatch-1.3.1.tgz#336badc1beecb9dacc38bea2cf32adf627a8421a" @@ -3697,14 +3717,6 @@ resolved "https://registry.yarnpkg.com/@types/ejs/-/ejs-3.0.5.tgz#95a3a1c3d9603eba80fe67ff56da1ba275ef2eda" integrity sha512-k4ef69sS4sIqAPW9GoBnN+URAON2LeL1H0duQvL4RgdEBna19/WattYSA1qYqvbVEDRTSWzOw56tCLhC/m/IOw== -"@types/encoding-down@5.0.0": - 
version "5.0.0" - resolved "https://registry.yarnpkg.com/@types/encoding-down/-/encoding-down-5.0.0.tgz#0b5b90b93ac3aa75148f19508044e7bd36463557" - integrity sha512-G0MlS/+/U2RIQLcSEhhAcoMrXw3hXUCFSKbhbeEljoKMra2kq+NPX6tfOveSWQLX2hJXBo+YrvKgAGe+tFL1Aw== - dependencies: - "@types/abstract-leveldown" "*" - "@types/level-codec" "*" - "@types/eslint@^7.2.6": version "7.2.6" resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-7.2.6.tgz#5e9aff555a975596c03a98b59ecd103decc70c3c" @@ -3922,19 +3934,6 @@ dependencies: "@types/node" "*" -"@types/level-codec@*": - version "9.0.0" - resolved "https://registry.yarnpkg.com/@types/level-codec/-/level-codec-9.0.0.tgz#9f1dc7f9017b6fba094a450602ec0b91cc384059" - integrity sha512-SWYkVJylo1dqblkhrr7UtmsQh4wdZA9bV1y3QJSywMPSqGfW0p1w37N1EayZtKbg1dGReIIQEEOtxk4wZvGrWQ== - -"@types/levelup@4.3.0": - version "4.3.0" - resolved "https://registry.yarnpkg.com/@types/levelup/-/levelup-4.3.0.tgz#4f55585e05a33caa08c1439c344bbba93e947327" - integrity sha512-h82BoajhjU/zwLoM4BUBX/SCodCFi1ae/ZlFOYh5Z4GbHeaXj9H709fF1LYl/StrK8KSwnJOeMRPo9lnC6sz4w== - dependencies: - "@types/abstract-leveldown" "*" - "@types/node" "*" - "@types/listr@0.14.2": version "0.14.2" resolved "https://registry.yarnpkg.com/@types/listr/-/listr-0.14.2.tgz#2e5f80fbc3ca8dceb9940ce9bf8e3113ab452545" @@ -4054,6 +4053,11 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.9.tgz#79df4ae965fb76d31943b54a6419599307a21394" integrity sha512-EPZBIGed5gNnfWCiwEIwTE2Jdg4813odnG8iNPMQGrqVxrI+wL68SPtPeCX+ZxGBaA6pKAVc6jaKgP/Q0QzfdQ== +"@types/node@^16 || ^18": + version "18.16.18" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.16.18.tgz#85da09bafb66d4bc14f7c899185336d0c1736390" + integrity sha512-/aNaQZD0+iSBAGnvvN2Cx92HqE5sZCPZtx2TsK+4nvV23fFe09jVDvpArXr2j9DnYlzuU9WuoykDDc6wqvpNcw== + "@types/normalize-package-data@^2.4.0": version "2.4.0" resolved 
"https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" @@ -4164,14 +4168,6 @@ dependencies: "@types/node" "*" -"@types/rocksdb@3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/rocksdb/-/rocksdb-3.0.1.tgz#bb1e43ca3bfb5d7969211525979160ada09c00bc" - integrity sha512-fQhnc9CeRAi9dnDXlaaItYtm3FhqE8KZYhvj3zJve2pT57pdbySah3uELxrFt15jVcSoKsLHBuwUhU5TqQgnVw== - dependencies: - "@types/abstract-leveldown" "*" - "@types/node" "*" - "@types/sc-auth@*": version "5.0.0" resolved "https://registry.yarnpkg.com/@types/sc-auth/-/sc-auth-5.0.0.tgz#b9bca82783419233ed938f59e37ae940bfdb454a" @@ -4794,28 +4790,6 @@ abbrev@1, abbrev@^1.0.0: resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== -abstract-leveldown@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-7.0.0.tgz#1a8bc3b07f502793804d456a881dc15cedb9bc5d" - integrity sha512-mFAi5sB/UjpNYglrQ4irzdmr2mbQtE94OJbrAYuK2yRARjH/OACinN1meOAorfnaLPMQdFymSQMlkiDm9AXXKQ== - dependencies: - buffer "^6.0.3" - is-buffer "^2.0.5" - level-concat-iterator "^3.0.0" - level-supports "^2.0.0" - queue-microtask "^1.2.3" - -abstract-leveldown@~6.2.1: - version "6.2.3" - resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz#036543d87e3710f2528e47040bc3261b77a9a8eb" - integrity sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ== - dependencies: - buffer "^5.5.0" - immediate "^3.2.3" - level-concat-iterator "~2.0.0" - level-supports "~1.0.0" - xtend "~4.0.0" - accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" @@ -6045,7 +6019,7 @@ 
buffer-xor@^1.0.3: resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= -buffer@6.0.3, buffer@^6.0.3: +buffer@6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== @@ -6373,6 +6347,11 @@ cardinal@^2.1.1: ansicolors "~0.3.2" redeyed "~2.1.0" +cargo-cp-artifact@^0.1: + version "0.1.8" + resolved "https://registry.yarnpkg.com/cargo-cp-artifact/-/cargo-cp-artifact-0.1.8.tgz#353814f49f6aa76601a4bcb3ea5f3071180b90de" + integrity sha512-3j4DaoTrsCD1MRkTF2Soacii0Nx7UHCce0EwUf4fHnggwiE4fbmF2AbnfzayR36DF8KGadfh7M/Yfy625kgPlA== + case-sensitive-paths-webpack-plugin@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz#23ac613cc9a856e4f88ff8bb73bbb5e989825cf7" @@ -7785,14 +7764,6 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" -deferred-leveldown@~5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz#27a997ad95408b61161aa69bd489b86c71b78058" - integrity sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw== - dependencies: - abstract-leveldown "~6.2.1" - inherits "^2.0.3" - define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" @@ -7898,6 +7869,11 @@ detect-indent@^6.0.0: resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.0.0.tgz#0abd0f549f69fc6659a254fe96786186b6f528fd" integrity sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA== +detect-libc@^2.0.0: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd" + integrity sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w== + detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" @@ -8313,13 +8289,6 @@ errno@^0.1.3, errno@~0.1.7: dependencies: prr "~1.0.1" -errno@~0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.7.tgz#4684d71779ad39af177e3f007996f7c67c852618" - integrity sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg== - dependencies: - prr "~1.0.1" - error-ex@^1.2.0, error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" @@ -10580,11 +10549,6 @@ ignore@^5.2.0: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== -immediate@^3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.2.3.tgz#d140fa8f614659bd6541233097ddaac25cdd991c" - integrity sha1-0UD6j2FGWb1lQSMwl92qwlzdmRw= - immer@7.0.9: version "7.0.9" resolved "https://registry.yarnpkg.com/immer/-/immer-7.0.9.tgz#28e7552c21d39dd76feccd2b800b7bc86ee4a62e" @@ -10895,11 +10859,6 @@ is-buffer@^1.1.0, is-buffer@^1.1.5: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== -is-buffer@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" - integrity 
sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== - is-callable@^1.1.4, is-callable@^1.2.2, is-callable@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e" @@ -11093,11 +11052,6 @@ is-number@^3.0.0: dependencies: kind-of "^3.0.2" -is-number@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" - integrity sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ== - is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" @@ -12333,64 +12287,6 @@ lerna@6.4.1: nx ">=15.4.2 < 16" typescript "^3 || ^4" -level-concat-iterator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-3.0.0.tgz#416ddaf0c2ed834f006aa3124ee68906eb4769d4" - integrity sha512-UHGiIdj+uiFQorOrURRvJF3Ei0uHc89ciM/aRi0qsWDV2f0HXypeXUPhJKL6DsONgSR76Pc0AI4sKYEYYRn2Dg== - -level-concat-iterator@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz#1d1009cf108340252cb38c51f9727311193e6263" - integrity sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw== - -level-errors@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-2.0.1.tgz#2132a677bf4e679ce029f517c2f17432800c05c8" - integrity sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw== - dependencies: - errno "~0.1.1" - -level-iterator-stream@~4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz#7ceba69b713b0d7e22fcc0d1f128ccdc8a24f79c" - integrity 
sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q== - dependencies: - inherits "^2.0.4" - readable-stream "^3.4.0" - xtend "^4.0.2" - -level-supports@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-2.0.0.tgz#b0b9f63f30c4175fb2612217144f03f3b77580d9" - integrity sha512-8UJgzo1pvWP1wq80ZlkL19fPeK7tlyy0sBY90+2pj0x/kvzHCoLDWyuFJJMrsTn33oc7hbMkS3SkjCxMRPHWaw== - -level-supports@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-1.0.1.tgz#2f530a596834c7301622521988e2c36bb77d122d" - integrity sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg== - dependencies: - xtend "^4.0.2" - -leveldown@6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-6.0.0.tgz#3ec7f00463c45f8f7c8e68248d10ab299059c7ca" - integrity sha512-NEsyqpfdDhpFO49Zm9htNSsWixMa9Q9sUXgrBTaQNPyPo2Kx1wRctgIXMzc7tduXJqNff8QAwulv2eZDboghxQ== - dependencies: - abstract-leveldown "^7.0.0" - napi-macros "~2.0.0" - node-gyp-build "~4.2.1" - -levelup@4.4.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/levelup/-/levelup-4.4.0.tgz#f89da3a228c38deb49c48f88a70fb71f01cafed6" - integrity sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ== - dependencies: - deferred-leveldown "~5.3.0" - level-errors "~2.0.0" - level-iterator-stream "~4.0.0" - level-supports "~1.0.0" - xtend "~4.0.0" - leven@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" @@ -12831,7 +12727,7 @@ make-dir@^3.0.0: dependencies: semver "^6.0.0" -make-dir@^3.0.2: +make-dir@^3.0.2, make-dir@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" integrity 
sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== @@ -12921,11 +12817,6 @@ map-visit@^1.0.0: dependencies: object-visit "^1.0.0" -math-random@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c" - integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A== - md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" @@ -13506,11 +13397,6 @@ nanomatch@^1.2.9: snapdragon "^0.8.1" to-regex "^3.0.1" -napi-macros@^2.0.0, napi-macros@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" - integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg== - native-url@^0.2.6: version "0.2.6" resolved "https://registry.yarnpkg.com/native-url/-/native-url-0.2.6.tgz#ca1258f5ace169c716ff44eccbddb674e10399ae" @@ -13590,7 +13476,7 @@ node-forge@^0.10.0: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== -node-gyp-build@^4.2.0, node-gyp-build@~4.2.1: +node-gyp-build@^4.2.0: version "4.2.3" resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.3.tgz#ce6277f853835f718829efb47db20f3e4d9c4739" integrity sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg== @@ -15885,11 +15771,6 @@ querystringify@^2.1.1: resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== -queue-microtask@^1.2.3: - version 
"1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - quick-lru@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" @@ -15907,15 +15788,6 @@ ramda@~0.26.1: resolved "https://registry.yarnpkg.com/ramda/-/ramda-0.26.1.tgz#8d41351eb8111c55353617fc3bbffad8e4d35d06" integrity sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ== -randomatic@3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" - integrity sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw== - dependencies: - is-number "^4.0.0" - kind-of "^6.0.0" - math-random "^1.0.1" - randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" @@ -16676,15 +16548,6 @@ ripemd160@^2.0.0, ripemd160@^2.0.1: hash-base "^3.0.0" inherits "^2.0.1" -rocksdb@5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/rocksdb/-/rocksdb-5.1.1.tgz#ab5a996d4a4f24f84f609a53e1e2c332ee193f9d" - integrity sha512-eHQKJCa1gsvzK31nYJIwZvscIx/MSPbC4ipted2zdSv99OoJSzYCyaEbhujQFo7m+WVR0XC3xTT/parjBn2Uzw== - dependencies: - abstract-leveldown "^7.0.0" - napi-macros "^2.0.0" - node-gyp-build "^4.3.0" - rollup-plugin-babel@^4.3.3: version "4.4.0" resolved "https://registry.yarnpkg.com/rollup-plugin-babel/-/rollup-plugin-babel-4.4.0.tgz#d15bd259466a9d1accbdb2fe2fff17c52d030acb" @@ -17173,7 +17036,7 @@ shell-quote@1.7.2, shell-quote@^1.6.1: resolved 
"https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== -shelljs@^0.8.4: +shelljs@^0.8.4, shelljs@^0.8.5: version "0.8.5" resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" integrity "sha1-3gVUCNg2G+1mxmnS8ABTjO2O4gw= sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==" @@ -19766,7 +19629,7 @@ xmlchars@^2.2.0: resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== -xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1: +xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== From dedbc5daa54bcd3de0deb2249208b1f6010b9eb1 Mon Sep 17 00:00:00 2001 From: !shan Date: Tue, 1 Aug 2023 10:48:30 +0200 Subject: [PATCH 100/170] Merge Feature/6930 implement poa module to development (#8789) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Bootstrap PoA files and folders (#8456) * Bootstrap PoA files and folders * Update method names on module.ts * Replace .keep to index.ts for folders * Add bootstrap test file * Rename internal method to PoAInternalMethod * Implement events for PoA (#8464) * Bootstrap PoA files and folders * Update method names on module.ts * Replace .keep to index.ts for folders * Add bootstrap test file * Create events and register of the said event * Update enum name * Add stores to PoA module (#8466) * 🌱 Initialize stores * ♻️ Update names and register stores * 💅 
Rename snapshotStoreSchema->snapshotSchema * Define genesis PoA store schema & relevant types (#8501) * Define genesis PoA store schema & relevant types * `length' replaced with `minLength` & `maxLength` * Add missing $id * PoA Register Authority Command (#8496) * Implement Register Authority Commands * Renaming Commands * Remove extra files * Added Unit Test for RegisterAuthority * Put types back to PoA * Update code according to PR review and comments * Update chainID in test case to match with actual format * Update param names to be grammar-correct * Update AUTHORITY_REGISTRATION_FEE comments and test unit imports * Update $id for updateAuthority Schema * Change name of updateAuthoritySchema * Implement afterTransactionsExecute and _shuffleValidatorsList (#8523) * 🌱 Implement afterTransactionsExecute and _shuffleValidatorsList * ✅ Add unit tests for afterTransactionsExecute and shuffleValidatorList * ♻️ Use constants and simplify shuffleValidatorList * ✅ Adjust expectation setValidatorsParams to expect secondSnapshot threshold * 💅🏻 Rename snapshot keys * ♻️ Add missing logic and improve logic for afterTransactionsExecute * 💅🏻 Assert type on the arguments for readability * ♻️Naming issues and refine loop in test * Implement update authority command (#8527) * Implement Register Authority Commands * Renaming Commands * Remove extra files * Added Unit Test for RegisterAuthority * Put types back to PoA * Update code according to PR review and comments * Update chainID in test case to match with actual format * Update param names to be grammar-correct * Update AUTHORITY_REGISTRATION_FEE comments and test unit imports * Update $id for updateAuthority Schema * Implement Update Authority and Test Cases * Change name of updateAuthoritySchema * Update updateAuthoritySchema naming * Update update authority coding according to PR comments * Edited Buffer bits and test cases captions * Update cosmetic change on update_authority files * Add comments and better error message 
to UpdateAuthorityCommand * Minor changes to error message captions * Update error message caption * Implement PoA Update Generator Key (#8534) * Implement Register Authority Commands * Renaming Commands * Remove extra files * Added Unit Test for RegisterAuthority * Put types back to PoA * Update code according to PR review and comments * Update chainID in test case to match with actual format * Update param names to be grammar-correct * Update AUTHORITY_REGISTRATION_FEE comments and test unit imports * Update $id for updateAuthority Schema * Implement Update Authority and Test Cases * Change name of updateAuthoritySchema * Implement UpdateGeneratorKey and Test Cases * Update updateAuthoritySchema naming * Update update authority coding according to PR comments * Edited Buffer bits and test cases captions * Update cosmetic change on update_authority files * Update UpdateGeneratorKeyCommand to use senderAddress * Add comments and better error message to UpdateAuthorityCommand * Minor changes to error message captions * Update error message caption --------- Co-authored-by: !shan * Implement proof of authority genesis initialization (#8525) * Add PoA constants, schemas and types * Implement proof of authority genesis initialization * Update error messages in poa module initGenesis * Export types and fix snapshotstore type * Implement tests for poa genesis initialization * Remove unused validators from test * Update snapshot storage * Implement finalizeGenesisState * Update lint no-console place holder --------- Co-authored-by: Mitsuaki Uchimoto * Implement PoA module endpoints (#8581) * Implement PoA module endpoints * Add getRegistrationFee endpoint --------- Co-authored-by: Mitsuaki Uchimoto * Resolve PoA dependencies (#8584) * ♻️ Resolve dependencies and add configurable constant * ♻️ Use authorityRegistrationFee in endpoint and fix test * Add PoAMethod to src/index.ts * Use PoA dependency * Move setValidatorsParams to under registerValidatorKeys (#8672) 
Co-authored-by: !shan * Expose PoA Commands (#8699) Expose Commands on PoA * PoA Example (And update to application.ts) (#8663) * Add PoA example * Update PoA Genesis * Update README and remove uncessary genesis json * Update application.ts to accept PoS and PoA * Add Reward Module to PoA * Renaming poa to sidechain * Revert application.ts * Move setValidatorsParams to under registerValidatorKeys * Register module inside the PoA * Add RewardModule and revert module.ts (Changes in separate PR) * Add sign scripts for updateAuthority.ts * Update yarn.lock * Fix `registerValidatorKeys` (#8703) Co-authored-by: !shan * Fix `poa_getAllValidators` endpoint (#8700) * Update getAllValidators Endpoint * Update test cases for poa_getAllValidators * Sort response by name on getAllValidators function * Update PoA test cases * Update code per PR comments --------- Co-authored-by: Khalid Hameed * ♻️ Update interfaces used in PoA * ✅ Fix test and lint error --------- Co-authored-by: Franco NG Co-authored-by: sitetester Co-authored-by: Mitsuaki Uchimoto <36514357+mitsuaki-u@users.noreply.github.com> Co-authored-by: Mitsuaki Uchimoto --- examples/poa-sidechain/.eslintignore | 14 + examples/poa-sidechain/.eslintrc.js | 12 + examples/poa-sidechain/.gitignore | 45 + examples/poa-sidechain/.lintstagedrc.json | 5 + examples/poa-sidechain/.liskrc.json | 6 + examples/poa-sidechain/.prettierignore | 36 + examples/poa-sidechain/.prettierrc.json | 7 + examples/poa-sidechain/README.md | 37 + examples/poa-sidechain/bin/run | 6 + examples/poa-sidechain/bin/run.cmd | 3 + .../poa-sidechain/config/alphanet/config.json | 44 + .../config/alphanet/dev-validators.json | 1652 ++++++++++ .../config/alphanet/genesis_assets.json | 2799 +++++++++++++++++ .../config/alphanet/genesis_block.blob | Bin 0 -> 30475 bytes .../config/alphanet/passphrase.json | 3 + .../poa-sidechain/config/default/config.json | 46 + .../config/default/dev-validators.json | 1652 ++++++++++ .../config/default/genesis_assets.json | 1876 
+++++++++++ .../config/default/genesis_block.blob | Bin 0 -> 27339 bytes .../config/default/passphrase.json | 3 + examples/poa-sidechain/ecosystem.config.js | 35 + examples/poa-sidechain/jest.config.js | 41 + examples/poa-sidechain/package.json | 158 + .../poa-sidechain/scripts/extern_types.ts | 42 + examples/poa-sidechain/scripts/schema.ts | 39 + .../scripts/updateAuthority.json | 28 + .../poa-sidechain/scripts/updateAuthority.ts | 105 + examples/poa-sidechain/scripts/updateKey.json | 3 + examples/poa-sidechain/src/app/app.ts | 10 + examples/poa-sidechain/src/app/index.ts | 1 + examples/poa-sidechain/src/app/modules.ts | 44 + .../poa-sidechain/src/app/modules/.gitkeep | 0 examples/poa-sidechain/src/app/plugins.ts | 4 + .../poa-sidechain/src/app/plugins/.gitkeep | 0 .../poa-sidechain/src/commands/block/get.ts | 1 + .../src/commands/blockchain/export.ts | 1 + .../src/commands/blockchain/hash.ts | 1 + .../src/commands/blockchain/import.ts | 1 + .../src/commands/blockchain/reset.ts | 1 + .../src/commands/config/create.ts | 1 + .../poa-sidechain/src/commands/config/show.ts | 1 + .../poa-sidechain/src/commands/console.ts | 1 + .../src/commands/endpoint/invoke.ts | 1 + .../src/commands/endpoint/list.ts | 1 + .../src/commands/generator/disable.ts | 1 + .../src/commands/generator/enable.ts | 1 + .../src/commands/generator/export.ts | 1 + .../src/commands/generator/import.ts | 1 + .../src/commands/generator/status.ts | 1 + .../src/commands/genesis-block/create.ts | 15 + .../poa-sidechain/src/commands/hash-onion.ts | 1 + .../poa-sidechain/src/commands/keys/create.ts | 1 + .../src/commands/keys/encrypt.ts | 1 + .../poa-sidechain/src/commands/keys/export.ts | 1 + .../poa-sidechain/src/commands/keys/import.ts | 1 + .../src/commands/passphrase/create.ts | 1 + .../src/commands/passphrase/decrypt.ts | 1 + .../src/commands/passphrase/encrypt.ts | 1 + examples/poa-sidechain/src/commands/start.ts | 136 + .../src/commands/system/metadata.ts | 1 + .../src/commands/system/node-info.ts | 
1 + .../src/commands/transaction/create.ts | 22 + .../src/commands/transaction/get.ts | 1 + .../src/commands/transaction/send.ts | 1 + .../src/commands/transaction/sign.ts | 20 + examples/poa-sidechain/test/.eslintrc.js | 7 + examples/poa-sidechain/test/_setup.js | 4 + .../poa-sidechain/test/integration/.gitkeep | 0 examples/poa-sidechain/test/network/.gitkeep | 0 examples/poa-sidechain/test/tsconfig.json | 7 + examples/poa-sidechain/test/types.ts | 16 + .../poa-sidechain/test/unit/modules/.gitkeep | 0 examples/poa-sidechain/test/utils/config.ts | 10 + examples/poa-sidechain/tsconfig.json | 26 + framework/src/index.ts | 1 + .../poa/commands/register_authority.ts | 104 + .../modules/poa/commands/update_authority.ts | 176 ++ .../poa/commands/update_generator_key.ts | 74 + framework/src/modules/poa/constants.ts | 45 + framework/src/modules/poa/endpoint.ts | 111 + .../modules/poa/events/authority_update.ts | 40 + framework/src/modules/poa/events/index.ts | 13 + framework/src/modules/poa/index.ts | 16 + framework/src/modules/poa/internal_method.ts | 17 + framework/src/modules/poa/method.ts | 17 + framework/src/modules/poa/module.ts | 338 ++ framework/src/modules/poa/schemas.ts | 287 ++ .../modules/poa/stores/chain_properties.ts | 39 + framework/src/modules/poa/stores/index.ts | 18 + framework/src/modules/poa/stores/name.ts | 37 + framework/src/modules/poa/stores/snapshot.ts | 61 + framework/src/modules/poa/stores/validator.ts | 37 + framework/src/modules/poa/types.ts | 132 + framework/src/modules/poa/utils.ts | 43 + .../poa/commands/register_authority.spec.ts | 210 ++ .../poa/commands/update_authority.spec.ts | 420 +++ .../poa/commands/update_generator_key.spec.ts | 130 + .../test/unit/modules/poa/endpoint.spec.ts | 198 ++ .../modules/poa/genesis_block_test_data.ts | 197 ++ .../test/unit/modules/poa/module.spec.ts | 506 +++ framework/test/unit/modules/poa/utils.spec.ts | 50 + 101 files changed, 12363 insertions(+) create mode 100644 examples/poa-sidechain/.eslintignore 
create mode 100644 examples/poa-sidechain/.eslintrc.js create mode 100644 examples/poa-sidechain/.gitignore create mode 100644 examples/poa-sidechain/.lintstagedrc.json create mode 100644 examples/poa-sidechain/.liskrc.json create mode 100644 examples/poa-sidechain/.prettierignore create mode 100644 examples/poa-sidechain/.prettierrc.json create mode 100644 examples/poa-sidechain/README.md create mode 100755 examples/poa-sidechain/bin/run create mode 100644 examples/poa-sidechain/bin/run.cmd create mode 100644 examples/poa-sidechain/config/alphanet/config.json create mode 100644 examples/poa-sidechain/config/alphanet/dev-validators.json create mode 100644 examples/poa-sidechain/config/alphanet/genesis_assets.json create mode 100644 examples/poa-sidechain/config/alphanet/genesis_block.blob create mode 100644 examples/poa-sidechain/config/alphanet/passphrase.json create mode 100644 examples/poa-sidechain/config/default/config.json create mode 100644 examples/poa-sidechain/config/default/dev-validators.json create mode 100644 examples/poa-sidechain/config/default/genesis_assets.json create mode 100644 examples/poa-sidechain/config/default/genesis_block.blob create mode 100644 examples/poa-sidechain/config/default/passphrase.json create mode 100644 examples/poa-sidechain/ecosystem.config.js create mode 100644 examples/poa-sidechain/jest.config.js create mode 100755 examples/poa-sidechain/package.json create mode 100644 examples/poa-sidechain/scripts/extern_types.ts create mode 100644 examples/poa-sidechain/scripts/schema.ts create mode 100644 examples/poa-sidechain/scripts/updateAuthority.json create mode 100644 examples/poa-sidechain/scripts/updateAuthority.ts create mode 100644 examples/poa-sidechain/scripts/updateKey.json create mode 100644 examples/poa-sidechain/src/app/app.ts create mode 100644 examples/poa-sidechain/src/app/index.ts create mode 100644 examples/poa-sidechain/src/app/modules.ts create mode 100644 examples/poa-sidechain/src/app/modules/.gitkeep 
create mode 100644 examples/poa-sidechain/src/app/plugins.ts create mode 100644 examples/poa-sidechain/src/app/plugins/.gitkeep create mode 100644 examples/poa-sidechain/src/commands/block/get.ts create mode 100644 examples/poa-sidechain/src/commands/blockchain/export.ts create mode 100644 examples/poa-sidechain/src/commands/blockchain/hash.ts create mode 100644 examples/poa-sidechain/src/commands/blockchain/import.ts create mode 100644 examples/poa-sidechain/src/commands/blockchain/reset.ts create mode 100644 examples/poa-sidechain/src/commands/config/create.ts create mode 100644 examples/poa-sidechain/src/commands/config/show.ts create mode 100644 examples/poa-sidechain/src/commands/console.ts create mode 100644 examples/poa-sidechain/src/commands/endpoint/invoke.ts create mode 100755 examples/poa-sidechain/src/commands/endpoint/list.ts create mode 100644 examples/poa-sidechain/src/commands/generator/disable.ts create mode 100644 examples/poa-sidechain/src/commands/generator/enable.ts create mode 100644 examples/poa-sidechain/src/commands/generator/export.ts create mode 100644 examples/poa-sidechain/src/commands/generator/import.ts create mode 100644 examples/poa-sidechain/src/commands/generator/status.ts create mode 100644 examples/poa-sidechain/src/commands/genesis-block/create.ts create mode 100644 examples/poa-sidechain/src/commands/hash-onion.ts create mode 100644 examples/poa-sidechain/src/commands/keys/create.ts create mode 100644 examples/poa-sidechain/src/commands/keys/encrypt.ts create mode 100644 examples/poa-sidechain/src/commands/keys/export.ts create mode 100644 examples/poa-sidechain/src/commands/keys/import.ts create mode 100644 examples/poa-sidechain/src/commands/passphrase/create.ts create mode 100644 examples/poa-sidechain/src/commands/passphrase/decrypt.ts create mode 100644 examples/poa-sidechain/src/commands/passphrase/encrypt.ts create mode 100644 examples/poa-sidechain/src/commands/start.ts create mode 100644 
examples/poa-sidechain/src/commands/system/metadata.ts create mode 100644 examples/poa-sidechain/src/commands/system/node-info.ts create mode 100644 examples/poa-sidechain/src/commands/transaction/create.ts create mode 100644 examples/poa-sidechain/src/commands/transaction/get.ts create mode 100644 examples/poa-sidechain/src/commands/transaction/send.ts create mode 100644 examples/poa-sidechain/src/commands/transaction/sign.ts create mode 100644 examples/poa-sidechain/test/.eslintrc.js create mode 100644 examples/poa-sidechain/test/_setup.js create mode 100644 examples/poa-sidechain/test/integration/.gitkeep create mode 100644 examples/poa-sidechain/test/network/.gitkeep create mode 100644 examples/poa-sidechain/test/tsconfig.json create mode 100644 examples/poa-sidechain/test/types.ts create mode 100644 examples/poa-sidechain/test/unit/modules/.gitkeep create mode 100644 examples/poa-sidechain/test/utils/config.ts create mode 100644 examples/poa-sidechain/tsconfig.json create mode 100644 framework/src/modules/poa/commands/register_authority.ts create mode 100644 framework/src/modules/poa/commands/update_authority.ts create mode 100644 framework/src/modules/poa/commands/update_generator_key.ts create mode 100644 framework/src/modules/poa/constants.ts create mode 100644 framework/src/modules/poa/endpoint.ts create mode 100644 framework/src/modules/poa/events/authority_update.ts create mode 100644 framework/src/modules/poa/events/index.ts create mode 100644 framework/src/modules/poa/index.ts create mode 100644 framework/src/modules/poa/internal_method.ts create mode 100644 framework/src/modules/poa/method.ts create mode 100644 framework/src/modules/poa/module.ts create mode 100644 framework/src/modules/poa/schemas.ts create mode 100644 framework/src/modules/poa/stores/chain_properties.ts create mode 100644 framework/src/modules/poa/stores/index.ts create mode 100644 framework/src/modules/poa/stores/name.ts create mode 100644 
framework/src/modules/poa/stores/snapshot.ts create mode 100644 framework/src/modules/poa/stores/validator.ts create mode 100644 framework/src/modules/poa/types.ts create mode 100644 framework/src/modules/poa/utils.ts create mode 100644 framework/test/unit/modules/poa/commands/register_authority.spec.ts create mode 100644 framework/test/unit/modules/poa/commands/update_authority.spec.ts create mode 100644 framework/test/unit/modules/poa/commands/update_generator_key.spec.ts create mode 100644 framework/test/unit/modules/poa/endpoint.spec.ts create mode 100644 framework/test/unit/modules/poa/genesis_block_test_data.ts create mode 100644 framework/test/unit/modules/poa/module.spec.ts create mode 100644 framework/test/unit/modules/poa/utils.spec.ts diff --git a/examples/poa-sidechain/.eslintignore b/examples/poa-sidechain/.eslintignore new file mode 100644 index 00000000000..00a15e70c20 --- /dev/null +++ b/examples/poa-sidechain/.eslintignore @@ -0,0 +1,14 @@ +docs/ +examples/ +**/*.d.ts +jest.config.js +.eslintrc.js +coverage +benchmark +dist +tmp +build +scripts +config +test/_setup.js +ecosystem.config.js diff --git a/examples/poa-sidechain/.eslintrc.js b/examples/poa-sidechain/.eslintrc.js new file mode 100644 index 00000000000..12f565e1e3c --- /dev/null +++ b/examples/poa-sidechain/.eslintrc.js @@ -0,0 +1,12 @@ +module.exports = { + root: true, + parserOptions: { + project: './tsconfig.json', + tsconfigRootDir: __dirname, + }, + extends: ['lisk-base/ts'], + rules: { + '@typescript-eslint/member-ordering': 'off', + '@typescript-eslint/no-unsafe-argument': 'off', + }, +}; diff --git a/examples/poa-sidechain/.gitignore b/examples/poa-sidechain/.gitignore new file mode 100644 index 00000000000..c36928433e2 --- /dev/null +++ b/examples/poa-sidechain/.gitignore @@ -0,0 +1,45 @@ +# General +~ +.DS_Store +.project +__MACOSX/ +*.swp +*.swo +ssl/ +tmp/ + +# Build revision file generated while building a release +REVISION +npm-shrinkwrap.json + +# Dependency directories 
+tsconfig.tsbuildinfo +node_modules/ + +# Docs +docs/jsdoc/ + +# Logs +logs/* +logs.log +npm-debug.log +tmux-client-*.log +stacktrace* + +# IDE directories +.vscode/ +.idea/ +Session.vim + +# Config files +sftp-config.json +.secrets + +# Coverage directory used by tools like istanbul +.coverage/ + +# Local config file useful for development +config.local.json + +# Build Directory +dist diff --git a/examples/poa-sidechain/.lintstagedrc.json b/examples/poa-sidechain/.lintstagedrc.json new file mode 100644 index 00000000000..50d39da6d11 --- /dev/null +++ b/examples/poa-sidechain/.lintstagedrc.json @@ -0,0 +1,5 @@ +{ + "*.js": ["prettier --write", "eslint"], + "*.ts": ["prettier --write", "eslint"], + "*.{json,md}": ["prettier --write"] +} diff --git a/examples/poa-sidechain/.liskrc.json b/examples/poa-sidechain/.liskrc.json new file mode 100644 index 00000000000..0011a20d0c3 --- /dev/null +++ b/examples/poa-sidechain/.liskrc.json @@ -0,0 +1,6 @@ +{ + "commander": { + "version": "5.1.9" + }, + "template": "lisk-ts" +} diff --git a/examples/poa-sidechain/.prettierignore b/examples/poa-sidechain/.prettierignore new file mode 100644 index 00000000000..05fdf62598a --- /dev/null +++ b/examples/poa-sidechain/.prettierignore @@ -0,0 +1,36 @@ +# Files +Jenkinsfile* +Makefile +Dockerfile +LICENSE +.DS_Store +data/ +.idea +logs/ + +.gitkeep + +# rc files +.*rc + +## ignore files +.*ignore + +# Ignore extensions +*.png +*.sql + +## jest snapshot +*.snap +*.tsbuildinfo + +# project specific paths +dist/ +bin +tmp/ + +*.pid +*.gz +*.blob + +docker/* diff --git a/examples/poa-sidechain/.prettierrc.json b/examples/poa-sidechain/.prettierrc.json new file mode 100644 index 00000000000..7b23ef55098 --- /dev/null +++ b/examples/poa-sidechain/.prettierrc.json @@ -0,0 +1,7 @@ +{ + "printWidth": 100, + "singleQuote": true, + "trailingComma": "all", + "useTabs": true, + "arrowParens": "avoid" +} diff --git a/examples/poa-sidechain/README.md b/examples/poa-sidechain/README.md new file mode 
100644 index 00000000000..f50797d1d3a --- /dev/null +++ b/examples/poa-sidechain/README.md @@ -0,0 +1,37 @@ +# PoA Example + +This project was bootstrapped with [Lisk SDK](https://github.com/LiskHQ/lisk-sdk) + +### Start a node + +``` +./bin/run start +``` + +### Add a new module + +``` +lisk generate:module ModuleName ModuleID +// Example +lisk generate:module token 1 +``` + +### Add a new asset + +``` +lisk generate:asset ModuleName AssetName AssetID +// Example +lisk generate:asset token transfer 1 +``` + +### Add a new plugin + +``` +lisk generate:plugin PluginName +// Example +lisk generate:plugin httpAPI +``` + +## Learn More + +You can learn more in the [documentation](https://lisk.com/documentation/lisk-sdk/). diff --git a/examples/poa-sidechain/bin/run b/examples/poa-sidechain/bin/run new file mode 100755 index 00000000000..283c01038f5 --- /dev/null +++ b/examples/poa-sidechain/bin/run @@ -0,0 +1,6 @@ +#!/usr/bin/env node + +require('@oclif/core') + .run() + .then(require('@oclif/core/flush')) + .catch(require('@oclif/core/handle')); diff --git a/examples/poa-sidechain/bin/run.cmd b/examples/poa-sidechain/bin/run.cmd new file mode 100644 index 00000000000..cf40b543c96 --- /dev/null +++ b/examples/poa-sidechain/bin/run.cmd @@ -0,0 +1,3 @@ +@echo off + +node "%~dp0\run" %* \ No newline at end of file diff --git a/examples/poa-sidechain/config/alphanet/config.json b/examples/poa-sidechain/config/alphanet/config.json new file mode 100644 index 00000000000..62319a04e38 --- /dev/null +++ b/examples/poa-sidechain/config/alphanet/config.json @@ -0,0 +1,44 @@ +{ + "system": { + "dataPath": "~/.lisk/pos-mainchain", + "enableMetrics": true + }, + "rpc": { + "modes": ["ipc"] + }, + "genesis": { + "block": { + "fromFile": "./config/genesis_block.blob" + }, + "blockTime": 10, + "bftBatchSize": 103, + "chainID": "04000000", + "maxTransactionsSize": 15360 + }, + "generator": { + "keys": {} + }, + "network": { + "version": "1.0", + "seedPeers": [ + { + "ip": "127.0.0.1", + 
"port": 7667 + } + ], + "port": 7667 + }, + "transactionPool": { + "maxTransactions": 4096, + "maxTransactionsPerAccount": 64, + "transactionExpiryTime": 10800000, + "minEntranceFeePriority": "0", + "minReplacementFeeDifference": "10" + }, + "modules": {}, + "plugins": { + "reportMisbehavior": { + "encryptedPassphrase": "iterations=10&cipherText=5dea8b928a3ea2481ebc02499ae77679b7552189181ff189d4aa1f8d89e8d07bf31f7ebd1c66b620769f878629e1b90499506a6f752bf3323799e3a54600f8db02f504c44d&iv=37e0b1753b76a90ed0b8c319&salt=963c5b91d3f7ba02a9d001eed49b5836&tag=c3e30e8f3440ba3f5b6d9fbaccc8918d&version=1" + } + } +} diff --git a/examples/poa-sidechain/config/alphanet/dev-validators.json b/examples/poa-sidechain/config/alphanet/dev-validators.json new file mode 100644 index 00000000000..fe51b5a4a71 --- /dev/null +++ b/examples/poa-sidechain/config/alphanet/dev-validators.json @@ -0,0 +1,1652 @@ +{ + "keys": [ + { + "address": "lske5sqed53fdcs4m9et28f2k7u9fk6hno9bauday", + "keyPath": "m/44'/134'/0'", + "publicKey": "a3f96c50d0446220ef2f98240898515cbba8155730679ca35326d98dcfb680f0", + "privateKey": "d0b159fe5a7cc3d5f4b39a97621b514bc55b0a0f1aca8adeed2dd1899d93f103a3f96c50d0446220ef2f98240898515cbba8155730679ca35326d98dcfb680f0", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/0'", + "generatorKey": "b9e54121e5346cc04cc84bcf286d5e40d586ba5d39571daf57bd31bac3861a4a", + "generatorPrivateKey": "b3c4de7f7932275b7a465045e918337ffd7b7b229cef8eba28f706de8759da95b9e54121e5346cc04cc84bcf286d5e40d586ba5d39571daf57bd31bac3861a4a", + "blsKeyPath": "m/12381/134/0/0", + "blsKey": "92f020ce5e37befb86493a82686b0eedddb264350b0873cf1eeaa1fefe39d938f05f272452c1ef5e6ceb4d9b23687e31", + "blsProofOfPossession": "b92b11d66348e197c62d14af1453620d550c21d59ce572d95a03f0eaa0d0d195efbb2f2fd1577dc1a04ecdb453065d9d168ce7648bc5328e5ea47bb07d3ce6fd75f35ee51064a9903da8b90f7dc8ab4f2549b834cb5911b883097133f66b9ab9", + "blsPrivateKey": "463dd3413051366ee658c2524dd0bec85f8459bf6d70439685746406604f950d" + }, + 
"encrypted": {} + }, + { + "address": "lsk8dsngwh4n6hmf4unqb8gfqgkayabaqdvtq85ja", + "keyPath": "m/44'/134'/1'", + "publicKey": "0904c986211330582ef5e41ed9a2e7d6730bb7bdc59459a0caaaba55be4ec128", + "privateKey": "2475a8233503caade9542f2dd6c8c725f10bc03e3f809210b768f0a2320f06d50904c986211330582ef5e41ed9a2e7d6730bb7bdc59459a0caaaba55be4ec128", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/1'", + "generatorKey": "dd337fcb819073335382415bfdbf5e5b7e73126aafb0ac46479137328e72d438", + "generatorPrivateKey": "eaddefbdcb41468e73d7ae8e6c0b046de56f8829cbd3ea10c2abf0c74faa1598dd337fcb819073335382415bfdbf5e5b7e73126aafb0ac46479137328e72d438", + "blsKeyPath": "m/12381/134/0/1", + "blsKey": "aa5174668a4743d838fa3742092c744c3edd4ee64c535ce2a69eeae1c5f23029acd74853410867d873076639f4ce1cda", + "blsProofOfPossession": "ad79b935bd503402b83404125ef11fab81f4c6bef0688798473e430f892704b653209aaf81f16efca9965fad0850a3971662f33c25994568e1434f4f46901caa1c002cab18dff7337836617c372673714d63b01ec4db098f419c027015aa4c05", + "blsPrivateKey": "4856d774c133fc205f1950cb030eddc2286ba6662e8f5061d153a7b36d16781a" + }, + "encrypted": {} + }, + { + "address": "lskjtbchucvrd2s8qjo83e7trpem5edwa6dbjfczq", + "keyPath": "m/44'/134'/2'", + "publicKey": "b8d2422aa7ebf1f85031f0bac2403be1fb24e0196d3bbed33987d4769eb37411", + "privateKey": "03e7852c6f1c6fe5cd0c5f7e3a36e499a1e0207e867f74f5b5bc42bfcc888bc8b8d2422aa7ebf1f85031f0bac2403be1fb24e0196d3bbed33987d4769eb37411", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/2'", + "generatorKey": "3e63c0a5d4de4df114823934ceaa6c17a48e5a6650788cf1f63c826c984c0957", + "generatorPrivateKey": "c96d896fd601e71a61452465692e6f77c9f654af0c596d4d5a2285333ccc846e3e63c0a5d4de4df114823934ceaa6c17a48e5a6650788cf1f63c826c984c0957", + "blsKeyPath": "m/12381/134/0/2", + "blsKey": "8c141e5d769c22ec90122f42bef1d1e7af2d94c1da6844bd313fca2ccf0543eab5f8c6752dd47969dc34613801dfb293", + "blsProofOfPossession": 
"9681aa250d714befe61d71f239a9b4c09ee102addb3a5e2c884074c7ba763b5c21e53aa7b12518d32c9b874ba1910e7a0bf0bd23ae99f57f6f464403b1151b3521a7a369ff94118a436e6aa767bd462d9ca491dd3e253862c21ff078878c354e", + "blsPrivateKey": "05739256f97460ba695cb52abcc9f8d9d46d5ed052ccbb16c780c6fd44ac153b" + }, + "encrypted": {} + }, + { + "address": "lskau7uqo6afteazgyknmtotxdjgwr3p9gfr4yzke", + "keyPath": "m/44'/134'/3'", + "publicKey": "557f1b9647fd2aefa357fed8bead72d1b02e5151b57d3c32d4d3f808c0705026", + "privateKey": "985bc97b4b2aa91d590dde455c19c70818d97c56c7cfff790a1e0b71e3d15962557f1b9647fd2aefa357fed8bead72d1b02e5151b57d3c32d4d3f808c0705026", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/3'", + "generatorKey": "00245e599fdad13ed0b064c069c71c73caf868a4635c0143963a529807f8728c", + "generatorPrivateKey": "a4426b9facb99efcf6ad7702f02e3e57ea2dd6d5e4f5bbee25729595e012df8800245e599fdad13ed0b064c069c71c73caf868a4635c0143963a529807f8728c", + "blsKeyPath": "m/12381/134/0/3", + "blsKey": "aaec4e157b19c0a3f2965cc636f5f82cef9b3918c071e2c6e50f57ecb44587d58139595e8f4c1fc7f76b2f7c09b1b6d1", + "blsProofOfPossession": "866a031b5a2a6b0525053b2d870487ac2fd39cf2cf18ecf462bc19afc5ef52f129cf88624fac73057c5375004492dbfb0b8cacb906b3a7daa4d7edf99f10ab15a90b3b328e8ad6701e838a88351fecdfb5b32eebeb80fdeb8c0345d1b5257d7b", + "blsPrivateKey": "43b132328eec8064dcbd62f038ad73e372c12d94fdedad5a35a95cdd0ad858e5" + }, + "encrypted": {} + }, + { + "address": "lsksdfqvkbqpc8eczj2s3dzkxnap5pguaxdw2227r", + "keyPath": "m/44'/134'/4'", + "publicKey": "e5e4834c2c7e949ac6e97512b5ff5d44822376b1e54cae8c326de0873c0b72ad", + "privateKey": "6f2b2f6ef42f417af916fb2a29ae8c8d0c572219d7420927c2dcd336e21c9115e5e4834c2c7e949ac6e97512b5ff5d44822376b1e54cae8c326de0873c0b72ad", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/4'", + "generatorKey": "21f9d60315c1baeb513b5f7324a1211723d36948b64806541b8855988f86111f", + "generatorPrivateKey": 
"c467e3bbc6af24568c8a8a8ee29055c2704aab14549dd99f1f1d1cfccdad384421f9d60315c1baeb513b5f7324a1211723d36948b64806541b8855988f86111f", + "blsKeyPath": "m/12381/134/0/4", + "blsKey": "84912d2f185c2058be9ed201d970f435a408c8bb3a36c430f007b69632efb2f663b51df383be6eedb80c8768a70822bb", + "blsProofOfPossession": "aafdb397226d3a4a4cc3b7ac906ae7e3601310bd5d0e20a0682364312937e8e3e0c3b5846a53ee536cac2a2b3f556bff06c65ef24a32495dee9d38ee5b2012113d8f032d8dd0f3f5d9af50dbd307d0e7f66aaa165620d5292da91306b0a39aad", + "blsPrivateKey": "16f43c470d46b9a10a461328c9ee629b045cfd469dc3cb9c1ac9ba85a5af5b8a" + }, + "encrypted": {} + }, + { + "address": "lskvq67zzev53sa6ozt39ft3dsmwxxztb7h29275k", + "keyPath": "m/44'/134'/5'", + "publicKey": "c1e3177d1433ece7f8fcb607edc37df4fd37284f46081f846ca7852735b4145b", + "privateKey": "4d108ede8bce4330260360341229c608fcdfdf07b262cfdbdc3cb49a560ba71cc1e3177d1433ece7f8fcb607edc37df4fd37284f46081f846ca7852735b4145b", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/5'", + "generatorKey": "8b65dce85de8ed215a91477627b365ec017a01cd5a715337f772ba42715cc794", + "generatorPrivateKey": "fbdd344d5e73d45c50298c109d34f0da4eee8ca8068f893110c6a4a86bba05778b65dce85de8ed215a91477627b365ec017a01cd5a715337f772ba42715cc794", + "blsKeyPath": "m/12381/134/0/5", + "blsKey": "9006fc2c9d159b6890047e9b26c700d8c504e17b6fe476a2a1ac1477357c68eee332be587da425e37e22332348ed8007", + "blsProofOfPossession": "945ac6db93666aa21934d84c6ad897fe1acf1d208a17ec46b0ddf26cf6d9cdccef7db9eac682195ec47cb8e7a069bbe10706a4e1cce2012aadd311dafb270c9c810d80bc82c2b6c34ce236efac552fa0904b96533772f98e202f4e6f47c97f09", + "blsPrivateKey": "4adf92c505124ff3ff4f3b36fff3a2ce3d60953dbcb34b4c43ea93b82e17f970" + }, + "encrypted": {} + }, + { + "address": "lskfjd3ymhyzedgneudo2bujnm25u7stu4qpa3jnd", + "keyPath": "m/44'/134'/6'", + "publicKey": "dc5adaa7cc6e0598a4a6347ce9cb3f213835d863c377410c3eafa8b718807aa3", + "privateKey": 
"2926701eccc5232d51ed98a2bc9cebdd687d8a3760d3c5adb8cae7a434dbab2ddc5adaa7cc6e0598a4a6347ce9cb3f213835d863c377410c3eafa8b718807aa3", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/6'", + "generatorKey": "326cb34aa214c4952f646d93af8cfbe58ec74db76db54484b5a23918cba8743b", + "generatorPrivateKey": "b3bf887c6a4a646e444c877d2299b2aa1328251d68af051328e88eb9872e8de4326cb34aa214c4952f646d93af8cfbe58ec74db76db54484b5a23918cba8743b", + "blsKeyPath": "m/12381/134/0/6", + "blsKey": "96a70c8b1343511359f7205313eac8c73b2838e25eda58cf8c13fa1d2689aee3df70522bcbd36e0bde958409b80cc8ee", + "blsProofOfPossession": "89564da089fcc38e4973cf34b5a8abbe8e822bb59f05633156d9dc0b10f2aad8d4621ea66023ec2a10d6d581927af3bc0746cd8293ea22c8db0068c127d38c4c2dcfe777ffc03e773083fd0036894cce7c2596301381941523f4f2ae97bb79e9", + "blsPrivateKey": "01fcace0a39a0f12057671c9ca88f41811ae7cc6c928c4a79cb5e7e3883c17f3" + }, + "encrypted": {} + }, + { + "address": "lskqw45qy3ph9rwgow86rudqa7e3vmb93db5e4yad", + "keyPath": "m/44'/134'/7'", + "publicKey": "e5c559e55dbb69328dc765d732e3df31b60d243d4c1a240a3d99af413e8958c6", + "privateKey": "26e75ae42bb589e181b38ce31911d3a63e2b0d3ae1be0b29d61971c986906687e5c559e55dbb69328dc765d732e3df31b60d243d4c1a240a3d99af413e8958c6", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/7'", + "generatorKey": "1314b7d167d5829fb535d15dfb5216e10ad2e5b6a349ae347aec77317b6aa73f", + "generatorPrivateKey": "de317ea0e11dde876b6ef8f37298a0608eb78e987380da4777137b4661f023921314b7d167d5829fb535d15dfb5216e10ad2e5b6a349ae347aec77317b6aa73f", + "blsKeyPath": "m/12381/134/0/7", + "blsKey": "b40065dfa219e40c65c07d516158d722ec695abc91411ce57550c77fa2119e52b56cb74db7a1d805b631752e8f6b80be", + "blsProofOfPossession": "b7085c15521303140512fdea858231a040534a4b0c1dbbdb002c8df233634270d33e51c3699cf4956d165c0183f29a32070d8f4e00433ebcdfcae337a5f09f2c971ba97d5b35413ce032d2ec4084ed79efc917bdb75ded139fc9433df884a18e", + "blsPrivateKey": "3f78ff58a0462d09c20249fdd8b16dafc09bf5d41669a7355aaea5e9705d1c46" + }, 
+ "encrypted": {} + }, + { + "address": "lsk8vjsq5s8jan9c8y9tmgawd6cttuszbf6jmhvj5", + "keyPath": "m/44'/134'/8'", + "publicKey": "665b67a9bfa854ea7e58a1dbde618410d9c63e50204ac3a12a4cfdc44a903d95", + "privateKey": "e98c4711a330632bd012bb0d2f73e2b3d72635e3c13c54edd9b9de6dcd6fc73f665b67a9bfa854ea7e58a1dbde618410d9c63e50204ac3a12a4cfdc44a903d95", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/8'", + "generatorKey": "00110f493d122a73628a518842e99591b91def4ef9fbd58e1b6458950da5a776", + "generatorPrivateKey": "eace487ec72fbfc569c3680713146fc354678533fb06de639b6d8a0e658ac5e200110f493d122a73628a518842e99591b91def4ef9fbd58e1b6458950da5a776", + "blsKeyPath": "m/12381/134/0/8", + "blsKey": "837e0759968b1ed95789252d1e731d7b127c9a53a74e86f3ca3d65d71cf666f2208baa782a42c45d4132630100a59462", + "blsProofOfPossession": "b97607b1478f17877b4c8042530763894dd7b79f8bbf5ca0883d08b94dc8a11cc2c2a73123160e3b01da692fb071f5fe0d808426604b5ad8aadebda9b02710698158254f6f1d822c2c9bae5c081101806e9220d79c547391e6fc6d8f26094dc7", + "blsPrivateKey": "2cf343ea5097fe55d1d1f054a76dc2766c88acadb8b2156318fc5b56f76e5200" + }, + "encrypted": {} + }, + { + "address": "lskchcsq6pgnq6nwttwe9hyj67rb9936cf2ccjk3b", + "keyPath": "m/44'/134'/9'", + "publicKey": "2c40d2354c023409c24d16dce668ae26930a675b274ae8409a0c67a2f16672e0", + "privateKey": "b1863cba481c0b16ca83b0257d71964d1ade9cb2b6895f78c4686c793c7cf5842c40d2354c023409c24d16dce668ae26930a675b274ae8409a0c67a2f16672e0", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/9'", + "generatorKey": "be4e49ea7e57ede752ce33cb224f50277552f9085a551005255ee12a9b4ca68d", + "generatorPrivateKey": "8210871092519d73ea2e2645f57333d01bfdb7e553ef188b4d57e985e461be79be4e49ea7e57ede752ce33cb224f50277552f9085a551005255ee12a9b4ca68d", + "blsKeyPath": "m/12381/134/0/9", + "blsKey": "8fd004c33814c3b452d50b2bf6855eeb03e41552c6edd50b76dee57007a34cf987da1e06425cf498391e6831d1bf6851", + "blsProofOfPossession": 
"a0e34bdc7dc39e09f686d6712fd0e71c61c8d06dfedbdbb9ed77c821c22d6c87f87e39e48db79aa50c19904933abb11a0b07659317079ae8f2db6e27b9139ce0830faa8dad2dcae2079f64781b0516be825b2d84689080bb8219a5ec72ba80f7", + "blsPrivateKey": "3d5f026eb2fb39cecc763f052695f75cdf52d3382148abf49a03b6f84ef9f075" + }, + "encrypted": {} + }, + { + "address": "lskc22mfaqzo722aenb6yw7awx8f22nrn54skrj8b", + "keyPath": "m/44'/134'/10'", + "publicKey": "88da43d0f056dd666cf2a8ae37db58e28bba3ae0b954930674ebe5dc03311e99", + "privateKey": "f1c8bf737f8e537dcdf202e8de94e138945d9bf9bd70ed700fcd0247bda8104b88da43d0f056dd666cf2a8ae37db58e28bba3ae0b954930674ebe5dc03311e99", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/10'", + "generatorKey": "671c72129793eb5801273ff580ce3d4c78d89fc8b4fb95b090a9af0a9a647a41", + "generatorPrivateKey": "ef19cef8e2f025de4d923fb976f5dc5ab4d5fd0e1c935f3d44e8722e6a036ffd671c72129793eb5801273ff580ce3d4c78d89fc8b4fb95b090a9af0a9a647a41", + "blsKeyPath": "m/12381/134/0/10", + "blsKey": "a38d728c1c1023651b031835818d17d0665d1fbabd8e62da26ca53f290620c23fe928244bcbcbb67412344013017cb53", + "blsProofOfPossession": "b5d455bb358eff87779b296f23a2fc9abc9d8f3ecb8ed0d9af3e23066e653a58b189c11b4a3980eaeaaa85ffcc240795187f6e8a0e8e8a2837bc20d485e1d3159c2d581614d72f94bbd049e5a9f45c0302851c87aa3c3853d8962ed75d140234", + "blsPrivateKey": "2e3c200c9927504eaab6dcb3777d394aa0d5e7c8a85e09f102bfe84b311f6eb6" + }, + "encrypted": {} + }, + { + "address": "lskezdab747v9z78hgmcxsokeetcmbdrpj3gzrdcw", + "keyPath": "m/44'/134'/11'", + "publicKey": "46ddcc48cc566faedd278169c1327bef337e32044320291f452aa60327c2cd2f", + "privateKey": "2c4f8a875c3850d8aacdb2643ce32ac3a20d61e24c69c7cba1e6315592992e1846ddcc48cc566faedd278169c1327bef337e32044320291f452aa60327c2cd2f", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/11'", + "generatorKey": "44de3820f1a1a7351953d2d000f29cb7bffecf30582a8b3da2cb80c83b9eceef", + "generatorPrivateKey": 
"e2fec1ce757b5865797955e9fbe074224b67ce9fe1e0f5df6ed633745da3540a44de3820f1a1a7351953d2d000f29cb7bffecf30582a8b3da2cb80c83b9eceef", + "blsKeyPath": "m/12381/134/0/11", + "blsKey": "a03ba0f1d6bf9378681b9d96dbe8176cc0ab2a424154cbbe325fc279d02cf58bc15de966cb1e272312ba2b6db31a7f05", + "blsProofOfPossession": "a20a8edd978fe911da6c933d486cb9af770179ef5ee21ad869c4c35e63103cfc2ac17350ee2d35b4bbd487193cdb33ab0116fdf2f078f289fae2922f6a7e372ef8ea543d52ae74ae395dccf2dec2c40e6596c807a14c9fce45b320321f68c612", + "blsPrivateKey": "6aa2aafb57bf3d0038bd7b0a9fd88632a6be33e51a8eeee87432d84b72dbbab0" + }, + "encrypted": {} + }, + { + "address": "lsknddzdw4xxej5znssc7aapej67s7g476osk7prc", + "keyPath": "m/44'/134'/12'", + "publicKey": "86ae660dcf148c829a17364f0fc9f7f61cb5efde7c10598923cfec376c346492", + "privateKey": "dd495f4d08928547ab5d2b39fc934e31a052181f338e0a723bc51f4305cd908c86ae660dcf148c829a17364f0fc9f7f61cb5efde7c10598923cfec376c346492", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/12'", + "generatorKey": "3c19943d614f67309dd989e2e1bdeade5ea53b0522eac3d46b9e7f68604a874d", + "generatorPrivateKey": "3fbbad2694492781f334e0a8c9a03827ce3139f5cf1c17fcf410a7d6ec0a3b653c19943d614f67309dd989e2e1bdeade5ea53b0522eac3d46b9e7f68604a874d", + "blsKeyPath": "m/12381/134/0/12", + "blsKey": "8ae81737f7b1678ece4b06db3ee1d633637da3c02cf646cdb0c7c1dae5f9eea41f2384fca8b0b12033d316ee78ea3e94", + "blsProofOfPossession": "a5150c19ac23dc15f660d9612be5f9591c1a5fc892e9f8b267de6bd39da84f254b6644e8c0f294900e5e9b7c9ecf3f260d902a56af7db5a59083eda08dd3ff083e2a07ba5d34f25312621f8686358dd2a50dcdc879eb0f9d50ff2fdc704e7d9a", + "blsPrivateKey": "0f0bb8d3299a807f35029011a71e366e134d6288a41d5cae85844b3f33e2b274" + }, + "encrypted": {} + }, + { + "address": "lskffxs3orv2au2juwa69hqtrmpcg9vq78cqbdjr4", + "keyPath": "m/44'/134'/13'", + "publicKey": "159c3170dfc8df2820e9c953ecceeaa8d8746af54687c4c266f654a3a1dd1714", + "privateKey": 
"d470a6f2a03a4bc359727bb957fea1efcb07ec0e07a143388d36b40d76f220c7159c3170dfc8df2820e9c953ecceeaa8d8746af54687c4c266f654a3a1dd1714", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/13'", + "generatorKey": "4e54056fabe183ab645962cf0b70e658d0eae506c4ade8756652ca7f76733227", + "generatorPrivateKey": "c35fe47d21ad0d2edc953eb17e27ce9532f30f35ba2d90e9ddfdacc06b1cfb124e54056fabe183ab645962cf0b70e658d0eae506c4ade8756652ca7f76733227", + "blsKeyPath": "m/12381/134/0/13", + "blsKey": "a3e2b645a315827618e58c1eb66dfef3744c8111a0c7b0e8535a3ec31d78ea2630646fea1da5609988c5d88997d663fb", + "blsProofOfPossession": "b55d1c525f96bba45cbefbcadad16279c9f61f790dfc3e3c824003139f9994200079faf573eddb863c6ba1fd9b7d7364146e3f20579b065355c75691e06be2c7304fe48d32fbfcb5ef38f8ecaa6905e9ca6a7c1124c45a6ab2b06668cb3decc9", + "blsPrivateKey": "58ef88d198c15101e9813bb963807ad43453422c76ff0a645e44851b482f417f" + }, + "encrypted": {} + }, + { + "address": "lskf7a93qr84d9a6ga543wernvxbsrpvtp299c5mj", + "keyPath": "m/44'/134'/14'", + "publicKey": "37002d59f3e5b66cac1a0598ea21c3360059afbd6bc6f298939cdae03a3db882", + "privateKey": "6c8d002f2b58e11940eb5c79fae119574ccda401c71cc8b451d2783d0286f91e37002d59f3e5b66cac1a0598ea21c3360059afbd6bc6f298939cdae03a3db882", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/14'", + "generatorKey": "ac34c0731cddab10726e634cec30294f831af045a0614733ac683ccdb6bc7eab", + "generatorPrivateKey": "1c91906bbd73352db1e4f89344b0851462962db0a11864a63a8ecfd805182935ac34c0731cddab10726e634cec30294f831af045a0614733ac683ccdb6bc7eab", + "blsKeyPath": "m/12381/134/0/14", + "blsKey": "a7283bff41249c3d2a0f065a27448a4c5acefaece74e51ec432c418c4bc8e6f0eb60160feec4729b9c0b933e9ec5e528", + "blsProofOfPossession": "86f1ac081ee08568266dc39727540a5d50f03e544f73d9a3ca60d87cfe9b6718832e07b2720d42e0e818c5fe2d45099a0774af1e6b123b41a3eb7eb3a1443d248a535fe9ef93f0027a8e8f44686dc33d677b79251c22022675395a347d0f3dbb", + "blsPrivateKey": "1f14d0e79b00554226cd7655f10eb22d5a5452d23665a8d06219b303e9595211" 
+ }, + "encrypted": {} + }, + { + "address": "lskfx88g3826a4qsyxm4w3fheyymfnucpsq36d326", + "keyPath": "m/44'/134'/15'", + "publicKey": "6877e45fbe5b009d364071a1d282ebab1c1e34307c92e698d1ffb6ceb98f09e3", + "privateKey": "2afa9923109b1d4111ccf8678ff62bd63dbc97f69b6fb251442ec6b9140170b96877e45fbe5b009d364071a1d282ebab1c1e34307c92e698d1ffb6ceb98f09e3", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/15'", + "generatorKey": "028a30837b7eec19b02b06c3c2f4065290285e40a4870a677664fee3fe76d9be", + "generatorPrivateKey": "7ff68b39611f7d7b8fdc05226846abfdbbdb62becfb15032db25fe9281ebc71e028a30837b7eec19b02b06c3c2f4065290285e40a4870a677664fee3fe76d9be", + "blsKeyPath": "m/12381/134/0/15", + "blsKey": "93bddb296ef4dd5c832486b4603c1ed13805d2df1c6c2f95c8af4ae38467f1e741c1c2fbbd5f8e927b54250bffdf8536", + "blsProofOfPossession": "923415dc1db9b46715d284bd2a3f12313a24c1352bf0dfcdce2e0e0475fe0343d5cc9e463d5f04b99cb367e30e89f1371280d5897a0103658d710b07f8d9d3d8754043241a753dce60f2bdadcb9249b334e6f5a395cabfdb187f2739b512d46f", + "blsPrivateKey": "21aa5cd0043608b6b020589a039bf5b66f32bd66c84f311f22c49a53c08d6b4d" + }, + "encrypted": {} + }, + { + "address": "lskux8ew6zq6zddya4u32towauvxmbe3x9hxvbzv4", + "keyPath": "m/44'/134'/16'", + "publicKey": "89e1bad75bed903096f63cfd6c27386f91b58910dd6fcbafcc66ac084b289702", + "privateKey": "b3552dadc9e7121c89f4a0eccdbfec423078af46a926913764c66496b3ed7fe689e1bad75bed903096f63cfd6c27386f91b58910dd6fcbafcc66ac084b289702", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/16'", + "generatorKey": "563aa06b554beea30fc4455ae51e0954051a3457315b2370fde9c22d3233b522", + "generatorPrivateKey": "34c7762f0fef6090c2832a3ccaf40ef373530e9930f46746d4e3f3236f627fe6563aa06b554beea30fc4455ae51e0954051a3457315b2370fde9c22d3233b522", + "blsKeyPath": "m/12381/134/0/16", + "blsKey": "94da5ec9da5eabf2ab184de1e0ee10f63f721897475acd59c3c53adc51a9b39b0f4fa28573fcc309e576dba658425dbd", + "blsProofOfPossession": 
"a672d269ec605e04065fc0da8e6f520d0273b1c57a754409d9fb25cef1be67b8583fa683e27c0284c31105045f395c0c142d0648420b9b209fa88fa13025ba2b3887e04e3fbae1db6e5941ade41713a4384c139e47e72a68c964c4a5c0886d25", + "blsPrivateKey": "651060d1b4a47d4f7c036e4649f84d42885db5ea5b4b26f04498ab805f4a2634" + }, + "encrypted": {} + }, + { + "address": "lskp2kubbnvgwhw588t3wp85wthe285r7e2m64w2d", + "keyPath": "m/44'/134'/17'", + "publicKey": "32ad0d0c9f9f5b2fa4605ff4c072ec4bcf2d64f0e0046fc9df247b5cad952a87", + "privateKey": "3c4fa6c215f89226083979c01be72633b7fdeae34a2679588dc6cb41cd811f8c32ad0d0c9f9f5b2fa4605ff4c072ec4bcf2d64f0e0046fc9df247b5cad952a87", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/17'", + "generatorKey": "56d64ef16324f92efce8b0a6ee98b2925dc485d45675b2012bbf6a96d7431a36", + "generatorPrivateKey": "a105df9082f9ab10633967414b3629bb9218587d8561dca4acde6fa414a890b956d64ef16324f92efce8b0a6ee98b2925dc485d45675b2012bbf6a96d7431a36", + "blsKeyPath": "m/12381/134/0/17", + "blsKey": "98f83f66e857d954d5c5a49403e5b3a622e1bb855d785845e72faf0f7dd03ed3fd2f787a38c57f6968accaf780fd41fe", + "blsProofOfPossession": "b3131f0229df11964daba47a79729542f10672b36db017002df90d2cc6a79c8b44d032935bd214bdf69a8db181e4315a15de71a2e6802442536143c3ace9886248d502d6f38f9ea5bad26d4cee729b909d6cbde541c35313598957ddda08de15", + "blsPrivateKey": "1a835401bf4776f55c3ef62c91506f5ae6a51343ab54e83179ffbeee53ad8e7c" + }, + "encrypted": {} + }, + { + "address": "lsknatyy4944pxukrhe38bww4bn3myzjp2af4sqgh", + "keyPath": "m/44'/134'/18'", + "publicKey": "4033f18959c6b6c51c5d60321691f462b491d00912c640d0bd5cd361e50758b9", + "privateKey": "2a7743838c3e637370fcd980a7f757d54b7ec2f417d339a384405fdcd0ac71724033f18959c6b6c51c5d60321691f462b491d00912c640d0bd5cd361e50758b9", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/18'", + "generatorKey": "f8d382ac4f19ffe2ac2fa91794b65dc4c03389cbb2ea65bab50379a12e0f98fb", + "generatorPrivateKey": 
"a7b7b85bab2f2d4471f3ff944b16ca636353f7d8af66f085d290ad14d8b62eeaf8d382ac4f19ffe2ac2fa91794b65dc4c03389cbb2ea65bab50379a12e0f98fb", + "blsKeyPath": "m/12381/134/0/18", + "blsKey": "b0d3f0d142131962d9ab7505a3ca078c1947d6bb2972174988feddc5d4d9727927ff79290af7e1180a913a375da9b618", + "blsProofOfPossession": "90f81a87982cb983aae8c240f12c77306501bf67dcb031161cb2787ecbecfdc0ca4e62365f750714b9b0a64c10411058105bef1a725ece1c0e7c45b7e1526494d5a02ceaa4f624116a91188e7ca2503e0ae17748b11b05cd79ccc204d20e418f", + "blsPrivateKey": "3f132150625f830a749f9d98639ecf79ef6796b22e31c1b3b0284961ea68fb37" + }, + "encrypted": {} + }, + { + "address": "lskwdqjhdgvqde9yrro4pfu464cumns3t5gyzutbm", + "keyPath": "m/44'/134'/19'", + "publicKey": "63b9114c5d10b1cb818e6c3b6e4adae2a3d95e1a32d78f2b2c31c02e41dbcbef", + "privateKey": "a8f11d66e15e48150ed4226e06090d308b87a52f1e3ef5e2ccf41320177830ae63b9114c5d10b1cb818e6c3b6e4adae2a3d95e1a32d78f2b2c31c02e41dbcbef", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/19'", + "generatorKey": "902b7ed4708c476c7f0e96825cb06f95cbc86953130575d2c4589d8e3dc2f69c", + "generatorPrivateKey": "922ac8b034a28c0941cf74105c9b3780d1a790b3321f163b203d678ef84d9c9e902b7ed4708c476c7f0e96825cb06f95cbc86953130575d2c4589d8e3dc2f69c", + "blsKeyPath": "m/12381/134/0/19", + "blsKey": "a397bb33263b2850758a1b144401b741c1278b302eb8d27be6c61363d9cedafcabe05fbd7d9ce5e75a7078972d397e9b", + "blsProofOfPossession": "b22ed60a951702ec7bfd85482e59703af76c4c79fe2d3a3b81e737d53746543587d2932fcd5559d56f6530bfe48d23f5093aa30f3e299733cb56151175d22e21895ada290521908536d71480f1066bbeec7ab803376a4a81e4d7ec3bb4d71dc0", + "blsPrivateKey": "0dac58ccfee182a3e2eeb2ca51ea8c8d9e7c5db1a6535fd3ef19b041096fa39a" + }, + "encrypted": {} + }, + { + "address": "lskewnnr5x7h3ckkmys8d4orvuyyqmf8odmud6qmg", + "keyPath": "m/44'/134'/20'", + "publicKey": "ca0ebbb82059cbcdabf64d9a69fbac54e1059c88a2c3edab7ea6aff700595f3d", + "privateKey": 
"5d574dc371a6503cbe75dd1c79a5de3b93c570d42f0b12a8b5edb8b265205668ca0ebbb82059cbcdabf64d9a69fbac54e1059c88a2c3edab7ea6aff700595f3d", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/20'", + "generatorKey": "bf5f4408df7a1cde279b3cfe7ba6c2e2600a4bb90d883b98ef8048ec344221e0", + "generatorPrivateKey": "bb82e9722b03ced00e2eefec45c84c54ec9a0627d679e02df5fe0933a1511899bf5f4408df7a1cde279b3cfe7ba6c2e2600a4bb90d883b98ef8048ec344221e0", + "blsKeyPath": "m/12381/134/0/20", + "blsKey": "81f3810e7567ba9e1aa9fab7d5914a1f2ac8b11d952872b398930836f80395c934bd6e71c291193458de7de4382c913f", + "blsProofOfPossession": "a67d9d0708496d13f45fa3d3940954bdfdfa69814554a5618a388cab03a5e82210171f06b72b03966c8a5bd8fe3b235e06de2fc4c45333395c8e10dba086a4f50efe3a7f87f741346c07b22de2ba49eedc521cf53fab31e2033175ff3ca00f08", + "blsPrivateKey": "28934cd2f129730f86b488c07bd390b67ae9642fb98c8c7d880bfc7daa44f863" + }, + "encrypted": {} + }, + { + "address": "lsk4nst5n99meqxndr684va7hhenw7q8sxs5depnb", + "keyPath": "m/44'/134'/21'", + "publicKey": "038d1b2d152be754c4140fa7386439a0b31ee8acf9d5d90cdbde9f39e1fd8ab9", + "privateKey": "e764112ca6647920370c68e381f82629356667db347d90fe9a3ec777c3151478038d1b2d152be754c4140fa7386439a0b31ee8acf9d5d90cdbde9f39e1fd8ab9", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/21'", + "generatorKey": "71ce039f0e4502ff56ca8d33f7ba5ba5392dd7915516b2d87eb777edef454377", + "generatorPrivateKey": "6e9ffbb5c17d86c3f54fc0c4fe8b48cbb3f7148dd8639304f94ed3be088f7da571ce039f0e4502ff56ca8d33f7ba5ba5392dd7915516b2d87eb777edef454377", + "blsKeyPath": "m/12381/134/0/21", + "blsKey": "a1a95b1526c3426ccd03f46199d452c5121481cc862a43bfe616c44662b9a7fa460fcdc5f97072754296e6da7023e078", + "blsProofOfPossession": "942c76c56af0112baa7a11bb8875a2336b321e85de56fd4267e97f3fb142445648a54c97ed22e5860fe5b0e5ef240599028d4009d091ad96ad727914532e45ff9eb44303b337f44bf5ed3ac796e6e22a9ee29138bada893f89f3bebc1a4daad5", + "blsPrivateKey": "11aa8b4f68e3d7c2c0d6081f8a207cbcb0dec199362e978aa8316e1a03410e02" 
+ }, + "encrypted": {} + }, + { + "address": "lsksmpgg7mo4m6ekc9tgvgjr8kh5h6wmgtqvq6776", + "keyPath": "m/44'/134'/22'", + "publicKey": "e37c2947f15c02d4f6928aee7320c911ec269248f2dcd6e35f15d0e85e084a95", + "privateKey": "247b7f47bbf3be42e2bf801c6bf8c141973d8568239fd57d1ea7f3ce673bb8d7e37c2947f15c02d4f6928aee7320c911ec269248f2dcd6e35f15d0e85e084a95", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/22'", + "generatorKey": "f17b9b3bdee2ef63c8fb52d85ae07516133749a1d659bd032c3a078aca65ce7a", + "generatorPrivateKey": "48811bcc2a0c1cccdcbe7100863bfd435b904ad5607add183b43481cd1d19ae4f17b9b3bdee2ef63c8fb52d85ae07516133749a1d659bd032c3a078aca65ce7a", + "blsKeyPath": "m/12381/134/0/22", + "blsKey": "96aa1c639724f5559fb1ebbe5d218511fe0fbfe6681190cd953677c6b63c0e17ac5d9f09844845cfecbb4ab4bd5a5749", + "blsProofOfPossession": "82a60d6a2432fd15c7697094a89ed34a30dc2daa2b460bdb0fe3269362e1d85c79a3d2aa9ba3ffa5b1e80f983933c96f1402e95d34fb656d20f368428ba93539191319c70e6cf6f15c5cb9df9235d115d06e0e00d7a1bf64db1433ac6acb68a6", + "blsPrivateKey": "3aea7d1b6bb1026123989eca287cdd69d2caade596840b42c677ad05ef9fd259" + }, + "encrypted": {} + }, + { + "address": "lskf6f3zj4o9fnpt7wd4fowafv8buyd72sgt2864b", + "keyPath": "m/44'/134'/23'", + "publicKey": "2ecda8618228e5679127a028d832d344f658d4c6b654b1f44bb07c6ebed39568", + "privateKey": "38d40c0a9af6f4bcf6ef3ae1a4a2002c76dfacf4872664aea0628724c3990b392ecda8618228e5679127a028d832d344f658d4c6b654b1f44bb07c6ebed39568", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/23'", + "generatorKey": "8cab5125c910702b66a83240cf836b10a0f2dc3000536799300ed8f1ed9a26ac", + "generatorPrivateKey": "ee5bb2ad10169758a9adb196d5b038870e1f345f3f3588ff64bc6abc44e074718cab5125c910702b66a83240cf836b10a0f2dc3000536799300ed8f1ed9a26ac", + "blsKeyPath": "m/12381/134/0/23", + "blsKey": "92590fccb8c847a6957213682bb798d7d18a368515f070537e1f6cfd45d8dfc50863105db9d46189b92c0e0d009fe09d", + "blsProofOfPossession": 
"b0aa8214fd746ec04d9cc97e9641a7ad796ed12ef08c9227b5358cf3bd9f049af2ad5376055361c34d265e5d0cf3518d05113928f487bf17012d6ec4deb53e5112b72f2e4d8dc8eed4f68514a9c6bf735c9ccb9dade32ed589bea8e677135302", + "blsPrivateKey": "37aa79f3bad6f99cab62b65498dd3c1bb08efc8c99fca5e76d1ee65575a5e767" + }, + "encrypted": {} + }, + { + "address": "lskrgqnuqub85jzcocgjsgb5rexrxc32s9dajhm69", + "keyPath": "m/44'/134'/24'", + "publicKey": "7106c368f30be7c415f8259ada56e59d9af5a143ed0a03eb5988ae1a427d8ad0", + "privateKey": "8accd9d16d0a607b6425dd86f6d54e21f121919b66bc5b12157e861e8130e8457106c368f30be7c415f8259ada56e59d9af5a143ed0a03eb5988ae1a427d8ad0", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/24'", + "generatorKey": "55d4c0e745954f0fba9629b346055060418961e7edce58c77bf2bcfc7f753d42", + "generatorPrivateKey": "71ed13fc516989f54498bc28ed3b5119eef180666eb2574a07cdb56b492b876c55d4c0e745954f0fba9629b346055060418961e7edce58c77bf2bcfc7f753d42", + "blsKeyPath": "m/12381/134/0/24", + "blsKey": "ad250adf40b559d765bb51d65340fe38de9e4cbc839b6e6509d99bb9bb3f89be1bbb96d75f709f2ae9e715e6e6ce38a4", + "blsProofOfPossession": "8943f42818d3c3374d43d1aa0b427436f4edec3e760f07aea2990b99eb3ef69952d580df862ad9034062fab57c548164143bd3b77d16ae74fd8fb84518983dfd015146ac9d0503c858f0022591345c077656e5af22cc78f1d35a02ad1e74c8c4", + "blsPrivateKey": "0e4d854f9c5f345fea96ecb91625e50bf6bb69bb71016647574e71a7f2d762d2" + }, + "encrypted": {} + }, + { + "address": "lskjtc95w5wqh5gtymqh7dqadb6kbc9x2mwr4eq8d", + "keyPath": "m/44'/134'/25'", + "publicKey": "57162b1d7e5239fd93cc1f440d1493fff3582bc28eb14badf324e06756ed19f7", + "privateKey": "d0f245387c82d06e5595624ef96f13b8a0c1eb4430d6d606091afc4de365132e57162b1d7e5239fd93cc1f440d1493fff3582bc28eb14badf324e06756ed19f7", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/25'", + "generatorKey": "633e1696edbd9f2eb19683c4f7e0d4686fefb1a15772a1affdeb49a44d8c04f2", + "generatorPrivateKey": 
"c74dcc813c8011ef00936750155f3c06fae9382d25d716e81b9d35238f0d97a7633e1696edbd9f2eb19683c4f7e0d4686fefb1a15772a1affdeb49a44d8c04f2", + "blsKeyPath": "m/12381/134/0/25", + "blsKey": "a6e64df0d2d676f272253b3def004bb87276bf239596c4a5611f911aa51c4e401a9387c299b2b2b1d3f86ad7e5db0f0a", + "blsProofOfPossession": "92ff87e4dfebfdee0e5572e94f62c483a9b4465eada10c3a6bed32fc92374dbbe89eed00117ddb27bfbabc5e41d90d8a0701fd215caef0233eca660d7a0bccdaf064356edaab13aff404aeb5264d8b68ab0808115e09ef541168364806a62d49", + "blsPrivateKey": "3904de0fc9bcadab43d1b2d5f79cc197e59d96e99afa03da6acedac40ab3229a" + }, + "encrypted": {} + }, + { + "address": "lskwv3bh76epo42wvj6sdq8t7dbwar7xmm7h4k92m", + "keyPath": "m/44'/134'/26'", + "publicKey": "d22846c90b31913318a4e9d5e57cda760e1e35316d16fe8b43066c407c9b148a", + "privateKey": "fff5a4e22fb9473f23b9c8d5abe45175ccb2eae77710f8d99672280c685af3f2d22846c90b31913318a4e9d5e57cda760e1e35316d16fe8b43066c407c9b148a", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/26'", + "generatorKey": "f99c543eeba441fdb22c673fa81878269c3b69a6366d8d51fb6890f2eb3118b6", + "generatorPrivateKey": "0345913f3b2283ddb51285af6e9f2454fafe9d8f4438d5e60281b8753811476ff99c543eeba441fdb22c673fa81878269c3b69a6366d8d51fb6890f2eb3118b6", + "blsKeyPath": "m/12381/134/0/26", + "blsKey": "8ae82e86c2ae47fe55b3db422b5f6e8a8ecbf4a33a0e910b4cc53d1bef0d66e3d19e8474a97ba58e31798c604758b1d5", + "blsProofOfPossession": "9215a181382a5769652e3818238e58496ca1c80eb6282b000708b2c9c19464153fcc8a541d8aa32378186b61fdb2183d15828ffa20e49a0dae0cb05e8c106f894a7ee7190c6eb60874477da236c05a275187bded6ac5a9c98656eb2199f736fd", + "blsPrivateKey": "474a20eda00f30146da307c7bd171cd5b91ea5b6d44641d4677d39d9aa9bc27c" + }, + "encrypted": {} + }, + { + "address": "lskq5attbvu8s55ngwr3c5cv8392mqayvy4yyhpuy", + "keyPath": "m/44'/134'/27'", + "publicKey": "a1f052d86f89b7848e21eb71448d8c985a79c16e51ac7c76f72da5eb6480cf58", + "privateKey": 
"911fb9ae6147af11ee3fc36ade5a411a4c627d08eba07ac1d38c10855bfb2556a1f052d86f89b7848e21eb71448d8c985a79c16e51ac7c76f72da5eb6480cf58", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/27'", + "generatorKey": "1819bea0ff11aa0cde16c5b32736e7df274f9421d912a307526069fa119100ca", + "generatorPrivateKey": "9e5678be030e043e8ed9876ee4012cf293b95b44759d75a8a6ae8849901afc8e1819bea0ff11aa0cde16c5b32736e7df274f9421d912a307526069fa119100ca", + "blsKeyPath": "m/12381/134/0/27", + "blsKey": "957a970041ae9b29f33cd9baaf077f77049e664c8123b22fda3793252f71916c5df0b103ffad5cb75bdb2724d9ca3eba", + "blsProofOfPossession": "80d4fdac09ce195c9d685a751fb7cd9d4da7b9dc906348b4bb741ceb53f876afd0bceba75b36327a8cbd8bd3ca8ac2cc14b4fede3ce2cdac7f0bf0ad5e58840c64bdd0a0905cd6aa5da8acfcb33a931e469cadc27a42c2a04a62fd6ecca05091", + "blsPrivateKey": "1c73ac651be2f72f2be31639e6aad77493d00afa10b7138f60ab5d9da1abdb8f" + }, + "encrypted": {} + }, + { + "address": "lskdo2dmatrfwcnzoeohorwqbef4qngvojfdtkqpj", + "keyPath": "m/44'/134'/28'", + "publicKey": "ebd7440bf10d48e5d4601b5815b69c9d74fbdf9578db8477c94f4856b85a04ca", + "privateKey": "fa77c6df262210a67e6306b286b85d8fd77bed6fe33250c170e87e7cfdf0bc91ebd7440bf10d48e5d4601b5815b69c9d74fbdf9578db8477c94f4856b85a04ca", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/28'", + "generatorKey": "bbc7ca5acae1d53e0a44a212f4c77c7601ace0e489d936c0b6f26a9fbb03601e", + "generatorPrivateKey": "34d0d867fb2a43007f160ab304ca1d779871d60fca38e64e688d85cee4dd4331bbc7ca5acae1d53e0a44a212f4c77c7601ace0e489d936c0b6f26a9fbb03601e", + "blsKeyPath": "m/12381/134/0/28", + "blsKey": "82b478f1b884ee4c152490afc8b233d003745a58c236b00ecb3cea1022d59f04bf225266bbe5b0a5aa7da0a771a66acc", + "blsProofOfPossession": "ac4d05f93e3c374c83ab9cec2a5c67dff8a02298361584267968fad8f391af083b5041a020ce7a189fd8fdbf055a265c04f55e80a8dcf06e7b4e3358b347743f47d33bd5ee0cc4d4213995c46d6d4e1a61be929f571c1a0fa1c7dec805a85805", + "blsPrivateKey": "4fda60b27305f21237ae97d5f91c52455e10a242ec60997468b1d65d3f979d48" 
+ }, + "encrypted": {} + }, + { + "address": "lskoq2bmkpfwmmbo3c9pzdby7wmwjvokgmpgbpcj3", + "keyPath": "m/44'/134'/29'", + "publicKey": "886ababad3572e81567a65320e1d4fca7de95ad69a305564be7625cfcedb531e", + "privateKey": "9f9ca7d38aa4db5b9a6e3c7f593f7862ca8cc87da5cdb0c88e3f3a45ceb882f5886ababad3572e81567a65320e1d4fca7de95ad69a305564be7625cfcedb531e", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/29'", + "generatorKey": "8cda7b8df8975d781e053882a1373d190d5f8fd7c13ab528be8597b5d06ede57", + "generatorPrivateKey": "93771355236957f57b4bfabbc1d7e3c2cf72f5b0ef78e62471d455d44f13fffb8cda7b8df8975d781e053882a1373d190d5f8fd7c13ab528be8597b5d06ede57", + "blsKeyPath": "m/12381/134/0/29", + "blsKey": "882662250af65099ca817b2564576582981f23746f07be09ebc03ed6aa582a327d4156ff4a12851bce3ad77be854f937", + "blsProofOfPossession": "b73f34042d210b6cf0ba61b04e26bcb08e4d671a12df09e592c14c73ac55df09a01adf94b205b86a9ac9020cc719e93b0f890050891d9f8622346f45112ce502e26293a14c36501a8f1947c33fa38535d6eae6c4af6679296e76a105e899341d", + "blsPrivateKey": "130e7d4aedeaaf42ff9919b87496c80d0ef2cbe38a6e47ed7f7b8b4140a11700" + }, + "encrypted": {} + }, + { + "address": "lskgn7m77b769frqvgq7uko74wcrroqtcjv7nhv95", + "keyPath": "m/44'/134'/30'", + "publicKey": "c71ac98a32b133bc6fa8dbb6d42d87110f44fe4f3b74ca58fd60fa0d6010c285", + "privateKey": "83e39036d9000e4a92da3e96ae1a41b21d8ba158840447ac5bb7fc94db9bab9ec71ac98a32b133bc6fa8dbb6d42d87110f44fe4f3b74ca58fd60fa0d6010c285", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/30'", + "generatorKey": "0941ca2cfd9b1e0cc4bf0dbfd958d4b7d9f30af4c8626216999b88fc8a515d0a", + "generatorPrivateKey": "9b7b095990f701463a893d5534af10f3b850190ee94d3c5c114f50c82778a7bb0941ca2cfd9b1e0cc4bf0dbfd958d4b7d9f30af4c8626216999b88fc8a515d0a", + "blsKeyPath": "m/12381/134/0/30", + "blsKey": "8808cb1e4cb5c8ad18ad4a45e35388af4099993effb9069a28e56c5718944a3b4010ec1ef54b4faf4814fad854322468", + "blsProofOfPossession": 
"890995fe98a83721b0069aee00c2b264239b3b833b71f64a5f48b4340a969fbac1ffc0664264fbf5af626d37fb3fe6d403dc7ef0ec195cdab82e7615d73ad7a2d326a761fdcf18a6a83efc4f502c724a10ddd89f8b6981496c34b1b32f512781", + "blsPrivateKey": "00687a9dd373f8c15a883f678c6036273d34dadfb8236a840609ecbc67faa4b6" + }, + "encrypted": {} + }, + { + "address": "lskfmufdszf9ssqghf2yjkjeetyxy4v9wgawfv725", + "keyPath": "m/44'/134'/31'", + "publicKey": "81107e3e00332a827112444a1d53532e6e519acbf741ec3a58e318d6bfa05577", + "privateKey": "07ca3d10e8a88b2414ff218a849d8b66d84bd8e2290377f13b42cea907c77d7181107e3e00332a827112444a1d53532e6e519acbf741ec3a58e318d6bfa05577", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/31'", + "generatorKey": "24bab6ba79973ffaa8569af2cb69b8495d20f0c7ce674814ee0615d31abe9607", + "generatorPrivateKey": "58b70c32dea6cb47393427b3cb6c5581674e620bd771d946d4d05588c097749224bab6ba79973ffaa8569af2cb69b8495d20f0c7ce674814ee0615d31abe9607", + "blsKeyPath": "m/12381/134/0/31", + "blsKey": "96bed36ef328566d826a6f6b874ce441ad34373487b4bcc2d48d76f2dd453e418935a7b60578c43b9c4dc954e9331a3d", + "blsProofOfPossession": "b4d80456953b5111777a74931f5691a6e4c0bc4f4d552aeee9ed1002903b366abab12e2d596a4387933ec676058ae64e15d7b322786d19744281028753b621ed7d49b6e6bf87983267d3208c3dc5da983d845a7a2822da4a085446172e823b28", + "blsPrivateKey": "59c7cbf878eaf29c9e691f3c2d9bca2cf0fdec574bc037e1e156c730bf684b54" + }, + "encrypted": {} + }, + { + "address": "lskx2hume2sg9grrnj94cpqkjummtz2mpcgc8dhoe", + "keyPath": "m/44'/134'/32'", + "publicKey": "9ea73410309a58c1f0c18d8821baa56ea2fd654215ae94d0e3ae808c7ad5e90f", + "privateKey": "64be15e273d24a39a7af8b674b6af47063c7db0b5ce61fbf9a1353e94a00cbfd9ea73410309a58c1f0c18d8821baa56ea2fd654215ae94d0e3ae808c7ad5e90f", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/32'", + "generatorKey": "f07a86182356aee3fcfb37dcedbb6712c98319dc24b7be17cb322880d755b299", + "generatorPrivateKey": 
"6f3e9367328500bfaa95f7fd94e848fd6100f5e10bc77d439585185d20dea1dcf07a86182356aee3fcfb37dcedbb6712c98319dc24b7be17cb322880d755b299", + "blsKeyPath": "m/12381/134/0/32", + "blsKey": "b19c4385aaac82c4010cc8231233593dd479f90365186b0344c25c4e11c6c921f0c5b946028330ead690347216f65549", + "blsProofOfPossession": "b61a22f607f3652226a78747f3bb52c6d680e06a8041fc1d3a94a78fabf2895f23559059a44b0c64cd759d33e60a06060197246f6886679add69f6d306506336e15cdc7e9bde0aaca6e8191fb3535b5685ce8b3f33212441d311444a3d57fc66", + "blsPrivateKey": "4e29180852b97988e952ab7de895a55b14c283987a55f5df08cd1220b7d2df83" + }, + "encrypted": {} + }, + { + "address": "lskqxjqneh4mhkvvgga8wxtrky5ztzt6bh8rcvsvg", + "keyPath": "m/44'/134'/33'", + "publicKey": "7e4874d02ad84042e1fa3bfa61954d070308080f3cbecdf29d7fbfd66edb46a1", + "privateKey": "c17df1663305582bcc4b234e5de32a07e8c379970e101ffe3d787f082ed5f3d67e4874d02ad84042e1fa3bfa61954d070308080f3cbecdf29d7fbfd66edb46a1", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/33'", + "generatorKey": "a2b5e97ac5a5b3c3a7cd9b4401eca1f4e8da59fe567e229ea47e65bf40053402", + "generatorPrivateKey": "7e95bcfa2cb10e89f5036b3431446c5a55c115ffbe926443507943d48f8062b6a2b5e97ac5a5b3c3a7cd9b4401eca1f4e8da59fe567e229ea47e65bf40053402", + "blsKeyPath": "m/12381/134/0/33", + "blsKey": "abc1d1ef1f992a9fda45841079516169c879421f4260194c0a47e46afdb9f349c2a51e66e9f2ee8bf22231027584a6bd", + "blsProofOfPossession": "a16aa0fe3bfd5383c2fd874be4feb930f2c75f5d35d0e0ab314eb545a673aa1854ebfee7b15a026d5a9fb02842e54672149382f2898a0e12756bb949772b1316163ba774768c88fc90c2471afe94140d8d8f16974f2ebf050358cd98587b32ce", + "blsPrivateKey": "471a10414c7c89584cb2bf93a300426038301ce2b1197ab7f8752708beafc7e0" + }, + "encrypted": {} + }, + { + "address": "lskr8bmeh9q5brkctg8g44j82ootju82zu8porwvq", + "keyPath": "m/44'/134'/34'", + "publicKey": "1b62f211c18f7f707b41d0396f1a71ccfc7b27095728abb7aafda77c7d874857", + "privateKey": 
"6f11ae1da057f6681b404800e955b8b6ab43d742473f67e60af2e3aed04ff16e1b62f211c18f7f707b41d0396f1a71ccfc7b27095728abb7aafda77c7d874857", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/34'", + "generatorKey": "8062134a09cc464fe9465cda959b402a3d4506a1c44b3f5cba9661d42e912421", + "generatorPrivateKey": "daba1869775231db6c57d0d49ae8731693816165431889bb7506baad362d2ab58062134a09cc464fe9465cda959b402a3d4506a1c44b3f5cba9661d42e912421", + "blsKeyPath": "m/12381/134/0/34", + "blsKey": "a8271f9e8874eebb6d66dc139e984b6a6c71d2a7e23c6d7061bab7725e9c65f2e2123778130a2acd278f155440debde0", + "blsProofOfPossession": "84a3aeb2cc8329afc63f40d137b017ebcffe6df9e55bdaad8249408d01dad5025f1c83faecb53955ba5524df25b0d85e180f0335d0b5ac8c82c7f5fd0975002fe0231a83754c0034b07175afc426b17978870f8326cfe4694ff723e08d0b6a61", + "blsPrivateKey": "55416acd8c266c470540c3ed4abcbd22b1b936cffa4b8ce620bd9d8b63c0dfc8" + }, + "encrypted": {} + }, + { + "address": "lskbm49qcdcyqvavxkm69x22btvhwx6v27kfzghu3", + "keyPath": "m/44'/134'/35'", + "publicKey": "2a4aa6527e9f9bc2c3d3b4a9a22be543e95703593ed98989285e0b92ec6f3af2", + "privateKey": "134dad94b73ca57153ed7d9f37da7b94ae2f3b64d74a62e12524fe7bddf7c8af2a4aa6527e9f9bc2c3d3b4a9a22be543e95703593ed98989285e0b92ec6f3af2", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/35'", + "generatorKey": "4ec3ad70d3d35f0d684960e7938fab016d12c6c7cbb8312a8cff776dbaf2ca4a", + "generatorPrivateKey": "67bfc7dba3246b82db00c25ef844f5da3008439cefef1a9ee308accde7c7bfee4ec3ad70d3d35f0d684960e7938fab016d12c6c7cbb8312a8cff776dbaf2ca4a", + "blsKeyPath": "m/12381/134/0/35", + "blsKey": "80d7d0598d4e79ceea22c56d16e747cd5ef94469bd036945d14a5d1e06eb700f9f1099d10cfaddddf9e88ac4c9f1086a", + "blsProofOfPossession": "b7890264708b9d3341d90864f9120cd84090592a6bc5a419df94e86a638a0055e7dc3846cb89869cf46305611e49cea007711f35a5effd3099e56b5108a4103215a6ba9195c4694064ba661502e852b43e9593b0a60bcd2b567fc97565054500", + "blsPrivateKey": "1f7ad690ead2cbfc3d51e287d19158d2db2320c8498e72ff7ade0554383d0f01" 
+ }, + "encrypted": {} + }, + { + "address": "lskatntynnut2eee2zxrpdzokrjmok43xczp2fme7", + "keyPath": "m/44'/134'/36'", + "publicKey": "8ee575c0773a3ec9164ad157b8de1b66fb30cc315e8ddb92d4f6eb007fe0f154", + "privateKey": "f012923591f4a0431781880c0adae26b162e035ffb3855e201d11903ba2d78cf8ee575c0773a3ec9164ad157b8de1b66fb30cc315e8ddb92d4f6eb007fe0f154", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/36'", + "generatorKey": "ce6bdb7380fa027c46edd15a072bbabd6b60fecb0e09589e20be560b333ca63e", + "generatorPrivateKey": "5b52fbe120967f200be5f0ba55608668cbe1a60b139f2aa646c0589fd295fcf9ce6bdb7380fa027c46edd15a072bbabd6b60fecb0e09589e20be560b333ca63e", + "blsKeyPath": "m/12381/134/0/36", + "blsKey": "97a4b205ac2b65a2f17ceb49a763393935021629068fe8a8c299e49b986e79ff8cc959a7343b5d00eae2783b825ffede", + "blsProofOfPossession": "8a86fbb8e59ff0de4f2d717ff3c7b0f3f9cb4b14f97deeffb907428666005e613b02cfac0bac4714389d898236de2d5a02df536b511675d2cbd37dcac6dc33bf4cf2d9d43cfa710b3c695bcb8cd29867477ccf3b1e5b9e3afaf7d8d4e50930ff", + "blsPrivateKey": "0fa3a86ad57f1ac10c478b2eea9c5379973316cd0484eadd1ba260da85ff908f" + }, + "encrypted": {} + }, + { + "address": "lskee8xh9oc78uhw5dhnaca9mbgmcgbwbnbarvd5d", + "keyPath": "m/44'/134'/37'", + "publicKey": "b5ca7fa887bfaab853a49e71c086023984c8ea089fd42ecf0a086810a2e6f78b", + "privateKey": "86afad2f4142a2d57e08fafaa6f1ed70af9a0831ef7d18e6ed89adaa61b66754b5ca7fa887bfaab853a49e71c086023984c8ea089fd42ecf0a086810a2e6f78b", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/37'", + "generatorKey": "aed740da1a7204422b92f733212398ce881c24a4cfe40edeea6a59a0f6453743", + "generatorPrivateKey": "71bf7039b3951c6742390e997201c7c5b13ad712f60f214846456c3f15342024aed740da1a7204422b92f733212398ce881c24a4cfe40edeea6a59a0f6453743", + "blsKeyPath": "m/12381/134/0/37", + "blsKey": "929d5be8abbc4ffd14fc5dc02ae62e51a4e8fff3fd7b5851ec3084136208ceac44366a7313447858e3814ddc4213d692", + "blsProofOfPossession": 
"88e7331baeba342eaa907cfd7a1b5bc839a70e78b0535d68c40ddc2e4d5157f8d1ff55d29243fe2375fcfef5c3a2133e0a0d11f8b58041278a1e9a3a9e7986f906201df48987e8f8eda2e6ee4452fe58b54805e2ca4cc256d8e42083b70f79e3", + "blsPrivateKey": "032de7290e108bb21cbd7e0084f5db140a2d365629b07cafea6c46a0c705775e" + }, + "encrypted": {} + }, + { + "address": "lskcuj9g99y36fc6em2f6zfrd83c6djsvcyzx9u3p", + "keyPath": "m/44'/134'/38'", + "publicKey": "1efe4983f0e29699afff6fa2917716b2599a88c23f21508b85a22f44c7ee1b62", + "privateKey": "9fd97aaf86fdd14e435e8b9356155d635e52fb7b885ea6e417cd7f8376720c761efe4983f0e29699afff6fa2917716b2599a88c23f21508b85a22f44c7ee1b62", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/38'", + "generatorKey": "80fb43e2c967cb9d050c0460d8a538f15f0ed3b16cb38e0414633f182d67a275", + "generatorPrivateKey": "c1aa3e4f44c0a57c27898b9055be4dc7d92b8ef0949ea812ed10eac89278978380fb43e2c967cb9d050c0460d8a538f15f0ed3b16cb38e0414633f182d67a275", + "blsKeyPath": "m/12381/134/0/38", + "blsKey": "b244cdcbc419d0efd741cd7117153f9ba1a5a914e1fa686e0f601a2d3f0a79ac765c45fb3a09a297e7bc0515562ceda5", + "blsProofOfPossession": "b7a186c0576deeacb7eb8db7fe2dcdb9652ea963d2ffe0a14ad90d7698f214948611a3866dfedcb6a8da3209fee4b94a025864f94c31e09192b6de2a71421e5b08d5ac906e77471d3643374a3d84f99d8b1315f44066c044b5cdbfdfeceef78c", + "blsPrivateKey": "0c629e3c91960c817e7993d8e2f7a567b1a704af52d08ba039b68b719bdd8247" + }, + "encrypted": {} + }, + { + "address": "lskuueow44w67rte7uoryn855hp5kw48szuhe5qmc", + "keyPath": "m/44'/134'/39'", + "publicKey": "6b01a532bd79010ee18fb75732356208d96c0524c257913b2b2ad903d55dde13", + "privateKey": "6e7c3feb90fb9f0d50d8892c491a60e9c165bc66c3e5e189f431977a0b6e7fdd6b01a532bd79010ee18fb75732356208d96c0524c257913b2b2ad903d55dde13", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/39'", + "generatorKey": "ebe1d6189c7015d175414db9621a602b0912826c1eb1aab09e69bb33ca8fcda5", + "generatorPrivateKey": 
"30af73eed356c281a256d2a8c94c3b0eb8676078bddc3cda67a1e8d42a44f3f2ebe1d6189c7015d175414db9621a602b0912826c1eb1aab09e69bb33ca8fcda5", + "blsKeyPath": "m/12381/134/0/39", + "blsKey": "b7c47fbb0d7e3793460949c9dd6120a310eb52de67f6cde55c022b05dd5053074c8a0e562896a482c787eb2eea82353f", + "blsProofOfPossession": "a265237ff848fe7acb4c84b6f68008ee7ec917a7a11c050f630b834e5caf22a447de94de0e7c52d03b18e003e5f9a3f2091cb5a78817ba42a7e19c714af47ad0b94824c5b90862059ed3042446143c56c4df011389eb42dfa2daa58df677d473", + "blsPrivateKey": "67cbba27c5ab5ef4f50f963cfa680bb745e565a7b26cd6a3755ece6ff0e238fe" + }, + "encrypted": {} + }, + { + "address": "lskm8g9dshwfcmfq9ctbrjm9zvb58h5c7y9ecstky", + "keyPath": "m/44'/134'/40'", + "publicKey": "4a4a974345c653a5f83e6f24f40ab4757bf07dc4f19d8070faa9852120f57549", + "privateKey": "80f077113e432f2360676c28392aad1f73012f62053c95e9fd411c9a3e9a32d44a4a974345c653a5f83e6f24f40ab4757bf07dc4f19d8070faa9852120f57549", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/40'", + "generatorKey": "497a5b80edc6b9b5cca4ca73fd0523dbd51e41c1af5f893e301cfa91d997573a", + "generatorPrivateKey": "4a7e5a09ed1049e59a3e3d10a27dca47b0f3ad8efbe25ba554de7e2e63cd522e497a5b80edc6b9b5cca4ca73fd0523dbd51e41c1af5f893e301cfa91d997573a", + "blsKeyPath": "m/12381/134/0/40", + "blsKey": "8e3f9dd02f46bbb01ec1ffbe173b6a28baa3ffaca943afe51c18dc5220256a3994cd0b0389c835988a64076b4e81c837", + "blsProofOfPossession": "980f00e7752adccb907eaea0fc31ce62dcaff9bf1c6b7066c5071829c91456a8d1e266cb0a9ef4916ffbd09295508a350d21e9123e5cc1c00d3ef65f5493c93c5b993e9768960d4210849743dc2b995657cb0aee7d46d6482e3545b89f06f895", + "blsPrivateKey": "2b67cf8da21f38b44a13674b270c912b50d3c74981d76e354558da1c1f2c829d" + }, + "encrypted": {} + }, + { + "address": "lskhamuapyyfckyg5v8u5o4jjw9bvr5bog7rgx8an", + "keyPath": "m/44'/134'/41'", + "publicKey": "439ad025289bc36c9bcaf79a04116d1cdc5ee87fd5ecb93be83ce761d69c7733", + "privateKey": 
"3f2353712bd5e51be220f1632571a451a9f357a4f7e292fbea8d9f7a52c8167e439ad025289bc36c9bcaf79a04116d1cdc5ee87fd5ecb93be83ce761d69c7733", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/41'", + "generatorKey": "d051790a70ffdf5bd80dc9cec003f8261128be1fc2135990accb13caeb3ed588", + "generatorPrivateKey": "ca0202c84b1675a89a53758e639447336b52042309014c9def9d84bdf5c5e229d051790a70ffdf5bd80dc9cec003f8261128be1fc2135990accb13caeb3ed588", + "blsKeyPath": "m/12381/134/0/41", + "blsKey": "a2fc837b51e6dd740fc1530e6713b0f8c04e646e91da849517901f24d9bcc78c360223f1ad3692de2e96444008a67e03", + "blsProofOfPossession": "82d6fee11dc1561ffb5f36bf07acdffb95e5c329f7adc0b8937bec191350d7c4a158c7592a179ed86b9c0e20159e903100495fcd3fb5bee481e053775b232f8e0fce602e8ec6edf0fe8ba90c06e6215d7c73e88a626d2fe63c6422826489d72a", + "blsPrivateKey": "1cc66f8abe734f69e212c028ddc5e8a5266f16bb92cbd23a11a2701374108a11" + }, + "encrypted": {} + }, + { + "address": "lskrzuuu8gkp5bxrbbz9hdjxw2yhnpxdkdz3j8rxr", + "keyPath": "m/44'/134'/42'", + "publicKey": "3de31d0eccc3e0d5c0a017a4066108ea909b7b9b97a046d55ea207b94d9f7570", + "privateKey": "67cecd53def499f8e0eb3c9cdbb4e330e2f5b4133e30e5f5398d40f966b8c0ee3de31d0eccc3e0d5c0a017a4066108ea909b7b9b97a046d55ea207b94d9f7570", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/42'", + "generatorKey": "d454f04eb0e05c980f6a3427e98d73493665860ba7a29eb915cfc0b8daae2849", + "generatorPrivateKey": "406b400c1bfa9d0462ef8fc4100a7f918c16a3823f1dff057cd7028d6865cfe9d454f04eb0e05c980f6a3427e98d73493665860ba7a29eb915cfc0b8daae2849", + "blsKeyPath": "m/12381/134/0/42", + "blsKey": "b1b4ba05e7116670be55b6d9fc28574d142824175a1e3d1cdafa37f193c342eba1a85d8520a9fd962811fe63a5a2d048", + "blsProofOfPossession": "99f7e39908f0cabbfd156c78a903d6968c455f5edbcb878525abe1217674d9745da87057f1fa93ccff79632253d5b4fd0c6301b0b9eb0e07fdd4c0abc99da0229ceb4a03b0da237657e445a7bbf6877689bfc027d65f24f05982dc2aeb34c72d", + "blsPrivateKey": "6cda6e97b66b400de912562e266710fe0df80ab4c6c9d91c9f2cf03e4e0a3834" 
+ }, + "encrypted": {} + }, + { + "address": "lsk8dz47g5s7qxbyy46qvkrykfoj7wg7rb5ohy97c", + "keyPath": "m/44'/134'/43'", + "publicKey": "1261a41de66aaea2d66bc2b4ad5b7d25fbe013c11aae160bad70378b6049fdca", + "privateKey": "a80610578bf678af963bffabc131a791a590830abce950d15b95bae03ed5bd1c1261a41de66aaea2d66bc2b4ad5b7d25fbe013c11aae160bad70378b6049fdca", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/43'", + "generatorKey": "567e1e27c02293d7c190a1eb203c2daf1935a9901de66df73f8e4eeae6907d04", + "generatorPrivateKey": "72be4840bd46fc9566a1741499fce3fb9152e01ea28df6f1e834f35ba3d14f09567e1e27c02293d7c190a1eb203c2daf1935a9901de66df73f8e4eeae6907d04", + "blsKeyPath": "m/12381/134/0/43", + "blsKey": "a2f8fdf2b80c987ae61634125c54469928728ecb993bab3db892725b16b41ec48c36056eeee2a1c9b073d12bdf917684", + "blsProofOfPossession": "abded9f3ad588edba52b7b2a4b3ff25f630aefae0d7a91827bc1fb7b8cba36d27c310a7a58a4a66ed9a8d90ffc0aae6e17718b1fa3f8e7305498e740d531460702a7dce1e32c19e18849c786c26a30e29b464c7202dd64d021c1eef643de519a", + "blsPrivateKey": "2d11ddcb18798ed85425c100ee31309725153e3ddc769531dcc8939b9ba135b5" + }, + "encrypted": {} + }, + { + "address": "lsktn6hodzd7v4kzgpd56osqjfwnzhu4mdyokynum", + "keyPath": "m/44'/134'/44'", + "publicKey": "441132064a0a5cffb2d28f4306fdf4c784e6bcd0f72a8b0e2e70f11812afd9aa", + "privateKey": "50b8e65ecc714b5a02b3ad6e6769e4dbd8ed4b9fc87f2d0876f1c9d705af49ce441132064a0a5cffb2d28f4306fdf4c784e6bcd0f72a8b0e2e70f11812afd9aa", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/44'", + "generatorKey": "6158b2a5b662ce05c7864dff4c2aecf6109cdea1be703a79147450b082ea242d", + "generatorPrivateKey": "59e643809298d20fe0789fa76ce08a150c1d75602a8c5939b6dc468700ef2fc26158b2a5b662ce05c7864dff4c2aecf6109cdea1be703a79147450b082ea242d", + "blsKeyPath": "m/12381/134/0/44", + "blsKey": "a97efbc836dd4028813063912bcadb52fdb8e4d2ba04d7bbb477d2a97e16167c5fa6ba75e482cd7a7d476d78fed1550b", + "blsProofOfPossession": 
"995df23eececc27026f62816bfd07d71696e2dc5751bafb03d50bd9c66d388c562d6c1357300e4d51e5522edc3cb5ae217b3607795baa0209c6e63db01b4b7c28452c15db1366764abb9d886d0a908da07d3b7b2612e263d95721ffccefb4aa4", + "blsPrivateKey": "5b4e861123695a603833f8b442e474692b7b197e38c5be4a45a2e04244ed9582" + }, + "encrypted": {} + }, + { + "address": "lsk5pmheu78re567zd5dnddzh2c3jzn7bwcrjd7dy", + "keyPath": "m/44'/134'/45'", + "publicKey": "1f1b9cea61290f9b2380893ab949c6831315d6c2610371573de28cce16167595", + "privateKey": "d8165b1dbf9e5eb9d710739aaa552b4083d59f3a22c549b8141508a014edcc311f1b9cea61290f9b2380893ab949c6831315d6c2610371573de28cce16167595", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/45'", + "generatorKey": "62c37caa9ecdb3874354e7f780cb4463ad190bc31e75e552cb07b9bafc658f2c", + "generatorPrivateKey": "2ddf26bf710c8ed14e327cce8b8f5e196a3d43d731c1d007554f4d052edf5baa62c37caa9ecdb3874354e7f780cb4463ad190bc31e75e552cb07b9bafc658f2c", + "blsKeyPath": "m/12381/134/0/45", + "blsKey": "809c35a2a1f510fb574a223474fb6b588daca95ab1b9b04f4f0dcdcd4581f05914eb1b9683d21997899ebf730d82a8a7", + "blsProofOfPossession": "a2fd6eca6018825969d8b9de58e6594149c5114cea9c27997f2ec67b923cbe562454caa5a5e956b3eb5ea0c5bd9b0196137d4646e21b51bd21503dde474d510f62654bb7ffd141fa3462997bc6662f2893cff7d917eb07f2985dae860723bd46", + "blsPrivateKey": "692a0a8a17a80c888ef3ef9e5c7e5c11b6bf65250a03f3d22455a81c39480d6a" + }, + "encrypted": {} + }, + { + "address": "lskwdkhf2ew9ov65v7srpq2mdq48rmrgp492z3pkn", + "keyPath": "m/44'/134'/46'", + "publicKey": "9cdd0974356c09da1f6234c8f7e3ad8a08ba0e2828cbac81dddfc3f36d54ef11", + "privateKey": "510675c85299b7a430cabfab2b73a3103639c832b40cd42fa3fe6094c54353759cdd0974356c09da1f6234c8f7e3ad8a08ba0e2828cbac81dddfc3f36d54ef11", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/46'", + "generatorKey": "cc83f488c03e58d083927601658d234ffd12b5cb6fe3151206f699d031dc4161", + "generatorPrivateKey": 
"19d6c31d57d04b6861a868f0032d2e3f2788a06be4ee3642def28bbe1f3f3404cc83f488c03e58d083927601658d234ffd12b5cb6fe3151206f699d031dc4161", + "blsKeyPath": "m/12381/134/0/46", + "blsKey": "8c5b12f5b7aeafb07e14c5264e7f7ecf46b3ba0e6f12619e19271a733e06e913044ea2e5c955eef3567fcc2d842bc24a", + "blsProofOfPossession": "82237a5371179107af8c53ef19bf3e0d055b70ddb689763e0a8ac6d82884d12c2155166af4aa92b66fa64b6a6d2bbe7602a118d597345dc100bd6983f072b9d8da7bd0699b0f3cb51f1ec5a9f2e2feb76030125272325e7f5885399f1d26c5ac", + "blsPrivateKey": "379e94dcd6dad43376c0a0b2a4461fbcfe0bf25d99082a6000b8a52da62648c7" + }, + "encrypted": {} + }, + { + "address": "lsk67y3t2sqd7kka2agtcdm68oqvmvyw94nrjqz7f", + "keyPath": "m/44'/134'/47'", + "publicKey": "6200bdd255930cb10bbd1421d1a849298f1dc5e5dd8e8d00167bfa461745ed81", + "privateKey": "3df9184e5f715bf11494a223865c143376080ebaecd91dc8df2657e5593e52126200bdd255930cb10bbd1421d1a849298f1dc5e5dd8e8d00167bfa461745ed81", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/47'", + "generatorKey": "74f7ff53b55eda8fe9c11d66f7533c27714b121a5918a66c19b309e1c93dc3ed", + "generatorPrivateKey": "38ad961657b3d0e09b61e908362616bef7c86d2ea3b00b1f2f5b325d851ed35374f7ff53b55eda8fe9c11d66f7533c27714b121a5918a66c19b309e1c93dc3ed", + "blsKeyPath": "m/12381/134/0/47", + "blsKey": "a6d6aa277ab636486b7d879e90c541b4952264e18b8a214f58d32226fcc774a8e5bdac69223902424110cbda4ab58907", + "blsProofOfPossession": "a5b91b5e3881a36ea1b209f1cc09ab447e365b111e7529a88981e4e44c4a05eaee0507ff80460453e23187116510dc770d517e16aafc1de2aae2393ddd2e26cbe6fd096b65ba48cb6dacd0862d6c39b394117a596c0a1c9bae8d9b538d6e6dfa", + "blsPrivateKey": "0784ce0bba95107e6d4b8372f850e42ed3ea5f2a4cbc8931349bb6509e1e69f1" + }, + "encrypted": {} + }, + { + "address": "lskowvmbgn4oye4hae3keyjuzta4t499zqkjqydfd", + "keyPath": "m/44'/134'/48'", + "publicKey": "72d227ab88f971ed5da047f0a037ef302b8bb8dd3243f19bcf7f366484262a6f", + "privateKey": 
"b29ada34b8eea59af00ac9816ffbec398c2654ff21a7d95fc833d180b462ab9c72d227ab88f971ed5da047f0a037ef302b8bb8dd3243f19bcf7f366484262a6f", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/48'", + "generatorKey": "f926fbec6d2e461af7c58d87754524abd26ab1f617d73348ba1318d371f7cac0", + "generatorPrivateKey": "f99b68a87d6a0fbedee01e277f2c9ac0381868fd48b3dbe91687cb2ae0b3f45ef926fbec6d2e461af7c58d87754524abd26ab1f617d73348ba1318d371f7cac0", + "blsKeyPath": "m/12381/134/0/48", + "blsKey": "ac304b4ad4fdac88bf975496edc43af0e324120984d5a12ac073b3e3e80c593470b6aa4f10b9897451bd6ee6f569a2af", + "blsProofOfPossession": "b08e154f3db163391dcbef182a63ad51d56521951307b9bcc60f12c83babeb5eef80b6d8503848acf9bc864adaa82bd610e3145dd77debdfcaa8e1e15f13e6da1d5bcfca4234b46208900c6ce35d0147534a7abc728504d731f286edc31a3ae3", + "blsPrivateKey": "5fba886b2e721c7d3165f301c3f6d3722e140f36b2e3b45a53999486bcef94bd" + }, + "encrypted": {} + }, + { + "address": "lskz89nmk8tuwt93yzqm6wu2jxjdaftr9d5detn8v", + "keyPath": "m/44'/134'/49'", + "publicKey": "8baada3c82ea9bf2dc8113c02b90ae5c461eec9329322bf0ed6cbeee104c1583", + "privateKey": "2a3ead5a95ca66f56dc6e4a0f65ee3ee56417b2b1535a93a5c05d2f3471d8a078baada3c82ea9bf2dc8113c02b90ae5c461eec9329322bf0ed6cbeee104c1583", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/49'", + "generatorKey": "761b647f4cb146f168e41658d1dfe0e9c01e5d64b15e5c033d230210f7e0aaa8", + "generatorPrivateKey": "2f672b0ced7c82df2ac79fece05ec6d580b41a4dce590cca6ce68670e6485993761b647f4cb146f168e41658d1dfe0e9c01e5d64b15e5c033d230210f7e0aaa8", + "blsKeyPath": "m/12381/134/0/49", + "blsKey": "b61f2da61bf5837450dcbc3bca0d6cc4fe2ba97f0325e5ee63f879e28aa9ea4dd9979f583e30236fb519a84a9cb27975", + "blsProofOfPossession": "807bca29a9eea5717c1802aebff8c29ad3f198a369081999512d31c887d8beba1a591d80a87b1122a5d9501b737188f805f3ef9a77acd051576805981cd0c5ba6e9761b5065f4d48f0e579982b45a1e35b3c282d27bb6e04262005835107a16b", + "blsPrivateKey": "69e9d76531c5655493d7711602556385a3f5bbfbb6bbcb7beaef2c9609f561cd" 
+ }, + "encrypted": {} + }, + { + "address": "lsksy7x68enrmjxjb8copn5m8csys6rjejx56pjqt", + "keyPath": "m/44'/134'/50'", + "publicKey": "2dba645a063a638489186b825e0c9a9f03628b13e64ad79e9d813b8f6351a308", + "privateKey": "617f7f85f1969c785830105cf75d510d1f1ecf777d5a81468b019da740adb2f52dba645a063a638489186b825e0c9a9f03628b13e64ad79e9d813b8f6351a308", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/50'", + "generatorKey": "7fb2d69906c5076fa314a4e817ce424bbd4a7a21305cec93a12d31a1589dc90c", + "generatorPrivateKey": "324425049aeff2f1b885fc968c247931703e70b8836b789e3c3b05521e12f6ee7fb2d69906c5076fa314a4e817ce424bbd4a7a21305cec93a12d31a1589dc90c", + "blsKeyPath": "m/12381/134/0/50", + "blsKey": "8a08bdac4af80e0d37ce01094440a82a7e5ac9ec893f9a7870d26a4ec52db8932f36384bc7c3d3e03232ddb7bcd1eef5", + "blsProofOfPossession": "b999cf63290a85f96f0f78326c0eb24c3acce4c2307e1a2f1d621cc75f621ccab510e42aade9b6347e95661475230fbb059cd9e4e22ae17ac73dee58a370159bc6b525ab579de9502b761010e97f6d00f60ddfed05e76a5df3dfe33866c1ebe5", + "blsPrivateKey": "5eb911d435b193fac588ef12f503da2151ae4d0999a2c716a74b5596f56ef66a" + }, + "encrypted": {} + }, + { + "address": "lsktas5pgp3tofv4ke4f2kayw9uyrqpnbf55bw5hm", + "keyPath": "m/44'/134'/51'", + "publicKey": "a0e9cf9d02e72d6ca04e26605d6b271ab8cf0e1ce0f8a3381d7cea5d33774176", + "privateKey": "e794dc66cfffe91f218982d55dd702f1aaec240f660abcc3a46fede53afc26cca0e9cf9d02e72d6ca04e26605d6b271ab8cf0e1ce0f8a3381d7cea5d33774176", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/51'", + "generatorKey": "8307181cf9d1f621261e8a97a5b3b77d64a9a1f589a2c14e42b2380d9c2d6297", + "generatorPrivateKey": "2b9b806af478989e386268a7f0b60692c787c4595369ca5aeac9c69062165eb38307181cf9d1f621261e8a97a5b3b77d64a9a1f589a2c14e42b2380d9c2d6297", + "blsKeyPath": "m/12381/134/0/51", + "blsKey": "a77de9989b5fab42dca028637f401953b9e0fd6cd61dc2fb978daafdb5478ac77d67a37135c67a2178b44e5a35a1fddc", + "blsProofOfPossession": 
"acafd4f724cd7b9dcaf166aaf212122360f76c2faf4d146e8d0014653c0fe09f750690ea2b9ac6df96300301fb020d3b04c1b79965cc8929e18bd93190a366851033a901e05850770cb69fc28146db719f1ac232a7947ead59e8d584eb3ddb79", + "blsPrivateKey": "611ec2b3cf68944b55c1c6984e0117a257b8978b6e4db51627a92c0806ec335a" + }, + "encrypted": {} + }, + { + "address": "lskxa4895zkxjspdvu3e5eujash7okvnkkpr8xsr5", + "keyPath": "m/44'/134'/52'", + "publicKey": "1f40e49cb0fd9fde88cc854973379fe86610bec02dd2029de291080283967350", + "privateKey": "3fa4b30dfbc3fa41e7564edf87e11356b1518572ddd2b39b8ca527ffa30f15d81f40e49cb0fd9fde88cc854973379fe86610bec02dd2029de291080283967350", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/52'", + "generatorKey": "0cc6c469088fb2163262ac41787ea4a81da50d92fd510299ba66e5a2b02d5a05", + "generatorPrivateKey": "24473a6a678d3aec6ef7a75387591473d422d48af5b2db095e8417f3818b27590cc6c469088fb2163262ac41787ea4a81da50d92fd510299ba66e5a2b02d5a05", + "blsKeyPath": "m/12381/134/0/52", + "blsKey": "a5ca55e9a0ab81d48eaad2960bd3ea259527cf85fe62cc80cfd8400dbd2511725c06c3a597868dcc257bbc279e2b3e92", + "blsProofOfPossession": "a092cff10ea18ec3dcf3f6e41cd38537e00602e35107067ace7ab7c97a2ae1de531ebea7fc0c22e8dbcee1f981c439930c7cae474a996b153a66b0cb34e66c6041348aaeb4763413afffe0d947da90424065ee573b3683edbb1e51f9a278ae82", + "blsPrivateKey": "35d93ad8f5faa1e1cbe72ebb42bee49a2219c7d6e30c25742916db086464e8a0" + }, + "encrypted": {} + }, + { + "address": "lsk56hpjtt5b8w3h2qgckr57txuw95ja29rsonweo", + "keyPath": "m/44'/134'/53'", + "publicKey": "777fcc4ed76d3a3f1984421cd9be283e6f7e3d3197c8c753d200a1bcef04b0f2", + "privateKey": "c4f107bf103ff5b3f226f612fc0e80b957549051ff9d665ad8ab9fb1b5e29ffe777fcc4ed76d3a3f1984421cd9be283e6f7e3d3197c8c753d200a1bcef04b0f2", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/53'", + "generatorKey": "d19ee9537ed38f537c2e8be0fb491331575f8e4050dc4a74ccee3244714d5969", + "generatorPrivateKey": 
"806c6f33920afe19a27e7f677358c72417ae0a2f51766608b83e8c351015eeb4d19ee9537ed38f537c2e8be0fb491331575f8e4050dc4a74ccee3244714d5969", + "blsKeyPath": "m/12381/134/0/53", + "blsKey": "906653b7a74dc35499e0c02f10a9d092e7dae70e5376287b5533c7a52ade678784956e6bcbb67a11239bbfa977743a1f", + "blsProofOfPossession": "a5bdd92d340281c01d90224ca58a13cc429dc47ea9d2ef6226b023ff926a43ff0a50a82028e1fc20e9faa380136f5dde00a70d7170a8de3246e39b7787771e41271351dcbf4f88b6d40dac77b2e3324a371f9fc08d1fad90fe3e5cd61caae5d8", + "blsPrivateKey": "22cde771d9674061cdaf1040d121aec3e6911b1facc29a66cd869c72cce1642f" + }, + "encrypted": {} + }, + { + "address": "lsk8netwcxgkpew8g5as2bkwbfraetf8neud25ktc", + "keyPath": "m/44'/134'/54'", + "publicKey": "b85e4331ffa96a18e48980200bed9ea7abca9ed16f5902633db46d7516ab72b0", + "privateKey": "5fc331b9319c13b85921a395cadbd79709f19cda4ffbb220b6f8d9f8961dcfb2b85e4331ffa96a18e48980200bed9ea7abca9ed16f5902633db46d7516ab72b0", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/54'", + "generatorKey": "fa7af9f8623b324e6c021b7a0899d980a41dd2de86c35cab530751eaa9e55a0a", + "generatorPrivateKey": "39793207a2f6c4cd2e32c90c2a951ae37dff4b1bb392710a4ec14863ed838faffa7af9f8623b324e6c021b7a0899d980a41dd2de86c35cab530751eaa9e55a0a", + "blsKeyPath": "m/12381/134/0/54", + "blsKey": "a3aa25a2385666122df82fa74096f30560c270b1ef981ff459e25cb5819d50a2edd8c315bf17a6a1af8d88c0e9325e50", + "blsProofOfPossession": "b543e0716990a65727b51489c90495289bae983d3a4439fe68826c2175b4396d37da0ff03910b369335377de097088720b77646a3fdf196e95c54f2ca6bd414327231996bc2dba0c1dcc7a77b8be10b84a4ef8947a0e4ba22aa09a6c025521e6", + "blsPrivateKey": "16748b6923af2e11d23c14082cdec97c9259ea163e8c232760a5151795310d5b" + }, + "encrypted": {} + }, + { + "address": "lsk8kpswabbcjrnfp89demrfvryx9sgjsma87pusk", + "keyPath": "m/44'/134'/55'", + "publicKey": "a716cd8c8361700c75fabd0dfb213b611ee0b819c0bd97b20432e92f614d25c8", + "privateKey": 
"e63e8439fde83b57cb7d9809230fb722c527914200a7aec07bf083af1ac2ba30a716cd8c8361700c75fabd0dfb213b611ee0b819c0bd97b20432e92f614d25c8", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/55'", + "generatorKey": "91fdf7f2a3eb93e493f736a4f9fce0e1df082836bf6d06e739bb3b0e1690fada", + "generatorPrivateKey": "f5f7d8320408c3e1cf03f7d0428d07abd6a21c9bead4255f2d7d9c52eed08d9691fdf7f2a3eb93e493f736a4f9fce0e1df082836bf6d06e739bb3b0e1690fada", + "blsKeyPath": "m/12381/134/0/55", + "blsKey": "a84b3fc0a53fcb07c6057442cf11b37ef0a3d3216fc8e245f9cbf43c13193515f0de3ab9ef4f6b0e04ecdb4df212d96a", + "blsProofOfPossession": "b3de21449917e17d5eadb5211c192ee23e7df8becad8488c521dcfb0c67df64a81561653d92805b4bebae9e5b5bdef8717f1259eaeb55bd1e7eafad3d74efe20181b4ac84bb7582b637e605fe78f10eb03b2a4acbff49809e86d89aebc6076b9", + "blsPrivateKey": "3509a406fafebe2fc14186370e6bf54bc957246902b4405efba31a381220c11f" + }, + "encrypted": {} + }, + { + "address": "lskkjm548jqdrgzqrozpkew9z82kqfvtpmvavj7d6", + "keyPath": "m/44'/134'/56'", + "publicKey": "3f24a6c7a72e7158f3440d269f0e6e8c634f4afb4c7fdf0fd3645411b9996784", + "privateKey": "c466fff076de166acde289385af11ce2150090bf73edaa6e6ab0981365d550a43f24a6c7a72e7158f3440d269f0e6e8c634f4afb4c7fdf0fd3645411b9996784", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/56'", + "generatorKey": "b53ef930d84d3ce5b4947c2502da06bcbc0fb2c71ee96f3b3a35340516712c71", + "generatorPrivateKey": "3803f627ec148e6c38f91bfc22525d375abda4b339e92e17839f66f298526755b53ef930d84d3ce5b4947c2502da06bcbc0fb2c71ee96f3b3a35340516712c71", + "blsKeyPath": "m/12381/134/0/56", + "blsKey": "8d4151757d14b1a30f7088f0bb1505bfd94a471872d565de563dbce32f696cb77afcc026170c343d0329ad554df564f6", + "blsProofOfPossession": "90df1472d40c6d1279bc96b0639ff0b8ae8cef80a0538ef00b9fc3bf7816a541d2eb9349fb6a6f1a07d80504bdf105ac0726e6b01ef75a863cafaf5356dbc03ea1c90387f79d3adf15c8a44614d80e42e7a964df2eca83a871cd378f39513414", + "blsPrivateKey": "6c9825590e74d865175bee6b34b7ce3bc302dcb040fa8cb7880a052c0f73d257" 
+ }, + "encrypted": {} + }, + { + "address": "lskduxr23bn9pajg8antj6fzaxc7hqpdmomoyshae", + "keyPath": "m/44'/134'/57'", + "publicKey": "aa07b8f76eb58b4c284e1a573a2c40f89019c7f37026ee07b33bc2807ce9f4da", + "privateKey": "7ae45bfd25e3a72e634374dd8aceb2c3fe303904d1685763af7021eefdcda13eaa07b8f76eb58b4c284e1a573a2c40f89019c7f37026ee07b33bc2807ce9f4da", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/57'", + "generatorKey": "9b4db295e88468a37e49445443fdc364321d620dc57afe8a5a14f07ce0717055", + "generatorPrivateKey": "1ecca92ec11addd0bce634823b07878229fc2b592a6ccc8fb5d824aa4a787bd59b4db295e88468a37e49445443fdc364321d620dc57afe8a5a14f07ce0717055", + "blsKeyPath": "m/12381/134/0/57", + "blsKey": "b067f711431b1bee09000b1c27fe39a29a5603471a6993d47bf56ece01a17fa4b00e92da90d80689ed2635e7e0f90891", + "blsProofOfPossession": "91f3d5519f94424fd59c120c05d9f2f34d8cb39e092e2a354f5a7d48e7f2e23b6a21b39a7a131954320d5dbeb0a419f10304fb857fae695c180f9dedd18ffa73082af5a6ca0c62c273915cd337570ecd8649157c8dc8836d758fe1e51f4faa3f", + "blsPrivateKey": "39df532310be25d730586eceeaa25ba14093c96facbec12a75a90bea1564dedd" + }, + "encrypted": {} + }, + { + "address": "lskzot8pzdcvjhpjwrhq3dkkbf499ok7mhwkrvsq3", + "keyPath": "m/44'/134'/58'", + "publicKey": "e23148e07a0ae9f9982a3d716821b8762fa0a50cb3cc18b6a7796aeb27e8a9b1", + "privateKey": "8f7a1af93d3ddcfb23124c9970719390847c13ece831e86924ed8cb7fa4cf7afe23148e07a0ae9f9982a3d716821b8762fa0a50cb3cc18b6a7796aeb27e8a9b1", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/58'", + "generatorKey": "73de0a02eee8076cb64f8bc0591326bdd7447d85a24d501307d98aa912ebc766", + "generatorPrivateKey": "9da05ad478e3b6cdda6143d579e8d4514085306b9874249ffce5cb49bd854d9d73de0a02eee8076cb64f8bc0591326bdd7447d85a24d501307d98aa912ebc766", + "blsKeyPath": "m/12381/134/0/58", + "blsKey": "8c4167537d75e68a60e3cd208b63cfae1ffe5c13315e10a6100fcbd34ede8e38f705391c186f32f8a93df5ff3913d45f", + "blsProofOfPossession": 
"929e7eb36a9a379fd5cbcce326e166f897e5dfd036a5127ecaea4f5973566e24031a3aebaf131265764d642e9d435c3d0a5fb8d27b8c65e97960667b5b42f63ac34f42482afe60843eb174bd75e2eaac560bfa1935656688d013bb8087071610", + "blsPrivateKey": "5eee5d9f688bbd779526348dc125c2d325a3e861f836fb9c0f96d2661fd0b8a0" + }, + "encrypted": {} + }, + { + "address": "lsk2xxvfxaqpm42wr9reokucegh3quypqg9w9aqfo", + "keyPath": "m/44'/134'/59'", + "publicKey": "06353d9f52953ceef0138ef8b74b5cfd180adb80c88ea2e389d7f35d38b5ce61", + "privateKey": "bd3629194f166f3f80a2f3f75d144ad52da1952b6e6244382cbb2b3638546ba606353d9f52953ceef0138ef8b74b5cfd180adb80c88ea2e389d7f35d38b5ce61", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/59'", + "generatorKey": "621d52ac19aba86c4feef94c67ae62cfa3f6ac192177ae37be2e6b3205449c0a", + "generatorPrivateKey": "a0cd4e1e5a506682fe0471cc6c28ad979ff8a99872236a02d552c9b036c361ec621d52ac19aba86c4feef94c67ae62cfa3f6ac192177ae37be2e6b3205449c0a", + "blsKeyPath": "m/12381/134/0/59", + "blsKey": "81f7700c2115434acaf61e88b836be11986476751d6c02617d1087e7bb45798ac56929cb5f71c890c6159ff4d71cd1b3", + "blsProofOfPossession": "8bc04a899be3a7ac99e2ddda6567a0b01e21aaea8daf4848821e8233cbe80610a2f670922865f424e878add1de8c978e1913f95308a50693fbc88e991e6bcac3bfef8a1d03f89bb4dfd9c991cbf1c613f85203dfacc4376057f085967f2a7283", + "blsPrivateKey": "08550cb1c6fafbef49a1e66cfb10d1db62eeb66402376cef0875ea0a528e50ad" + }, + "encrypted": {} + }, + { + "address": "lskfowbrr5mdkenm2fcg2hhu76q3vhs74k692vv28", + "keyPath": "m/44'/134'/60'", + "publicKey": "797138977ed2153364f00bd497162c957506ca8fe023bc25ed8cdcfdf8392b29", + "privateKey": "c68c41607847bdacb39a919de4d1e00ab8daf35ae0b9a7b4f9a3d6a4e7486330797138977ed2153364f00bd497162c957506ca8fe023bc25ed8cdcfdf8392b29", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/60'", + "generatorKey": "5812017e0d25131165ebc256f39ccece115fb58ad5fe0766f78054f912832d6c", + "generatorPrivateKey": 
"1433e065e36926ca8f4e74f66997fb917efab9855d7e49a4fa085e8d0c3dc24b5812017e0d25131165ebc256f39ccece115fb58ad5fe0766f78054f912832d6c", + "blsKeyPath": "m/12381/134/0/60", + "blsKey": "b57835b4d3285a134730de7b29361998787c2b4853e7a5e15032b516335e81c0797a51d00e032585efa05c27d2345a1d", + "blsProofOfPossession": "8d9b7510b3332a22635815b809c3e1ef96427a20f15b3f41112af74a9aa1a401d83d625dc5081f51aefee7591d52afaf1451e78e4f3efe29ec171b8239af73fd87b2e8a1aaa8b701c3e5bcb0d609f098738d29e0af57ea010953297c9c9e19d9", + "blsPrivateKey": "3731e7bfbaa3ffeb747497395b0a9354bf9677bdb503941fe3ec362ff69aaca5" + }, + "encrypted": {} + }, + { + "address": "lska4qegdqzmsndn5hdn5jngy6nnt9qxjekkkd5jz", + "keyPath": "m/44'/134'/61'", + "publicKey": "a8b8d44f041f77679c1a6566459642204ea60f44a4a9fa6bb874b022b5129d4a", + "privateKey": "841d84cae4cb700430490a5ecb153fe968b15739d286573bb6c5ce8ccd183555a8b8d44f041f77679c1a6566459642204ea60f44a4a9fa6bb874b022b5129d4a", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/61'", + "generatorKey": "965e86fdfcdcd64879efe23705506faeb4dfc4244f93d47f4bf444966d2a0f3d", + "generatorPrivateKey": "0dba4efd2e90744941a1733afa5d7316d9a0f2ee57b396c094fbc6f7e105242f965e86fdfcdcd64879efe23705506faeb4dfc4244f93d47f4bf444966d2a0f3d", + "blsKeyPath": "m/12381/134/0/61", + "blsKey": "90f87fd2122689c54bcd8fb859c5b36d4b583272043deba66199ad181ca2c38cf48d453c46ec881e03d2b7e2e63e3684", + "blsProofOfPossession": "add6eb668bebf90fdd80b01cb83a31b02577b200c85845bd5260d7851c02d21aaaf6d040e6d6f27a8690c9598f92ba240cdbb6d7896d7a777c484d30ab48d71b1aee1b07083dc5d11a94416c4cf85e33ec3899b40e6222ac888104f80b8d96c5", + "blsPrivateKey": "2d7d6cbdceed7b7b2dffd74c276ebf255f5df7d5e4952134da5d34d0feeb01cd" + }, + "encrypted": {} + }, + { + "address": "lska6rtf7ndbgbx7d8puaaf3heqsqnudkdhvoabdm", + "keyPath": "m/44'/134'/62'", + "publicKey": "d6b2f2bb26d71390e2df1df211bd36fa91fa437871923d007f3aa747e3bc9dbb", + "privateKey": 
"66eac1338aabb25c5d66bd58763c56dd439f255e8567ecde038a5e35bf3459c3d6b2f2bb26d71390e2df1df211bd36fa91fa437871923d007f3aa747e3bc9dbb", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/62'", + "generatorKey": "f8252b40a65be6f5f6d0be446da5ab434bdc0a921fd0956b0672ea4a218d2d7a", + "generatorPrivateKey": "b7ddf78c537e6a808236f5361496cb44be3ca2cba0f2c7e0a20bb068748e8578f8252b40a65be6f5f6d0be446da5ab434bdc0a921fd0956b0672ea4a218d2d7a", + "blsKeyPath": "m/12381/134/0/62", + "blsKey": "a94d3cbfde92550eccede718499df12f33a8ec9a4b386e4ca423161d667862f45fb06397b12dc6a6cbafc14b1cfad26b", + "blsProofOfPossession": "a474ee16d276d3478e1b7005960d41c0e271652f29c3178230b7fdf395801dd62196294b7695b3ccad63887558e0f27d0b121738a42cfe9acab07e6763577ad87eccb5b1d0cd725cb4a32225e79e864c238ce3c56b6db8960ce9fda82828d5ba", + "blsPrivateKey": "0d1e5bc7255af552aa839931ec5cdf194a0296bd070c4d181ff43467f4beeaa6" + }, + "encrypted": {} + }, + { + "address": "lskrga27zfbamdcntpbxxt7sezvmubyxv9vnw2upk", + "keyPath": "m/44'/134'/63'", + "publicKey": "09b005266e78ac5cfc18a3d304403cf141842bf58c50dd754f2a20b0a18331a3", + "privateKey": "e9a9bccf06cd7dda82c50bc34b2156c4d0834749c6769d3363c0009ade5dd86109b005266e78ac5cfc18a3d304403cf141842bf58c50dd754f2a20b0a18331a3", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/63'", + "generatorKey": "d2b31ed942359b0c9cb696cae874a2dbdd6e24915dd8a5882c7c042eac1e6831", + "generatorPrivateKey": "656a2e7db1f694fc6872fd1bfe2318503bcfd3dbd841a0de9170ef5da80ebfddd2b31ed942359b0c9cb696cae874a2dbdd6e24915dd8a5882c7c042eac1e6831", + "blsKeyPath": "m/12381/134/0/63", + "blsKey": "997583cd4f633aa5aa5e616a75d9edc370d5e6eb77e2418c13648b435b0182cdb7787c7ca91ed3939b403fe59041890b", + "blsProofOfPossession": "95324d44556e3c61bd307a40c2ef7f3d988e0ea561e5ece2d2809cf078db232caea9df8b35d8411238fddfe83a6978a70ae88e29fa5b6322b73f7fc9756daf52aa6369e5e69c5b2304871bd324e8125a698e360e3d5f1ad20136370b8d9808ea", + "blsPrivateKey": "24325a46b06e684f9cfb351a4f5a5a62a419754e1a77b8ca39b6814c20655c27" 
+ }, + "encrypted": {} + }, + { + "address": "lskw95u4yqs35jpeourx4jsgdur2br7b9nq88b4g2", + "keyPath": "m/44'/134'/64'", + "publicKey": "a6279f18be02a54af37dc4228fc731e63219a289c1cfb1607b18adf685976f9c", + "privateKey": "384a6c7cc4f39a566ba8e016508824bd5f39d25b2bfdad5c66377e521edbb92ba6279f18be02a54af37dc4228fc731e63219a289c1cfb1607b18adf685976f9c", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/64'", + "generatorKey": "e2f80871a5220be51352427077f6e93c2294d88be6b731b535d2ce9371274e7b", + "generatorPrivateKey": "eb6e2fd2214a11149332ff01b5b823c96f8e85ddb2342b7a1c03a974111791aee2f80871a5220be51352427077f6e93c2294d88be6b731b535d2ce9371274e7b", + "blsKeyPath": "m/12381/134/0/64", + "blsKey": "a58edccfbcbc35d6f9fec1535329a114cc5a2118945098c0f201345ab7de78d36a32014dbe701faf7d32b24f7a696d9e", + "blsProofOfPossession": "999cf3232240944ff9a14e6c4680fae450be8c0ed43fdbf8f92e7873b5482f88229768fdcfd86e22767ec1df3b5fa2fc0b08202ee4a343bfb19c8c8eabf74d44fa73c4517ad0a102faf4ae6fe87cd766d860408b51d31dadcc5674c92908c7ee", + "blsPrivateKey": "6f6ab0c40cc4959ffa99e9a202496527eecaf86d489943abb7b24828b1c7ea8a" + }, + "encrypted": {} + }, + { + "address": "lskk2vnyd5dq3ekexog6us6zcze9r64wk456zvj9a", + "keyPath": "m/44'/134'/65'", + "publicKey": "e550523682ba9bb8d8856cbf4870fa86402a4b21a3205dc1296de556354c9586", + "privateKey": "d30db1751e16265341b23a8f9e66dd31628916b4123cb52057180f148f18e6c0e550523682ba9bb8d8856cbf4870fa86402a4b21a3205dc1296de556354c9586", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/65'", + "generatorKey": "7ff8b45c5f6239306af0194ee41e047669e33338be3f8e6c786d90fb905c8b6a", + "generatorPrivateKey": "befa9db63277650972e0ba0427c4e5c912d7376c3e9ce8924a3397678c0c77037ff8b45c5f6239306af0194ee41e047669e33338be3f8e6c786d90fb905c8b6a", + "blsKeyPath": "m/12381/134/0/65", + "blsKey": "8739c54fb8452db4ff1857649a4144dae29f7bbd3275aaa8f0f2559095a09510e38bb0155bd01d01349e7f1392132e41", + "blsProofOfPossession": 
"b78a813e912849e2583d6e774740f2bef3115f1d23576d206ba15bf0c64404b48208e7b2b5becfe2386fc1ad686094251707a7bf8902a10b8ffd207394ad26b64f7a0c5bb7bfc737fd836b160bf16c4d14dcc343dbc8ff7993391795ded7e448", + "blsPrivateKey": "03fb0362a91d49d5325eb3cf24970da76d434a1585108ccf49baa283651d361c" + }, + "encrypted": {} + }, + { + "address": "lskk8yh4h2rkp3yegr5xuea62qbos6q8xd6h3wys2", + "keyPath": "m/44'/134'/66'", + "publicKey": "b265367283f1d3955366d56c9055da26fb2df23bf81022a0998dad49bebf3e42", + "privateKey": "7ad7a0c9f37312088626a5367c1d03ed941f0b476cfeaedb47613730d7295149b265367283f1d3955366d56c9055da26fb2df23bf81022a0998dad49bebf3e42", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/66'", + "generatorKey": "db1c7c22ee495ad3553394dca00c62b85e78b58e78ca68bfe5027b3346f6c854", + "generatorPrivateKey": "893644ce73b8651f23cd00c7e012ab6d7447d8c4ddd609619442ef10c9948417db1c7c22ee495ad3553394dca00c62b85e78b58e78ca68bfe5027b3346f6c854", + "blsKeyPath": "m/12381/134/0/66", + "blsKey": "95087210c7145581fd8dc397ed12ecc2eb703eaa19dd837d7c8c54cf625ba00bf88608aa89170d703c77f7dcf6707398", + "blsProofOfPossession": "b09816fd6ec0b666e1f61bde72069057a11fc78d7fe8b85873b6d909aee15d74c637076e149ff279c587efa4e6a468900e2c4a857bc55978ea292189737f95e7026514ec5e9a117f31b8339d8becf3af1bd2555df6d8f2372b54b7381ff355ed", + "blsPrivateKey": "71b1abe986e2287ad69c55edb0f9c80336c5220cb31e2ed6c728a58a925d81ac" + }, + "encrypted": {} + }, + { + "address": "lskk33a2z28ak9yy6eunbmodnynoehtyra5o4jzkn", + "keyPath": "m/44'/134'/67'", + "publicKey": "6b0d646e18db8b55ac1a6f49a05f17cdb4880cf99fed2415f3076d6022d70112", + "privateKey": "69c69d0e1906a079416cd965b32aea01de7fca2cf838336d596ecf005c4b83e26b0d646e18db8b55ac1a6f49a05f17cdb4880cf99fed2415f3076d6022d70112", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/67'", + "generatorKey": "689639f5e3808cc0efd5f8d48ca6ee6f9a7a1bd5f5776832cc9b448cff5d0aa9", + "generatorPrivateKey": 
"0e064b38b2c1d1f3db99a14bf07a3c48138f0e3bed3fea0d0aaa4377535985f4689639f5e3808cc0efd5f8d48ca6ee6f9a7a1bd5f5776832cc9b448cff5d0aa9", + "blsKeyPath": "m/12381/134/0/67", + "blsKey": "a1dff3e7486e27eb2bc99d4343b57e06fb8b52f8c7b6ec6d539889afcf0c221fbadcfca65f2ad7351beb8a51e67513fd", + "blsProofOfPossession": "b6447c9e317179a9160ea0c11c2ff49c11e0300332c2c0ec0bf81e936af231ffc3b6628da3e01eda821ff15e9a523f3204b32fd4fcce988c2b73b56609709dfd25ec9df9e33dee073f9d26a82d268569d117ecbf7985e012a975fa7d3ad5e4fd", + "blsPrivateKey": "4ba51a2b3505cbde5211c1a46608e6cd4eccfc9f5d53e473927d9dc34e1ae5e1" + }, + "encrypted": {} + }, + { + "address": "lskrxweey4ak83ek36go6okoxr6bxrepdv3y52k3y", + "keyPath": "m/44'/134'/68'", + "publicKey": "4a96ff97a29898a3bae678346f38d1ed6ab7ae22db602d28e8de6c7b15f91c86", + "privateKey": "c9da3d67a88c09783e5f4aa5a0f15063dc11690e83dcae2d1ab838efd6b739dd4a96ff97a29898a3bae678346f38d1ed6ab7ae22db602d28e8de6c7b15f91c86", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/68'", + "generatorKey": "21120ef22b7df438e06b3862d3f0ab99d5704b3c61c45a544c64c908da8955ad", + "generatorPrivateKey": "1576f20a78dcd0be1a7ad4d6ad85f762b255c662f976cf3ae00486ac28664a0621120ef22b7df438e06b3862d3f0ab99d5704b3c61c45a544c64c908da8955ad", + "blsKeyPath": "m/12381/134/0/68", + "blsKey": "8422c22feba709265c30a7b86a9ee9832d6b32fa4c9dc091c390e1b15e278f9009dc5d70868a56dace1ff622e9e634d7", + "blsProofOfPossession": "871ed33b68172b0ce40a3ec98d6fa9b3fd77245c2c1cb7f1071101cb459d53b05fc0168597148f976ceb1ded71999da8094fd8783cf27d1e21f9b965164573c0ca849210bd1e99f4706ca6f43636f9ea535c333a36c4267a598dc58c7c7fc108", + "blsPrivateKey": "177461dd8db1a3800214ac50efeaf2c8a1ff0c6e14fda158219c795909aef58e" + }, + "encrypted": {} + }, + { + "address": "lsku4ftwo3dvgygbnn58octduj6458h5eep2aea6e", + "keyPath": "m/44'/134'/69'", + "publicKey": "0a9f66755890c7a3b305985e5a061726ef98e0b362228a3df8d478e6c1182d58", + "privateKey": 
"73dd75b1544474d94bf51584c5f9604b4a44408df83930720c2e030aafc56bb30a9f66755890c7a3b305985e5a061726ef98e0b362228a3df8d478e6c1182d58", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/69'", + "generatorKey": "894289ef63ad9f51868d06e700c5dc9cac7af2e6601a99449134926cfdbb4340", + "generatorPrivateKey": "75a5ae8b87cd93c5e27d59898421a59d20e11489e036d8c813a70f39f74641b9894289ef63ad9f51868d06e700c5dc9cac7af2e6601a99449134926cfdbb4340", + "blsKeyPath": "m/12381/134/0/69", + "blsKey": "b9dc37e370cdbab50fe906b675551194e80705f5549ec07f32b95b85ec1ee1b149d156e649ebe1eac57bcc2ce9db3e56", + "blsProofOfPossession": "abefcbf20c53c10ac15054527c2ca691994f0b5cf60444aef49ba4e39312774eaa073be6b887ca5792bbfd53adc7ec3d0b0f6b34ec8a8f2fb6708d5a9d3de242f5fcccc3c3cddcfc5eb8be5aa13c333d114c091f594736e7a43d7d9212d0063d", + "blsPrivateKey": "52943b813516a5a2c72e8d7c68ee11c8d4b0e52be6ded1e18bcfaae70fc558aa" + }, + "encrypted": {} + }, + { + "address": "lskvcgy7ccuokarwqde8m8ztrur92cob6ju5quy4n", + "keyPath": "m/44'/134'/70'", + "publicKey": "26e064c253e23911282d58b71d68e507b28e4c62f50db256b1babf649a65d62e", + "privateKey": "3a5f45a46b59f9017a60bde8f4c35cd6fe98fddb15ea40b149fbc15c29aee69b26e064c253e23911282d58b71d68e507b28e4c62f50db256b1babf649a65d62e", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/70'", + "generatorKey": "83cca7ee3c7145d8022b54fab14505f6f65ed9ac933e3591de4a45d4f2298adb", + "generatorPrivateKey": "2f96617872a88de29161446d351382da43989ef67375ac840f434ad14b2b0ba783cca7ee3c7145d8022b54fab14505f6f65ed9ac933e3591de4a45d4f2298adb", + "blsKeyPath": "m/12381/134/0/70", + "blsKey": "87cf21c4649e7f2d83aa0dd0435f73f157cbbaf32352997c5ebc7004ff3f8d72f880048c824cb98493a7ad09f4f561aa", + "blsProofOfPossession": "92d1948d5d8faec69c6a389548900952014f5803f0eedc480e291bfd8fe6f31231e43fd4bd47817bdbca96e5104b92d2097df4362b94a583a1a24bbdd0382a681b5603d6b3bbfca854d5beccd45c2ebec24623666032f30fb3858b236bfcbd14", + "blsPrivateKey": "70d4a30e49639fd5e56b98f5c3aab01f775cbd7749b3543813aa5f9398ab4759" 
+ }, + "encrypted": {} + }, + { + "address": "lskmwac26bhz5s5wo7h79dpyucckxku8jw5descbg", + "keyPath": "m/44'/134'/71'", + "publicKey": "5deaa3bebf3bb6ef06028679c43874bde94079c5fe90218926feb874236f7838", + "privateKey": "c57064b98f00dbae3e434af2055c8d60b55614e22b5dde66046b84d1ef0541b25deaa3bebf3bb6ef06028679c43874bde94079c5fe90218926feb874236f7838", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/71'", + "generatorKey": "a7340ac2220b35dd5c97e6ea45c48cfdfcaccc4c59abf9b7f316df8a1bd7e8b2", + "generatorPrivateKey": "0aac0c1c562feedc175e66b41f9cf4f874525f87a64063ff8cd3aa0b5039ead5a7340ac2220b35dd5c97e6ea45c48cfdfcaccc4c59abf9b7f316df8a1bd7e8b2", + "blsKeyPath": "m/12381/134/0/71", + "blsKey": "adeefe5ec24b210986ae56ac2d1eea5b5447e38d7c9657d4948ee2d9b312a247ba40964a58c3fc14e5fd7137602e631c", + "blsProofOfPossession": "8ffe03e68c8b3ec929a4934d61091ac1c8f42446076a7ef6e8141082ebf71fd3153c35c1745619a08defb0ca8fbe583a15190f88dbd93d22d3c4eaf3fd60fa2d9cdcd8824bdd289111ca7d537563b0e2fa7ad06cad40bc2ce17277a63a3138b2", + "blsPrivateKey": "3e6edc54aa3da90b6bb09e0ef243a6c8088050cb44d575eada89d8dcd11a05fb" + }, + "encrypted": {} + }, + { + "address": "lskqg9k3joyv9ouhjfysscame66hovq42yeev7ug7", + "keyPath": "m/44'/134'/72'", + "publicKey": "ba444a11029a29eea3046cc2bc6ed4cbdda38a80894ef6d0ad71af78f8fa9161", + "privateKey": "ebbb0c49f9f82b67003a96d9a53d295b0b4d4f69f4fbfdc3b777f2aaf68b621bba444a11029a29eea3046cc2bc6ed4cbdda38a80894ef6d0ad71af78f8fa9161", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/72'", + "generatorKey": "a9568912797914f590413c3156c9cff93c9c14193b01e7bf248195bbe8c1af19", + "generatorPrivateKey": "c128a9bd6b5e8e2edecaf7a82a03e7fc5097196cd8272b962572573285d40a21a9568912797914f590413c3156c9cff93c9c14193b01e7bf248195bbe8c1af19", + "blsKeyPath": "m/12381/134/0/72", + "blsKey": "86f828da4b3c129eb54d95bef7975281b30dd811f252b5792998718355c599aeca3dbb222678ee0af84b13f5af2400b3", + "blsProofOfPossession": 
"8e062f48ead9234b710dbcfebbb2e502ddff68e3d5be19a8e7e89b2141c76caeeae233999009f24f7b6e65f3774ef6cd09de9d5c0bb59a60ff6cb31b276f0172e35f89061f3c2d700543de5cf4d6e613ff6ba7d41c1379d6baefd844ef4cb517", + "blsPrivateKey": "545273aa4f588f3368a39d10f36f2b76d191c93ee01c35f348cb1357ce43e09a" + }, + "encrypted": {} + }, + { + "address": "lsknax33n2ohy872rdkfp4ud7nsv8eamwt6utw5nb", + "keyPath": "m/44'/134'/73'", + "publicKey": "7c3a54ed0b6a766af4069f53299fc2979eda629553c57d973a3e4aedb76a88cc", + "privateKey": "4c28bf9d8deb396a9db0ed5d08dda0e9cd9fddc08274a8d5c2ba357ae80e92337c3a54ed0b6a766af4069f53299fc2979eda629553c57d973a3e4aedb76a88cc", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/73'", + "generatorKey": "473d332bb27f1dab55191233884f37aaf17545b1883554b1457b2dfac7c02b0a", + "generatorPrivateKey": "ee95f0d24719c537c4a7c804dd8321a812499d97de85773a4cb7a38cff78ea54473d332bb27f1dab55191233884f37aaf17545b1883554b1457b2dfac7c02b0a", + "blsKeyPath": "m/12381/134/0/73", + "blsKey": "b29e90de05487e087cb37f34213ccc49edef8936aa15001686f947dd26b2e4c71b0c094c633067c75d3d0879c0347a45", + "blsProofOfPossession": "9866cd99328ae5d1a14f899b95782b828b404c941853f4d0f0f56a113867f9f44b177af5c6eddec16b42c405967e52c90e3c2b0acf4921fd7ad27bdca498980aec0d37923e95d56555190caed7644ac158b392af052a49a8d1df626ea3a5f034", + "blsPrivateKey": "5db5e9de794a02c507674c7092e742c70db374920078d08a77b156202acbf926" + }, + "encrypted": {} + }, + { + "address": "lskhbcq7mps5hhea5736qaggyupdsmgdj8ufzdojp", + "keyPath": "m/44'/134'/74'", + "publicKey": "e2fae8f54453c97775dd80a117fdb786852b52081d4a3f2ab1c58935a678e32f", + "privateKey": "1715d190aea38e22522d2ca170513fdb724e7b2f20799877bac79265e6775b0be2fae8f54453c97775dd80a117fdb786852b52081d4a3f2ab1c58935a678e32f", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/74'", + "generatorKey": "29e5cf287cb9c12b2bb77ef9dc673728132f9e3affef2d0de0d7db7905937435", + "generatorPrivateKey": 
"1b6fbfe2da1efefdd35891902ef7963aa4ac8c918a7e2d44a253f96c541b74e029e5cf287cb9c12b2bb77ef9dc673728132f9e3affef2d0de0d7db7905937435", + "blsKeyPath": "m/12381/134/0/74", + "blsKey": "ab0bf8a74c846dbd47c9e679ba26a9c0e5a7a5902b4f66cee7065b7487eba30262e4e5f0ee78d616d007021df3fbc945", + "blsProofOfPossession": "b159e28ea39b1119e4018ea19777497e1d3c4a58d1c2ecc22aa5b2efe60572cb32ff30bbeda9ce28b235fb55ab15aec206f094f37ff9a78a0931d55799c1c74a19bacfa8a4172ba078d7cad4f663a4708e47981044b1893c712c3707196451fb", + "blsPrivateKey": "158e26816907da1dbfb1a7d6c4d10c38c73bc4365883dac8fdcb5b58eb4f0eb7" + }, + "encrypted": {} + }, + { + "address": "lskbr5cnd8rjeaot7gtfo79fsywx4nb68b29xeqrh", + "keyPath": "m/44'/134'/75'", + "publicKey": "7a0cdc2106afb1bdb3cecd23175287bbcfc97225e1a775a687f97a342e9a62a4", + "privateKey": "f5b4d9ca72fc037e4f6bc8abcb454a6b336bd9269011432c3d7726e095d687b37a0cdc2106afb1bdb3cecd23175287bbcfc97225e1a775a687f97a342e9a62a4", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/75'", + "generatorKey": "552ea15981e9fa54f2b65c409e8d32c350435893744fb9937875b1ec0e3025eb", + "generatorPrivateKey": "888faa5eba1aae717ef317909f53fe87c95b0988ab079aac6fbd456ff1882f55552ea15981e9fa54f2b65c409e8d32c350435893744fb9937875b1ec0e3025eb", + "blsKeyPath": "m/12381/134/0/75", + "blsKey": "968afa71f5ba87783db371242b48962a93c91f17ec6fe2b52260c43b7db62462fc88de889445390024abbb1de1ff87ee", + "blsProofOfPossession": "b3a05e96a9fc1ba05cb80ba48e8f92e6d6d282408d77b16557dd0c8bff8bc963539d5a355cb1544e35269c4fc58f5c0816b4bc3e215d6441f06b9d2e6cd48ad5f08c5bfb35f359fe25ebcc382985bcefce0698bd3a89e655706e46e394c83693", + "blsPrivateKey": "5e5a64d90e0995efcae6083bf22d0cc3b40a9e9c14e9bbe8ebb8f0e534365ce6" + }, + "encrypted": {} + }, + { + "address": "lskq6j6w8bv4s4to8ty6rz88y2cwcx76o4wcdnsdq", + "keyPath": "m/44'/134'/76'", + "publicKey": "529ef3e0a77482bc7b22d3308833dc30a50e230f74dee3a62987ae4f9867ed5a", + "privateKey": 
"752ef6fd81a5f932022291c51e1fd6409e5765600582b2d3d563e952c88e116c529ef3e0a77482bc7b22d3308833dc30a50e230f74dee3a62987ae4f9867ed5a", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/76'", + "generatorKey": "6c99048cae450de8735dd410a5c8b0e4655afaebcc2c155503f890af51e067c2", + "generatorPrivateKey": "627f7390b4c6a2e4426e40e8fc35742f9c72fe14d537faacc992c5d4564805fe6c99048cae450de8735dd410a5c8b0e4655afaebcc2c155503f890af51e067c2", + "blsKeyPath": "m/12381/134/0/76", + "blsKey": "95274c1b15467d43a3b8a3a632a8fb7e1a2efbdf92559ef52ea6ff1b0ba1c7cc2f75ef357b2dc7f0130dc9c04aeaf4db", + "blsProofOfPossession": "a24ef42b04be7bcd65d8434b04f7118bf9566a0d3a36c732cf5b508ccdc12855754663bdb32c5d871eee8a0774a1331a14f25f3aeb6bddee7efaebd2214e19b7cca9f3d3bc7eed93b85b15f0a626117f24361d65688dfbe7267141f13d323d63", + "blsPrivateKey": "2746cbe68b23a69706e0cf73dfcf1ce9a8cd0bde00fcb07d5f611020747fd20a" + }, + "encrypted": {} + }, + { + "address": "lsk3oz8mycgs86jehbmpmb83n8z3ctxou47h7r9bs", + "keyPath": "m/44'/134'/77'", + "publicKey": "28c6e872795eec98a1475aad17e78f8f47baa1794a5226334f7a89ac0911be44", + "privateKey": "35b4345634c91e8ef15d6ce6d3a8038effde85dd1defd8ccc4075a313837c79e28c6e872795eec98a1475aad17e78f8f47baa1794a5226334f7a89ac0911be44", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/77'", + "generatorKey": "b9bbcd67194a7091a517faf37a7ec0fda068c4ac0dcbb8ddf526de97e67716a4", + "generatorPrivateKey": "bbdd4ce2c5eb36fd31682db37f725c02b29ef7847f5485c8798262145c607e4fb9bbcd67194a7091a517faf37a7ec0fda068c4ac0dcbb8ddf526de97e67716a4", + "blsKeyPath": "m/12381/134/0/77", + "blsKey": "8ffe1e957047e7dd979e8bcac9fcea9411ed3be947679ce26a36725b08da51ed2fa19e7f7c6bed701bf3e33a6f787b8a", + "blsProofOfPossession": "89177926eb5ed8d2be150884e0cc4eaf02a040a3ebb0af9df6922d8d7fc58da4777cc6591d3d43570ce6410077d087fe097cb30f28a164d22216859988f44ef88bc7f4a2134f882d044e4ee66d135a31cd063934cf6b4e820fcff3bbfc5b27c9", + "blsPrivateKey": "04431be991b3beb33410c5f95fd52dce7fefcac451c2dfac73562f9b439632fa" 
+ }, + "encrypted": {} + }, + { + "address": "lsksu2u78jmmx7jgu3k8vxcmsv48x3746cts9xejf", + "keyPath": "m/44'/134'/78'", + "publicKey": "6643c7547befc7c019e96b6a3d1ff738cef395bedb5338318efdb5a07a16d259", + "privateKey": "f43e8314b17e5ce791cf07a9a4cdd21688495edba6a65e838e0641e9c974a5786643c7547befc7c019e96b6a3d1ff738cef395bedb5338318efdb5a07a16d259", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/78'", + "generatorKey": "37df5572ddb12b67b9aa5191ba9baf9d76a50307fbe188924766225d86958dbd", + "generatorPrivateKey": "5b65e4fdcce39eaeac5a4216ed37e62b793a5eb62eef2a1c28007c0db5826cfc37df5572ddb12b67b9aa5191ba9baf9d76a50307fbe188924766225d86958dbd", + "blsKeyPath": "m/12381/134/0/78", + "blsKey": "884b03c63f8d095165b67cb23131ca1053cbc73739549aa2ee21ca0b2b925994855dd46a81ebc3dedb309ceadd013f8e", + "blsProofOfPossession": "b4879cd844644b1a21f1676bf671854afb1536c5a330c1fef26b2669238efa373f70815e01028506b5cf6b75fe77e79e0efb6ef74e8111c7f1a189d4b0bf4c867190aa57e670b53dff5951a29eaaceda788ed674acdf33eff228278dc61c3cd2", + "blsPrivateKey": "0702deacefa1cedc12296f4fa5ceb618dd4f481a0f86adde2a7ae292a4da68e8" + }, + "encrypted": {} + }, + { + "address": "lskaw28kpqyffwzb8pcy47nangwwbyxjgnnvh9sfw", + "keyPath": "m/44'/134'/79'", + "publicKey": "b9fac5757bfb5f0fffb3825958f1cbfe0359d128df881ca191af00fd4243ef6c", + "privateKey": "fc34f5ee0bf978c4cd98583f6c789909bf63054535da80d388356722b63ac88fb9fac5757bfb5f0fffb3825958f1cbfe0359d128df881ca191af00fd4243ef6c", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/79'", + "generatorKey": "bfe46727c386585d8d59c02efbe48d4c1a919ff07b87267156ab96e10ac730b2", + "generatorPrivateKey": "eee04fe7d9fd8f4f6710ed5b98672707cbafd9f3a8d9f11f399230686fc5ce46bfe46727c386585d8d59c02efbe48d4c1a919ff07b87267156ab96e10ac730b2", + "blsKeyPath": "m/12381/134/0/79", + "blsKey": "b279e1a3a5edcd1045682e7029045b70dffbae55c49b14391b9f776750193269b4fd1d9f0807d9ee66e264e08ecd97cf", + "blsProofOfPossession": 
"83a5128e710b91ab91f7726223120b389c1f77735c9c1d408c466b7f0484b020f0d2d50edc36d49e410141d8a509b132059142e250f145810eefce03dfdda25aa84214d30cdfb6ca11a929337bf53dfe4c675117c06e4a67206119ed1e2b2b9a", + "blsPrivateKey": "6837f740126f55e5a1ecbba4d8281c171c73ae1f20e5efe54d6b6a5da2cca543" + }, + "encrypted": {} + }, + { + "address": "lsk7drqfofanzn9rf7g59a2jha5ses3rswmc26hpw", + "keyPath": "m/44'/134'/80'", + "publicKey": "1f96630d57c8ceb77d50e80931148d2fb8c66ab5d5c030f35e6fdd3bc3f0af78", + "privateKey": "8d9919a3df297df65b2f0b4565b405374b472e6d1933d790d1f0f81f841303c11f96630d57c8ceb77d50e80931148d2fb8c66ab5d5c030f35e6fdd3bc3f0af78", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/80'", + "generatorKey": "71d5b4b08ea0b7a0ff95f779aec53590a3bcb5a87fc770334f8c9ee57fdd79d9", + "generatorPrivateKey": "8c3f82e435cd1f5de4dccc93740243bb8b87e4cacb9833a8124f7016e35607b171d5b4b08ea0b7a0ff95f779aec53590a3bcb5a87fc770334f8c9ee57fdd79d9", + "blsKeyPath": "m/12381/134/0/80", + "blsKey": "a6d6315e85e8138de21f94d0c5c6f4c2515d493b17653156745155b25f9f121f6d13e7c36a57fa5002a9aa0a0b282394", + "blsProofOfPossession": "ac38044b8d84ed22d42da3a240b7c2dd16fbdf3b03655226b46b6eea46256a3ee33232771d67da1a4df6717476349647077f5cb29715333d8c55f5b6ba70c77af1944ac54c913445da29c99dd441e36d9def69c0e9709ce062ac70e4d15628a9", + "blsPrivateKey": "414e6ea6a1cdde39a74d5d4f4debed95fb523099ee5b50da5b12579bf62a7beb" + }, + "encrypted": {} + }, + { + "address": "lskayo6b7wmd3prq8fauwr52tj9ordadwrvuh5hn7", + "keyPath": "m/44'/134'/81'", + "publicKey": "af72e830f5beb4f4947f9b34574df647ccd1c2047a67f36b288b51c17a4b926d", + "privateKey": "5de29c553a012a687761d0716008b865985684796068682590d15257d258c779af72e830f5beb4f4947f9b34574df647ccd1c2047a67f36b288b51c17a4b926d", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/81'", + "generatorKey": "5ec5a5a2c91414f5cc5e3354b58671e624bc88a39fdc8f128593daa06545d6cf", + "generatorPrivateKey": 
"ed2c37ad4313b5b994299586dd207e22f061dc2dcac3fcfe209a2242aa96f1e35ec5a5a2c91414f5cc5e3354b58671e624bc88a39fdc8f128593daa06545d6cf", + "blsKeyPath": "m/12381/134/0/81", + "blsKey": "881fa9b753cb2f89d267e0615cbd1ad9664d331f21d89cef2131686b0af55112fe1ad4df7f2c085f78142e75d90d2cab", + "blsProofOfPossession": "898471d3356573d6445906d973f1876f1e38570b6dc9c875c88138b302806c071efbe327f66c6646f02c134c3b1b019d0227bc83acd0ca10f65adf1b8fad7c9cb383909a015fd1d678c6272e5317da58d45b89fc1c954641a61169bf1c1a1728", + "blsPrivateKey": "13003be69f241b8534150263ba8842d41a795e644f6ccfb074f0f40a2c2c5b55" + }, + "encrypted": {} + }, + { + "address": "lsk966m5mv2xk8hassrq5b8nz97qmy3nh348y6zf7", + "keyPath": "m/44'/134'/82'", + "publicKey": "2dffbbb67b9fbce2146f5ce4778d237e7081771c0094b4e0774782509a7dfb6e", + "privateKey": "df0b951a2aefa073080cabae402057853e9b8ebc862b6e298fc0899e0153bdef2dffbbb67b9fbce2146f5ce4778d237e7081771c0094b4e0774782509a7dfb6e", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/82'", + "generatorKey": "875d9a84adcf997034d5ab6189a063d9817da3a6c8599cc46c84b70b5081b18b", + "generatorPrivateKey": "25a3d63c742c8b5fb168cf2c8af45a8778fee8f87f709279bd9d35d7cbe6c4ad875d9a84adcf997034d5ab6189a063d9817da3a6c8599cc46c84b70b5081b18b", + "blsKeyPath": "m/12381/134/0/82", + "blsKey": "b847749ece25a2ef51427de371b4efc2342fb38a2c5822b941c1dbf43c3f8dabf5dc0e1620d2bdafb597d697e30ab801", + "blsProofOfPossession": "831a557a972e0ed1a9cdab88a13fea899ce1b7e6475ee2d42a1a1faa09fe9042eaab3bd8b14f2faf4ecff84780b8db6719e8d6bc8917ada1f77182b2fb4a40b544c02486fe0394b8fcc72ac69fcdf3d6c0920469225bf0ad2e047fc68b9376a3", + "blsPrivateKey": "6a934defd6cfe5fc5936d88349dd6a89afb2e8607d1f0c78f6526f5ab363a4d4" + }, + "encrypted": {} + }, + { + "address": "lsk37kucto34knfhumezkx3qdwhmbrqfonjmck59z", + "keyPath": "m/44'/134'/83'", + "publicKey": "958708971b228881efe4180d3c2ca4037fb97a2292dc23f6d8a1ccc433779f7d", + "privateKey": 
"4dd2f4daa47f5ab0443fe7b781d637b409c6613c0129bf6bfb9882c09f202bed958708971b228881efe4180d3c2ca4037fb97a2292dc23f6d8a1ccc433779f7d", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/83'", + "generatorKey": "edec02268c216d131fa9ec045049e6ac1526f48da772a34b1536c88c5af223da", + "generatorPrivateKey": "3a092f3763a23f8ff72b4f9a11075d385bed74bdd2d3c16c14e742ace9d7e28bedec02268c216d131fa9ec045049e6ac1526f48da772a34b1536c88c5af223da", + "blsKeyPath": "m/12381/134/0/83", + "blsKey": "94c8d9240de83f6b09905756fae29c2c3aa9092649776ebe037f20011b3bff835944eae63b2dcf6c3861f11d457a875e", + "blsProofOfPossession": "9900c9235a0365b9a0b5dce686903737cc4aaa76e8f9e47367954b07ee3a0c0ab51351cd746966556ddcc53e69eabe0c025195d1d3a6788d69c1820bd1fecc096eea09770fe43f86f898c6182ce3057fcd52b43ce096a07b4da3f2369353988e", + "blsPrivateKey": "07324357227d9af227a9adc8365933b1a0799282e033f2ad85c39e80f4a7e18a" + }, + "encrypted": {} + }, + { + "address": "lsk5y2q2tn35xrnpdc4oag8sa3ktdacmdcahvwqot", + "keyPath": "m/44'/134'/84'", + "publicKey": "1556035a614d4560066996288ca75dbcaeb5bfbffe935da23208cf8fb1d30157", + "privateKey": "fd45b5940c96ea5873baf5f5253eb214477023c63545dc7d5b281393de9aaa8a1556035a614d4560066996288ca75dbcaeb5bfbffe935da23208cf8fb1d30157", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/84'", + "generatorKey": "4ae9069cbc0e2371b037342010c5ddbd9c6d4a8c8d0a9eae59bc6a3796866119", + "generatorPrivateKey": "16d9d5a00068bbf424aa7e9d660a0993b4a260bffb25907799175a8a9d8896ba4ae9069cbc0e2371b037342010c5ddbd9c6d4a8c8d0a9eae59bc6a3796866119", + "blsKeyPath": "m/12381/134/0/84", + "blsKey": "b8396076f1ae032b572145f01ea0a3b5418f226afb0496930cb68250ca59b16fe2fb6dadacd88132b9dcd19a07d7f773", + "blsProofOfPossession": "a096515a639c004e7aecee3e88ddbb572163b914de63b528db584b27fe6a0267eb95213ccbebea849a720f1f717871ff191a4cf52c9d0a4db57cfcf8f2453d22cd432a5fe64dcb45982abe84343608a8b22740f7f3fbdfe1000fede5f0a08db3", + "blsPrivateKey": "6e893accf873971fa56db1cb2aba3efb919b41ad88db4b8189a910f6e79689a6" 
+ }, + "encrypted": {} + }, + { + "address": "lsk6quzyfffe2xhukyq4vjwnebmnapvsgj4we7bad", + "keyPath": "m/44'/134'/85'", + "publicKey": "a9142d10c269a0c4682f153d570ea3d880031db76be7363f03a368f461e58290", + "privateKey": "117cf51251f9966fbcfc7c421d8ed2704f2e347985aef71142bc9cefd18095bea9142d10c269a0c4682f153d570ea3d880031db76be7363f03a368f461e58290", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/85'", + "generatorKey": "b5308c34412c54e4b8358b5fca16396084004ee37c6824c1ad751cbe8e50e24f", + "generatorPrivateKey": "be0eef0d6ba7e57c9366787d3706335179db8f891164388e0a9acbc13eb8590ab5308c34412c54e4b8358b5fca16396084004ee37c6824c1ad751cbe8e50e24f", + "blsKeyPath": "m/12381/134/0/85", + "blsKey": "b422e4fa8ab196e0bcc49f956ab3b5c13dc14442864dca80118dea7329308e7f7aa7547df293c826a29ef4bbfe517778", + "blsProofOfPossession": "8ce0fe2bf47180e74f315fda7bfdb376a277f394667c88661dbefcc57100af1d0a06d36ef406f7abc0282a1cb8f5091505d759a40739b11b4a1fd0060e2066edd79ad417168a977f1a59206ddac4bbabaf70feda572bb19c17b9d9034bfe28b1", + "blsPrivateKey": "6e196953fefb89d7a1aad387fc99756391b7adfb5590da079605ac95d4caaaea" + }, + "encrypted": {} + }, + { + "address": "lskvpnf7a2eg5wpxrx9p2tnnxm8y7a7emfj8c3gst", + "keyPath": "m/44'/134'/86'", + "publicKey": "1cfae47a4f613770c5dd321052cc81b569e685d71bdb7da9d4a95d8a035ed05f", + "privateKey": "1c16a7a0fbd0b063cca49264d18bfae921e038dd1fda6600e54a6588ecb093521cfae47a4f613770c5dd321052cc81b569e685d71bdb7da9d4a95d8a035ed05f", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/86'", + "generatorKey": "1d224ad4cf64a3db52b2509c5b63365db970f34c8e09babf4af8135d9234f91f", + "generatorPrivateKey": "34f86863e752c3e15b3d4a18826d55d8300fc00b31d2cc0c12999f72d90dc1c81d224ad4cf64a3db52b2509c5b63365db970f34c8e09babf4af8135d9234f91f", + "blsKeyPath": "m/12381/134/0/86", + "blsKey": "86bc497e250f34a664a3330788292ee901aa286e10fcb280a4a151a8741bc0d154b947a4d3cd9bc5b552917211081466", + "blsProofOfPossession": 
"97a20b81bdcbc7a4f228bc00894d53d55fbb2c53960f0ddc0cfa0f77395a33858a9907079773ad50a220cbdb49bc1d171250df83dd70572c4691eb280ae99d4501b289676b6bb0ad0e859b525752015bf5113e49050a8c70853470f2dd7e9344", + "blsPrivateKey": "6c4e85a20db21bc06ae05a2edebe13688400611e830b77fdb62bde3b1ecb715d" + }, + "encrypted": {} + }, + { + "address": "lskkqjdxujqmjn2woqjs6txv3trzh6s5gsr882scp", + "keyPath": "m/44'/134'/87'", + "publicKey": "006ab84d7246fa450123b5a476a6ecb8622ac38a06ef87948bd5b4dce0ac5c61", + "privateKey": "dd04565d95cfb8abdfdacf4ff62f93c28861dc6d0d9f927a4f18a170d04481ad006ab84d7246fa450123b5a476a6ecb8622ac38a06ef87948bd5b4dce0ac5c61", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/87'", + "generatorKey": "c0aa7af3198f0e3a6bf35c5be38e0f181827735b1c3a635e8db05b80b3647054", + "generatorPrivateKey": "0c046bcc79d3af083cb9d7fecffd601f20be44c786a3bd29461e37d1c06b7f8fc0aa7af3198f0e3a6bf35c5be38e0f181827735b1c3a635e8db05b80b3647054", + "blsKeyPath": "m/12381/134/0/87", + "blsKey": "95acb59c54e53f09d7aac37c2db59c6df0ebb1e38120690a9035c715dc9862995472c72e9f48bfb05e920494dc17e9bb", + "blsProofOfPossession": "8798b4e143b15d10965194d0350d95c374d214d14f6a0c750a1a1699f1221388f01d00c6b708167fc7fcf355591abe370ed45c55306fdc372d26432cba8efc1f83238c1f2e669111656ba61b4bff391786713c28f7d1c6e717fbe98aec2dfda3", + "blsPrivateKey": "0251ae54a957ebe5cec7315592870cf6944434934a811eed219c1e42662f37f0" + }, + "encrypted": {} + }, + { + "address": "lskvwy3xvehhpfh2aekcaro5sk36vp5z5kns2zaqt", + "keyPath": "m/44'/134'/88'", + "publicKey": "80828e04067b8630864b6a21c6c998c6ac5ee744644125e5905a08ccc9f01bf1", + "privateKey": "310c22882aeee8d4d9c5fa47613684cf4b5c4fff2343d35904b4d4757103dda780828e04067b8630864b6a21c6c998c6ac5ee744644125e5905a08ccc9f01bf1", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/88'", + "generatorKey": "20a50d60059dff36a6f6c922f55b018d288ba1f9df5120eeb8fa8e3745a800ec", + "generatorPrivateKey": 
"a01f3582e3adf093686463ce0f5652a821eb9ad00216d67efef465a95df153af20a50d60059dff36a6f6c922f55b018d288ba1f9df5120eeb8fa8e3745a800ec", + "blsKeyPath": "m/12381/134/0/88", + "blsKey": "96482192c99ac4569b2d139670e566ca5ccf41f39d50b7ddcf69d790bcd556e797614ecb3dda2017e5e3ac2bab4e82d0", + "blsProofOfPossession": "865e6e88cf91b061b92f2d499936f384c9a3df52de5717661b66c4fd5150f1b171350c6abeab96fb905b6294ca7694420728022d84f4c31180f903a6ab8b5b8153fdcf65d46c8a018e65c0459e64c931b6544b6f00e673c30f2a82402fe8be3c", + "blsPrivateKey": "4f5694686955714b3a71244e647c1463545af4f93ef556c8417fdabb429e554b" + }, + "encrypted": {} + }, + { + "address": "lskym4rrvgax9ubgqz6944z9q3t6quo5ugw33j3kr", + "keyPath": "m/44'/134'/89'", + "publicKey": "c5e49e11ab7f218a99d98f47f6df27c6a8a4aa1489a8a48cc54e448700125aaa", + "privateKey": "bc7226156e4882cca468daad1c4fff4dee9efb36b7c861d315b6babbd55a8323c5e49e11ab7f218a99d98f47f6df27c6a8a4aa1489a8a48cc54e448700125aaa", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/89'", + "generatorKey": "4514d1723eed164b3792f1950d3b1c7a1067441ba207cce8d9bdd6f436a119fe", + "generatorPrivateKey": "f9e9f39940de3d64a3c93ee626df1169a8f6b5bcbb3b97ed9328ff9b02e22ff34514d1723eed164b3792f1950d3b1c7a1067441ba207cce8d9bdd6f436a119fe", + "blsKeyPath": "m/12381/134/0/89", + "blsKey": "a5963aa24ed05e95d19fd9de35ae6f523aad987ab2b9897216091e798e15f5062e9734b11fcacd6b8f312162ddc10940", + "blsProofOfPossession": "8a1ae28d6d70bfa0dbcc694c811c05ac6e697a17f41d45a32e1cb5b225bd42de7c1043f4af3c17d92641c4d017569e2302dad3e32493294831da564a07154e5098129639deb89743d1146f8e01f9f6f32f382905707051467242b646d86bad05", + "blsPrivateKey": "6b15b3a0f1484c2db866606cf0c6cd8270c3ff294118d7d34ec3d0fa3d9c3d5e" + }, + "encrypted": {} + }, + { + "address": "lskmc9nhajmkqczvaeob872h9mefnw63mcec84qzd", + "keyPath": "m/44'/134'/90'", + "publicKey": "55a02f49309f5ba1ff6c55c4b5fae4d966cc17cc30e769a42ce4bc7d5c3706c6", + "privateKey": 
"7766e85d16e1fda134af1e4e323365f7dcc1282a49b4b08b0ff82363cb07062655a02f49309f5ba1ff6c55c4b5fae4d966cc17cc30e769a42ce4bc7d5c3706c6", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/90'", + "generatorKey": "b67f0a9ad61ad6867b54aaaed6036001485d7a7ba13770aed786b34241f37cda", + "generatorPrivateKey": "c6b7a360f60b7e2b554a47b6d51f01e9e33ea7a9fcd2254ce23af34cf08a1f3cb67f0a9ad61ad6867b54aaaed6036001485d7a7ba13770aed786b34241f37cda", + "blsKeyPath": "m/12381/134/0/90", + "blsKey": "a029f74eaf914e3dfd828502f224fff7311a964d11eb1c335eebadc38b5c20a98f79bfc53ccf6ee3630cfa282e88489d", + "blsProofOfPossession": "b5cd13eac543928db25ebb9d69dfaacc04a0d41924f2010a6f04b2457523a5a423a9c49756dbcb969a7b2c49ddcc7c710ada766fdddaedbff02f68e2b75108f111f4078d2705f06551ef524f201d50ac32c423d04a7e6e7c6c8a64d70c013ec3", + "blsPrivateKey": "40726625c04da9fb36a758b0859ec1a77d546750e454bf45dc2c77b1cc1fbb49" + }, + "encrypted": {} + }, + { + "address": "lskf5sf93qyn28wfzqvr74eca3tywuuzq6xf32p7f", + "keyPath": "m/44'/134'/91'", + "publicKey": "674d283554e152216de9a42e979924ff9b05b3e39ed5072026fc8710b4fdd926", + "privateKey": "1a72b8481a589c55ba26d2805e16b58f234b243c2c87a0c39d757ec1238e66b8674d283554e152216de9a42e979924ff9b05b3e39ed5072026fc8710b4fdd926", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/91'", + "generatorKey": "d05b69bda8b5cd103c620a814cbab2f2a131dcfda6bd4cd568155ddb1afd423b", + "generatorPrivateKey": "58d029150eeb456c86e0c2aea034d210c4d356278b4102707e2b7e4bfadcff05d05b69bda8b5cd103c620a814cbab2f2a131dcfda6bd4cd568155ddb1afd423b", + "blsKeyPath": "m/12381/134/0/91", + "blsKey": "947456674b5616341cc932afb30e42973dd17582a81e5fe958277efc828535cd7c9c778410c52e069ed23e4cf629814a", + "blsProofOfPossession": "872ce3383378215d3be299f32196e9cb2ae1f9e06101afbb9e7709eafb37eca8548f156bbdfbb120c2d06fdbfdf5455107f2c818bfbc9b4e9f5fb4c50f79b24f5fc84f9e137b286d71c3d588a7af684d36bf701425b25ece2d9fbacbadb58f4e", + "blsPrivateKey": "7122afff2e9ebeadc8575a12f8cfd205b04c9c04eb3f90a354ae4ecc8479b54c" 
+ }, + "encrypted": {} + }, + { + "address": "lskos7tnf5jx4e6jq4bf5z4gwo2ow5he4khn75gpo", + "keyPath": "m/44'/134'/92'", + "publicKey": "3d1a78899766f0662536e49af492f961fb3f1eb22f3172dad04b30c4302af87e", + "privateKey": "fe473f20516d7fa871fa0787ffdc42eafa848619ecffd3fc57de2c8aa6f1f13c3d1a78899766f0662536e49af492f961fb3f1eb22f3172dad04b30c4302af87e", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/92'", + "generatorKey": "d5781773a9b07a569a0d87c0bf82103fd459a2185fc32f5c312a663c5bc65784", + "generatorPrivateKey": "e78ae7b42d3d6e7df38f69f3b25db40b31923b4fc088b8793ff9a8f07ef9ecf9d5781773a9b07a569a0d87c0bf82103fd459a2185fc32f5c312a663c5bc65784", + "blsKeyPath": "m/12381/134/0/92", + "blsKey": "87971b8a0520e08dc8dbb8114de7ecd44e98844c9179585806e8a1edaae1190ea85e6471767e90074d87d1dfbafc983c", + "blsProofOfPossession": "ac1fa23a608ce0be52ada7759c4631a5e3c7828a2a622c718b67c4d8996eeed61c382ec319ff2c608290c141ef741ba013f7567bf95cdfb29295dea31adb440f5d856f5688fdd553f47a06ab5692ee5fb99e5a50b329fe4406bfefb924b5665c", + "blsPrivateKey": "36d1ee8a349ef4cdc983bb55ef2fca9415f2f9ecf72df9a26e4138b534979852" + }, + "encrypted": {} + }, + { + "address": "lsk5rtz6s352qyt9vggx7uyo5b4p2ommfxz36w7ma", + "keyPath": "m/44'/134'/93'", + "publicKey": "b73d459c979435a84e70ea70bb18e14f312afe49af535ff4c9cd0f3a6d4cbd1e", + "privateKey": "dcb8988276c8aa0424bbb764125504f83b944d5422fe5b721fa8e5db29d08920b73d459c979435a84e70ea70bb18e14f312afe49af535ff4c9cd0f3a6d4cbd1e", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/93'", + "generatorKey": "d1f10929b1eab8232be9df3b792496eb56bcb5c0a8c2fd04e3be1fab26c7980e", + "generatorPrivateKey": "95c19ccad9cc85f4b8776e2ce5d12c646b6cb6bd60d2d2b89089d664f97ebbabd1f10929b1eab8232be9df3b792496eb56bcb5c0a8c2fd04e3be1fab26c7980e", + "blsKeyPath": "m/12381/134/0/93", + "blsKey": "8f96883db13e4f43e7280d8a58e7642228f46c375853a17e8cdb34fdeaf4e363a82678d2f54a8630218e097ba39d4370", + "blsProofOfPossession": 
"91a2efa4a407f63eb9157a4f4378bf6dfb4fc6d5d2714c2ee81f49ac90bc5dc3f1b72051a1fa1615f2e2d694cf17c27c1429e94bebc023feea2a405f7a8343dcc567636d15ac95ef84b1c673298becb766e036d9869e2113d9f4602f6e6092dd", + "blsPrivateKey": "5cffd4aceca113ca008c1d7603eabbbb0f0ba6f3595abf97b875e6687a5c9633" + }, + "encrypted": {} + }, + { + "address": "lskzbqjmwmd32sx8ya56saa4gk7tkco953btm24t8", + "keyPath": "m/44'/134'/94'", + "publicKey": "150cdf5f275aa57cae604f22f14ac2b9635ac52cd1a911a9c253842a880413fb", + "privateKey": "da4abca8970207329ad32eeee64d12e16e729cbbc75effbf3007c28f0da7071e150cdf5f275aa57cae604f22f14ac2b9635ac52cd1a911a9c253842a880413fb", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/94'", + "generatorKey": "3f44b319b82443eabb300eba5a2f323d72e44d9d2d5ed0b21a24051595582dd5", + "generatorPrivateKey": "51d9322ce03caa96cd576f48888c9a284b3e9e8f05a9a5a6395563997fecd6f03f44b319b82443eabb300eba5a2f323d72e44d9d2d5ed0b21a24051595582dd5", + "blsKeyPath": "m/12381/134/0/94", + "blsKey": "a6689556554e528964141d813c184ad4ec5c3564260d2709606c845f0c684b4bb5ff77054acb6eb8184a40fcd783670b", + "blsProofOfPossession": "831e87337aa9d7129b42ac2ac6d355395b07829148f3a4570293cb8ea00593cbbd1933a9393d8f5c4028f74c0d6c29511526e76d082fd2207f65e653129a29f22787cf19d4efe50ff43651e16463f868714354d6860e62dcd715858c4c53fc51", + "blsPrivateKey": "3980fcb82cccfce71cb76fb8860b4ef554b434db8f1a2a73578080223202802a" + }, + "encrypted": {} + }, + { + "address": "lskrskxmbv7s4czgxz5wtdqkay87ts2mfmu4ufcaw", + "keyPath": "m/44'/134'/95'", + "publicKey": "c215430e686f7f722aaa33a9652104ea23f3355906f77bc5a9e7940ab70b6fdc", + "privateKey": "e97d7dc3b6f3f0ea4445d1c3087af59d2e96b60646cce4bb417501430ae5ce91c215430e686f7f722aaa33a9652104ea23f3355906f77bc5a9e7940ab70b6fdc", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/95'", + "generatorKey": "07614fd5036d099a3caf004d46a083d12df2024fc03ef29cec22e58d1f78531f", + "generatorPrivateKey": 
"45569843c81a8513089ba0c1ef12c436a4397b7ed1e0fb045a6c0c0a7ec8027807614fd5036d099a3caf004d46a083d12df2024fc03ef29cec22e58d1f78531f", + "blsKeyPath": "m/12381/134/0/95", + "blsKey": "98c4f0e2b01f1b6ed07035fe46c17a40fe5409b1461a2b697afaf869e2f8c88b2db297b9a149208109bab2da195235c0", + "blsProofOfPossession": "8dad459d6b312d4a6767695029525e95f04e3ee083de85d0db5d818d15d32ef7aecb57f608c2c10355e3ca6dba8018e5192862d80f00fe1f71fd396d81d6a7649221c50bc8336efd12dc1cc13ee3c3898617971244af6a8da5ccd9224c9ea2f9", + "blsPrivateKey": "4601428462ce9b60ec00563894972ff082ff16691e45edbfef67dae7c300d2d3" + }, + "encrypted": {} + }, + { + "address": "lskjnr8jmvz45dj9z47jbky9sadh3us3rd8tdn7ww", + "keyPath": "m/44'/134'/96'", + "publicKey": "c8c2b511a2c7e697ccb8e8332e343e2db6ebbd88068422e1539011bbed669221", + "privateKey": "6841ae7fddd9f1895fcf65734faa7792f9138c9854c6786b0938f4419ee00316c8c2b511a2c7e697ccb8e8332e343e2db6ebbd88068422e1539011bbed669221", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/96'", + "generatorKey": "25ae368be016caae7066a6ce9f2ad8e4220d328ffb860a6d275d878f4882c70c", + "generatorPrivateKey": "ffd8857840f0d6c52693d21a194f1a419fe0b78b9fa4b90b1fab570ee16073da25ae368be016caae7066a6ce9f2ad8e4220d328ffb860a6d275d878f4882c70c", + "blsKeyPath": "m/12381/134/0/96", + "blsKey": "8ce6c9d2ed4f223635e3bd85476f0d56cdbb5e4090ae22b10a7fabd08d231193cf6d9c4f5b400eb4b310ef270811e424", + "blsProofOfPossession": "b896aabbcc1a165adaec26feb72fc580d4a6512dd09df40b4333381d2536b5ac36d22e91469a976ae446a6291792cb6a141013baaaae12faff26d06c6a6b722a28635c72d49fcd50ac910ca01d760e80892fc5757a18597cd1ce7f16dbabd195", + "blsPrivateKey": "47320a453378fdf5463d3a0b930fedc913ea61562b0f2eb5dc402fcdcbba9bef" + }, + "encrypted": {} + }, + { + "address": "lskmadcfr9p3qgx8upeac6xkmk8fjss7atw8p8s2a", + "keyPath": "m/44'/134'/97'", + "publicKey": "f9d11d99d4862ff2bfac4bc2306f238274cac119bc990d325732c82a09011678", + "privateKey": 
"1fd11f9dd4d51518021e84016507c9611ff81227fde8f51b022e57fdad05fe53f9d11d99d4862ff2bfac4bc2306f238274cac119bc990d325732c82a09011678", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/97'", + "generatorKey": "ebeb7f828aaa40ab6040e914b66b6f5d76964a0579bd29bf98c2641547f229f6", + "generatorPrivateKey": "0a48d7c8fd894f9625adb370496bdc77738a431ac859741a6e249500981c6affebeb7f828aaa40ab6040e914b66b6f5d76964a0579bd29bf98c2641547f229f6", + "blsKeyPath": "m/12381/134/0/97", + "blsKey": "a13d3a62d053b3a092d736f3c96c89fb982924b9cfd1e8283c4ced5a537732718e73c6c86c94ddd416eb94a753366b7f", + "blsProofOfPossession": "950583faae3492f5d15f9ad72bad982b2f513956cc1259e16e28ef2e18f7db3df1bf1cbab7350e390ac5a8785c574fe30878784e6c5d50668184c4c92bda196432034a7e092d9e62736ca543e1b7e594ccf6b81d37c17fabf73b846b67a0bc8f", + "blsPrivateKey": "390cc059245031c463d51a4904d080a495aa779bfe1fec5bea9e670a5211a832" + }, + "encrypted": {} + }, + { + "address": "lsknyuj2wnn95w8svk7jo38jwxhpnrx7cj3vo4vjc", + "keyPath": "m/44'/134'/98'", + "publicKey": "5bea76165e8cae84bfb3b2b65d00aa4fd63a00b6153654b5f88e27add708e04a", + "privateKey": "44c0c9eee20e7e8fc1a57564e32d8616868e76956b51794496cc3f8194c7ed0a5bea76165e8cae84bfb3b2b65d00aa4fd63a00b6153654b5f88e27add708e04a", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/98'", + "generatorKey": "4325779e64521ded42c0e2e873c16b753433d0e7f9a1e046e27a0fae9378d9c9", + "generatorPrivateKey": "3b1fe311327d7e65009c2cf5fc067f59abc2bae1aee6838158108e61d7bfa2ad4325779e64521ded42c0e2e873c16b753433d0e7f9a1e046e27a0fae9378d9c9", + "blsKeyPath": "m/12381/134/0/98", + "blsKey": "a0fb290e74bce8c5858dc1b615bac542d2280a477912ae06b8d4f07c6d451eae44a47cae6a7a1fb5cedea9efe2d4e5a5", + "blsProofOfPossession": "8b1a7d2b1566ce81c8ac2b8c88b6966b960462d0fa4e54554f53ab184c31c72c65fce904aff79d4235dd3e16e8eed2780e083a31a432e70a538de1b81d8a8a49d31bdd361f357d57fe4568d1b506492fc72f42d4b344ecfac2d560bbd2214621", + "blsPrivateKey": "3308c88c2a602c8d5cb7a84d9e70e08fc97a4e95ac27f18360496270173c27d8" 
+ }, + "encrypted": {} + }, + { + "address": "lskrccyjmc8cybh9n3kgencq8u7fh796v2zfraco9", + "keyPath": "m/44'/134'/99'", + "publicKey": "0996481caf431af4f6ba452010898aa72b04f15115192b6b25a7e14feeee1a0c", + "privateKey": "2df44d979b4c374c2021b7dd16890943b1e2a76ba94297d35aa18023001072ef0996481caf431af4f6ba452010898aa72b04f15115192b6b25a7e14feeee1a0c", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/99'", + "generatorKey": "bf9ebe25faae5a874d97ad1772ad062ca52f63e48d806ef641e025a963224200", + "generatorPrivateKey": "18120516aa855a5be57ae46b20c7ac0efb66f9b2813ce6832e309302ea6920aebf9ebe25faae5a874d97ad1772ad062ca52f63e48d806ef641e025a963224200", + "blsKeyPath": "m/12381/134/0/99", + "blsKey": "8b436ed371b7af11b31347c12321d90a427e9aa8d93275a27faedcbe2dd06c5dce1e1a4a03b0ae030e5cd0106a942cd8", + "blsProofOfPossession": "b1dcf2ff65ba4096611f392fb56d104754927cba14ec3d193ebcf7d6eaab062c7ab770c512e815c7d52c37fa9b8622400df7939f4bbeb8566beebce1b13d67562f7bb6a01f988a501e4ef691b544cd05796010b614014ec3036b171c7392cd7d", + "blsPrivateKey": "39032c0f523eb58f549d1e5bdd0f1b38ea435bc0e26fb8a9458ca9908919980c" + }, + "encrypted": {} + }, + { + "address": "lsk3dzjyndh43tdc6vugbdqhfpt3k9juethuzsmdk", + "keyPath": "m/44'/134'/100'", + "publicKey": "c515bd1d0c9c09d3ce40eeca511489b8ed7c2ec1bc03bd5611f3a6b47c16469c", + "privateKey": "a8faeeba2da8b823b014d37165a1bfc26e74507641a65c742ae6e5cf96fb31d4c515bd1d0c9c09d3ce40eeca511489b8ed7c2ec1bc03bd5611f3a6b47c16469c", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/100'", + "generatorKey": "9f1c361befb0ae35de28e8f0e25efe75ede78aa26c703625cc17e7fe2e7208f3", + "generatorPrivateKey": "eb79f34b330f6efe29593cba5a5a8a369cfd1bd0887689020387c536e44da5249f1c361befb0ae35de28e8f0e25efe75ede78aa26c703625cc17e7fe2e7208f3", + "blsKeyPath": "m/12381/134/0/100", + "blsKey": "a1782a5f280f9894cea555d6f355c1f23e0581140c64f20ae469edd6ace7dcb6266227feecf002c2b508766e730c6f4f", + "blsProofOfPossession": 
"84e053bb01b22997e46ce4cbece0f5478e27cd49786cc36b1459c8930ea408e663bc725184197eb726fadf6988503c9b01be391ca3eb16587137cf5a3941717837baec7869896bae401bb513359485142778a52638429328f06a4469b7e21bb0", + "blsPrivateKey": "306651c1b7494c98b3d190fbf54b2247b9a456cb21eaadf3a0a668d740f6bdba" + }, + "encrypted": {} + }, + { + "address": "lskyunb64dg4x72ue8mzte7cbev8j4nucf9je2sh9", + "keyPath": "m/44'/134'/101'", + "publicKey": "e1383015621226361ac69c33c6b4e6148a30b08736ae0e043055b1ee9c2ad163", + "privateKey": "540473e6d615a2ebf88f99ad6387fa80b90b8847cb77fcfe09e4fb1e8a2bd6b0e1383015621226361ac69c33c6b4e6148a30b08736ae0e043055b1ee9c2ad163", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/101'", + "generatorKey": "a9b0c063fee99a903a55da57e3d16f069145e414b62e25dbbf218bd608a61f7c", + "generatorPrivateKey": "c545eee8e84f1ce916cefa07dd86818165e7187f9b33cd487060ab6944951847a9b0c063fee99a903a55da57e3d16f069145e414b62e25dbbf218bd608a61f7c", + "blsKeyPath": "m/12381/134/0/101", + "blsKey": "870db2da31a9471077677bd9a7529ee7523bdd64fdba46c514e94aa52e940566479cfdab29b07c1573aff6ba7040c684", + "blsProofOfPossession": "acbe270292cfaa154f256a83c9bdde889a9205c85c5ff0f41dae586dccc7f29f0464fbc087a5c5adb3cb4eca3b95bc14187db64cccd24e98d3e75215b69bd2bd0b357834c1ccacbdf91556fa59a86d04d1fc8aaa3be2ae5256aea3bd36d26942", + "blsPrivateKey": "4f2fdd4bb6fd739b02dea4a44ad1c4d8fa126c1ed1ebefc6f0016abd8e2c1a9c" + }, + "encrypted": {} + }, + { + "address": "lskoys3dpcyx5hkr7u2fenpjrbyd69tuyu5ar4dgy", + "keyPath": "m/44'/134'/102'", + "publicKey": "f3388194bea3a10bfb3b0b89d47417450ce078b147b7d68c7feee57f0e5d8492", + "privateKey": "3b2dfd3635ebd2c1b8b139193322422ee8ffdeba6a5ec385bb3f8fc4913a19cef3388194bea3a10bfb3b0b89d47417450ce078b147b7d68c7feee57f0e5d8492", + "plain": { + "generatorKeyPath": "m/25519'/134'/0'/102'", + "generatorKey": "3efa1c0a728a9741555b84ff1d80aedfcaf85370e1602890d7ba610bf33500bb", + "generatorPrivateKey": 
"f06fc00decaf4f11f2f714788f28ed0a25228a08dc002e49e16945d3e9aa2fc63efa1c0a728a9741555b84ff1d80aedfcaf85370e1602890d7ba610bf33500bb", + "blsKeyPath": "m/12381/134/0/102", + "blsKey": "a4f78f9b10c5671cca5aa2526708b95bdec56f3e404fc6c6403de83338940dfcc8d6836ba3d98566d314d34438a042d3", + "blsProofOfPossession": "91a1d0b501b7ab2caa5d240eae92c8c0ccbf296ebd3dd9d03aac1ca569f803091ec5ab57b7f6c34ad1aeb9aee0ccc17a1911c8e7a9ca681a6b803bf27e303f59dcfa32f678c4bb35189a8b7e0a3af43771ec841bd2ab32a96cb2eab0a1c2ad94", + "blsPrivateKey": "074ab003ca5c16efdcab7e925a317e657d9fdfbdb6e97bb856f1389df5599264" + }, + "encrypted": {} + } + ] +} diff --git a/examples/poa-sidechain/config/alphanet/genesis_assets.json b/examples/poa-sidechain/config/alphanet/genesis_assets.json new file mode 100644 index 00000000000..d575139c811 --- /dev/null +++ b/examples/poa-sidechain/config/alphanet/genesis_assets.json @@ -0,0 +1,2799 @@ +{ + "assets": [ + { + "module": "interoperability", + "data": { + "ownChainName": "lisk_mainchain", + "ownChainNonce": 0, + "chainInfos": [], + "terminatedStateAccounts": [], + "terminatedOutboxAccounts": [] + }, + "schema": { + "$id": "/interoperability/module/genesis", + "type": "object", + "required": [ + "ownChainName", + "ownChainNonce", + "chainInfos", + "terminatedStateAccounts", + "terminatedOutboxAccounts" + ], + "properties": { + "ownChainName": { + "dataType": "string", + "maxLength": 32, + "fieldNumber": 1 + }, + "ownChainNonce": { + "dataType": "uint64", + "fieldNumber": 2 + }, + "chainInfos": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["chainID", "chainData", "channelData", "chainValidators"], + "properties": { + "chainID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + }, + "chainData": { + "$id": "/modules/interoperability/chainData", + "type": "object", + "required": ["name", "lastCertificate", "status"], + "properties": { + "name": { + "dataType": "string", + "minLength": 1, + 
"maxLength": 32, + "fieldNumber": 1 + }, + "lastCertificate": { + "type": "object", + "fieldNumber": 2, + "required": ["height", "timestamp", "stateRoot", "validatorsHash"], + "properties": { + "height": { + "dataType": "uint32", + "fieldNumber": 1 + }, + "timestamp": { + "dataType": "uint32", + "fieldNumber": 2 + }, + "stateRoot": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32, + "fieldNumber": 3 + }, + "validatorsHash": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32, + "fieldNumber": 4 + } + } + }, + "status": { + "dataType": "uint32", + "fieldNumber": 3 + } + }, + "fieldNumber": 2 + }, + "channelData": { + "$id": "/modules/interoperability/channel", + "type": "object", + "required": [ + "inbox", + "outbox", + "partnerChainOutboxRoot", + "messageFeeTokenID", + "minReturnFeePerByte" + ], + "properties": { + "inbox": { + "type": "object", + "fieldNumber": 1, + "required": ["appendPath", "size", "root"], + "properties": { + "appendPath": { + "type": "array", + "items": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32 + }, + "fieldNumber": 1 + }, + "size": { + "fieldNumber": 2, + "dataType": "uint32" + }, + "root": { + "fieldNumber": 3, + "dataType": "bytes", + "minLength": 32, + "maxLength": 32 + } + } + }, + "outbox": { + "type": "object", + "fieldNumber": 2, + "required": ["appendPath", "size", "root"], + "properties": { + "appendPath": { + "type": "array", + "items": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32 + }, + "fieldNumber": 1 + }, + "size": { + "fieldNumber": 2, + "dataType": "uint32" + }, + "root": { + "fieldNumber": 3, + "dataType": "bytes", + "minLength": 32, + "maxLength": 32 + } + } + }, + "partnerChainOutboxRoot": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32, + "fieldNumber": 3 + }, + "messageFeeTokenID": { + "dataType": "bytes", + "minLength": 8, + "maxLength": 8, + "fieldNumber": 4 + }, + "minReturnFeePerByte": { + "dataType": "uint64", + "fieldNumber": 5 + } + }, + 
"fieldNumber": 3 + }, + "chainValidators": { + "$id": "/modules/interoperability/chainValidators", + "type": "object", + "required": ["activeValidators", "certificateThreshold"], + "properties": { + "activeValidators": { + "type": "array", + "fieldNumber": 1, + "minItems": 1, + "maxItems": 199, + "items": { + "type": "object", + "required": ["blsKey", "bftWeight"], + "properties": { + "blsKey": { + "dataType": "bytes", + "minLength": 48, + "maxLength": 48, + "fieldNumber": 1 + }, + "bftWeight": { + "dataType": "uint64", + "fieldNumber": 2 + } + } + } + }, + "certificateThreshold": { + "dataType": "uint64", + "fieldNumber": 2 + } + }, + "fieldNumber": 4 + } + } + } + }, + "terminatedStateAccounts": { + "type": "array", + "fieldNumber": 4, + "items": { + "type": "object", + "required": ["chainID", "terminatedStateAccount"], + "properties": { + "chainID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + }, + "terminatedStateAccount": { + "$id": "/modules/interoperability/terminatedState", + "type": "object", + "required": ["stateRoot", "mainchainStateRoot", "initialized"], + "properties": { + "stateRoot": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32, + "fieldNumber": 1 + }, + "mainchainStateRoot": { + "dataType": "bytes", + "minLength": 32, + "maxLength": 32, + "fieldNumber": 2 + }, + "initialized": { + "dataType": "boolean", + "fieldNumber": 3 + } + }, + "fieldNumber": 2 + } + } + } + }, + "terminatedOutboxAccounts": { + "type": "array", + "fieldNumber": 5, + "items": { + "type": "object", + "required": ["chainID", "terminatedOutboxAccount"], + "properties": { + "chainID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + }, + "terminatedOutboxAccount": { + "$id": "/modules/interoperability/terminatedOutbox", + "type": "object", + "required": ["outboxRoot", "outboxSize", "partnerChainInboxSize"], + "properties": { + "outboxRoot": { + "dataType": "bytes", + "minLength": 32, + 
"maxLength": 32, + "fieldNumber": 1 + }, + "outboxSize": { + "dataType": "uint32", + "fieldNumber": 2 + }, + "partnerChainInboxSize": { + "dataType": "uint32", + "fieldNumber": 3 + } + }, + "fieldNumber": 2 + } + } + } + } + } + } + }, + { + "module": "token", + "data": { + "userSubstore": [ + { + "address": "lskzbqjmwmd32sx8ya56saa4gk7tkco953btm24t8", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskzot8pzdcvjhpjwrhq3dkkbf499ok7mhwkrvsq3", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskz89nmk8tuwt93yzqm6wu2jxjdaftr9d5detn8v", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskx2hume2sg9grrnj94cpqkjummtz2mpcgc8dhoe", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskxa4895zkxjspdvu3e5eujash7okvnkkpr8xsr5", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskvcgy7ccuokarwqde8m8ztrur92cob6ju5quy4n", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskvpnf7a2eg5wpxrx9p2tnnxm8y7a7emfj8c3gst", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskvq67zzev53sa6ozt39ft3dsmwxxztb7h29275k", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskvwy3xvehhpfh2aekcaro5sk36vp5z5kns2zaqt", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskcuj9g99y36fc6em2f6zfrd83c6djsvcyzx9u3p", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskc22mfaqzo722aenb6yw7awx8f22nrn54skrj8b", + 
"tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskchcsq6pgnq6nwttwe9hyj67rb9936cf2ccjk3b", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskp2kubbnvgwhw588t3wp85wthe285r7e2m64w2d", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskmc9nhajmkqczvaeob872h9mefnw63mcec84qzd", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskm8g9dshwfcmfq9ctbrjm9zvb58h5c7y9ecstky", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskmwac26bhz5s5wo7h79dpyucckxku8jw5descbg", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskmadcfr9p3qgx8upeac6xkmk8fjss7atw8p8s2a", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskbm49qcdcyqvavxkm69x22btvhwx6v27kfzghu3", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskbr5cnd8rjeaot7gtfo79fsywx4nb68b29xeqrh", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsknyuj2wnn95w8svk7jo38jwxhpnrx7cj3vo4vjc", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsknax33n2ohy872rdkfp4ud7nsv8eamwt6utw5nb", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsknatyy4944pxukrhe38bww4bn3myzjp2af4sqgh", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsknddzdw4xxej5znssc7aapej67s7g476osk7prc", + "tokenID": "0400000000000000", 
+ "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk3oz8mycgs86jehbmpmb83n8z3ctxou47h7r9bs", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk37kucto34knfhumezkx3qdwhmbrqfonjmck59z", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk3dzjyndh43tdc6vugbdqhfpt3k9juethuzsmdk", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk4nst5n99meqxndr684va7hhenw7q8sxs5depnb", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk67y3t2sqd7kka2agtcdm68oqvmvyw94nrjqz7f", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk6quzyfffe2xhukyq4vjwnebmnapvsgj4we7bad", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk5pmheu78re567zd5dnddzh2c3jzn7bwcrjd7dy", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk56hpjtt5b8w3h2qgckr57txuw95ja29rsonweo", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk5y2q2tn35xrnpdc4oag8sa3ktdacmdcahvwqot", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk5rtz6s352qyt9vggx7uyo5b4p2ommfxz36w7ma", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskoys3dpcyx5hkr7u2fenpjrbyd69tuyu5ar4dgy", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskoq2bmkpfwmmbo3c9pzdby7wmwjvokgmpgbpcj3", + "tokenID": "0400000000000000", + "availableBalance": 
"100000000000000", + "lockedBalances": [] + }, + { + "address": "lskowvmbgn4oye4hae3keyjuzta4t499zqkjqydfd", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskos7tnf5jx4e6jq4bf5z4gwo2ow5he4khn75gpo", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk966m5mv2xk8hassrq5b8nz97qmy3nh348y6zf7", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk7drqfofanzn9rf7g59a2jha5ses3rswmc26hpw", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk8vjsq5s8jan9c8y9tmgawd6cttuszbf6jmhvj5", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk8netwcxgkpew8g5as2bkwbfraetf8neud25ktc", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk8kpswabbcjrnfp89demrfvryx9sgjsma87pusk", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk8dz47g5s7qxbyy46qvkrykfoj7wg7rb5ohy97c", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk8dsngwh4n6hmf4unqb8gfqgkayabaqdvtq85ja", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskux8ew6zq6zddya4u32towauvxmbe3x9hxvbzv4", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsku4ftwo3dvgygbnn58octduj6458h5eep2aea6e", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskuueow44w67rte7uoryn855hp5kw48szuhe5qmc", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + 
"lockedBalances": [] + }, + { + "address": "lskym4rrvgax9ubgqz6944z9q3t6quo5ugw33j3kr", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskyunb64dg4x72ue8mzte7cbev8j4nucf9je2sh9", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskrzuuu8gkp5bxrbbz9hdjxw2yhnpxdkdz3j8rxr", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskrxweey4ak83ek36go6okoxr6bxrepdv3y52k3y", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskrccyjmc8cybh9n3kgencq8u7fh796v2zfraco9", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskr8bmeh9q5brkctg8g44j82ootju82zu8porwvq", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskrskxmbv7s4czgxz5wtdqkay87ts2mfmu4ufcaw", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskrgqnuqub85jzcocgjsgb5rexrxc32s9dajhm69", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskrga27zfbamdcntpbxxt7sezvmubyxv9vnw2upk", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsktn6hodzd7v4kzgpd56osqjfwnzhu4mdyokynum", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsktas5pgp3tofv4ke4f2kayw9uyrqpnbf55bw5hm", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskk33a2z28ak9yy6eunbmodnynoehtyra5o4jzkn", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + 
"address": "lskk8yh4h2rkp3yegr5xuea62qbos6q8xd6h3wys2", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskkqjdxujqmjn2woqjs6txv3trzh6s5gsr882scp", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskk2vnyd5dq3ekexog6us6zcze9r64wk456zvj9a", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskkjm548jqdrgzqrozpkew9z82kqfvtpmvavj7d6", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskqxjqneh4mhkvvgga8wxtrky5ztzt6bh8rcvsvg", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskq6j6w8bv4s4to8ty6rz88y2cwcx76o4wcdnsdq", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskq5attbvu8s55ngwr3c5cv8392mqayvy4yyhpuy", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskqw45qy3ph9rwgow86rudqa7e3vmb93db5e4yad", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskqg9k3joyv9ouhjfysscame66hovq42yeev7ug7", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskezdab747v9z78hgmcxsokeetcmbdrpj3gzrdcw", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lske5sqed53fdcs4m9et28f2k7u9fk6hno9bauday", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskee8xh9oc78uhw5dhnaca9mbgmcgbwbnbarvd5d", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": 
"lskewnnr5x7h3ckkmys8d4orvuyyqmf8odmud6qmg", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskwv3bh76epo42wvj6sdq8t7dbwar7xmm7h4k92m", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskw95u4yqs35jpeourx4jsgdur2br7b9nq88b4g2", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskwdkhf2ew9ov65v7srpq2mdq48rmrgp492z3pkn", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskwdqjhdgvqde9yrro4pfu464cumns3t5gyzutbm", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsk2xxvfxaqpm42wr9reokucegh3quypqg9w9aqfo", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lska4qegdqzmsndn5hdn5jngy6nnt9qxjekkkd5jz", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lska6rtf7ndbgbx7d8puaaf3heqsqnudkdhvoabdm", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskau7uqo6afteazgyknmtotxdjgwr3p9gfr4yzke", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskayo6b7wmd3prq8fauwr52tj9ordadwrvuh5hn7", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskatntynnut2eee2zxrpdzokrjmok43xczp2fme7", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskaw28kpqyffwzb8pcy47nangwwbyxjgnnvh9sfw", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": 
"lskdo2dmatrfwcnzoeohorwqbef4qngvojfdtkqpj", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskduxr23bn9pajg8antj6fzaxc7hqpdmomoyshae", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsksmpgg7mo4m6ekc9tgvgjr8kh5h6wmgtqvq6776", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsksu2u78jmmx7jgu3k8vxcmsv48x3746cts9xejf", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsksy7x68enrmjxjb8copn5m8csys6rjejx56pjqt", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lsksdfqvkbqpc8eczj2s3dzkxnap5pguaxdw2227r", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskjnr8jmvz45dj9z47jbky9sadh3us3rd8tdn7ww", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskjtc95w5wqh5gtymqh7dqadb6kbc9x2mwr4eq8d", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskjtbchucvrd2s8qjo83e7trpem5edwa6dbjfczq", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskhbcq7mps5hhea5736qaggyupdsmgdj8ufzdojp", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskhamuapyyfckyg5v8u5o4jjw9bvr5bog7rgx8an", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskfx88g3826a4qsyxm4w3fheyymfnucpsq36d326", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": 
"lskfmufdszf9ssqghf2yjkjeetyxy4v9wgawfv725", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskf6f3zj4o9fnpt7wd4fowafv8buyd72sgt2864b", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskf5sf93qyn28wfzqvr74eca3tywuuzq6xf32p7f", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskfowbrr5mdkenm2fcg2hhu76q3vhs74k692vv28", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskf7a93qr84d9a6ga543wernvxbsrpvtp299c5mj", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskfjd3ymhyzedgneudo2bujnm25u7stu4qpa3jnd", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskffxs3orv2au2juwa69hqtrmpcg9vq78cqbdjr4", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + }, + { + "address": "lskgn7m77b769frqvgq7uko74wcrroqtcjv7nhv95", + "tokenID": "0400000000000000", + "availableBalance": "100000000000000", + "lockedBalances": [] + } + ], + "supplySubstore": [ + { + "tokenID": "0400000000000000", + "totalSupply": "10300000000000000" + } + ], + "escrowSubstore": [], + "supportedTokensSubstore": [] + }, + "schema": { + "$id": "/token/module/genesis", + "type": "object", + "required": ["userSubstore", "supplySubstore", "escrowSubstore", "supportedTokensSubstore"], + "properties": { + "userSubstore": { + "type": "array", + "fieldNumber": 1, + "items": { + "type": "object", + "required": ["address", "tokenID", "availableBalance", "lockedBalances"], + "properties": { + "address": { + "dataType": "bytes", + "format": "lisk32", + "fieldNumber": 1 + }, + "tokenID": { + "dataType": "bytes", + "fieldNumber": 2, + "minLength": 8, + 
"maxLength": 8 + }, + "availableBalance": { + "dataType": "uint64", + "fieldNumber": 3 + }, + "lockedBalances": { + "type": "array", + "fieldNumber": 4, + "items": { + "type": "object", + "required": ["module", "amount"], + "properties": { + "module": { + "dataType": "string", + "fieldNumber": 1 + }, + "amount": { + "dataType": "uint64", + "fieldNumber": 2 + } + } + } + } + } + } + }, + "supplySubstore": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["tokenID", "totalSupply"], + "properties": { + "tokenID": { + "dataType": "bytes", + "fieldNumber": 1, + "minLength": 8, + "maxLength": 8 + }, + "totalSupply": { + "dataType": "uint64", + "fieldNumber": 2 + } + } + } + }, + "escrowSubstore": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["escrowChainID", "tokenID", "amount"], + "properties": { + "escrowChainID": { + "dataType": "bytes", + "fieldNumber": 1, + "minLength": 4, + "maxLength": 4 + }, + "tokenID": { + "dataType": "bytes", + "fieldNumber": 2, + "minLength": 8, + "maxLength": 8 + }, + "amount": { + "dataType": "uint64", + "fieldNumber": 3 + } + } + } + }, + "supportedTokensSubstore": { + "type": "array", + "fieldNumber": 4, + "items": { + "type": "object", + "required": ["chainID", "supportedTokenIDs"], + "properties": { + "chainID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + }, + "supportedTokenIDs": { + "type": "array", + "fieldNumber": 2, + "items": { + "dataType": "bytes", + "minLength": 8, + "maxLength": 8 + } + } + } + } + } + } + } + }, + { + "module": "pos", + "data": { + "validators": [ + { + "address": "lskzbqjmwmd32sx8ya56saa4gk7tkco953btm24t8", + "name": "genesis_0", + "blsKey": "a6689556554e528964141d813c184ad4ec5c3564260d2709606c845f0c684b4bb5ff77054acb6eb8184a40fcd783670b", + "proofOfPossession": 
"831e87337aa9d7129b42ac2ac6d355395b07829148f3a4570293cb8ea00593cbbd1933a9393d8f5c4028f74c0d6c29511526e76d082fd2207f65e653129a29f22787cf19d4efe50ff43651e16463f868714354d6860e62dcd715858c4c53fc51", + "generatorKey": "3f44b319b82443eabb300eba5a2f323d72e44d9d2d5ed0b21a24051595582dd5", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskzot8pzdcvjhpjwrhq3dkkbf499ok7mhwkrvsq3", + "name": "genesis_1", + "blsKey": "8c4167537d75e68a60e3cd208b63cfae1ffe5c13315e10a6100fcbd34ede8e38f705391c186f32f8a93df5ff3913d45f", + "proofOfPossession": "929e7eb36a9a379fd5cbcce326e166f897e5dfd036a5127ecaea4f5973566e24031a3aebaf131265764d642e9d435c3d0a5fb8d27b8c65e97960667b5b42f63ac34f42482afe60843eb174bd75e2eaac560bfa1935656688d013bb8087071610", + "generatorKey": "73de0a02eee8076cb64f8bc0591326bdd7447d85a24d501307d98aa912ebc766", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskz89nmk8tuwt93yzqm6wu2jxjdaftr9d5detn8v", + "name": "genesis_2", + "blsKey": "b61f2da61bf5837450dcbc3bca0d6cc4fe2ba97f0325e5ee63f879e28aa9ea4dd9979f583e30236fb519a84a9cb27975", + "proofOfPossession": "807bca29a9eea5717c1802aebff8c29ad3f198a369081999512d31c887d8beba1a591d80a87b1122a5d9501b737188f805f3ef9a77acd051576805981cd0c5ba6e9761b5065f4d48f0e579982b45a1e35b3c282d27bb6e04262005835107a16b", + "generatorKey": "761b647f4cb146f168e41658d1dfe0e9c01e5d64b15e5c033d230210f7e0aaa8", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskx2hume2sg9grrnj94cpqkjummtz2mpcgc8dhoe", + "name": "genesis_3", + "blsKey": 
"b19c4385aaac82c4010cc8231233593dd479f90365186b0344c25c4e11c6c921f0c5b946028330ead690347216f65549", + "proofOfPossession": "b61a22f607f3652226a78747f3bb52c6d680e06a8041fc1d3a94a78fabf2895f23559059a44b0c64cd759d33e60a06060197246f6886679add69f6d306506336e15cdc7e9bde0aaca6e8191fb3535b5685ce8b3f33212441d311444a3d57fc66", + "generatorKey": "f07a86182356aee3fcfb37dcedbb6712c98319dc24b7be17cb322880d755b299", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskxa4895zkxjspdvu3e5eujash7okvnkkpr8xsr5", + "name": "genesis_4", + "blsKey": "a5ca55e9a0ab81d48eaad2960bd3ea259527cf85fe62cc80cfd8400dbd2511725c06c3a597868dcc257bbc279e2b3e92", + "proofOfPossession": "a092cff10ea18ec3dcf3f6e41cd38537e00602e35107067ace7ab7c97a2ae1de531ebea7fc0c22e8dbcee1f981c439930c7cae474a996b153a66b0cb34e66c6041348aaeb4763413afffe0d947da90424065ee573b3683edbb1e51f9a278ae82", + "generatorKey": "0cc6c469088fb2163262ac41787ea4a81da50d92fd510299ba66e5a2b02d5a05", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskvcgy7ccuokarwqde8m8ztrur92cob6ju5quy4n", + "name": "genesis_5", + "blsKey": "87cf21c4649e7f2d83aa0dd0435f73f157cbbaf32352997c5ebc7004ff3f8d72f880048c824cb98493a7ad09f4f561aa", + "proofOfPossession": "92d1948d5d8faec69c6a389548900952014f5803f0eedc480e291bfd8fe6f31231e43fd4bd47817bdbca96e5104b92d2097df4362b94a583a1a24bbdd0382a681b5603d6b3bbfca854d5beccd45c2ebec24623666032f30fb3858b236bfcbd14", + "generatorKey": "83cca7ee3c7145d8022b54fab14505f6f65ed9ac933e3591de4a45d4f2298adb", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lskvpnf7a2eg5wpxrx9p2tnnxm8y7a7emfj8c3gst", + "name": "genesis_6", + "blsKey": "86bc497e250f34a664a3330788292ee901aa286e10fcb280a4a151a8741bc0d154b947a4d3cd9bc5b552917211081466", + "proofOfPossession": "97a20b81bdcbc7a4f228bc00894d53d55fbb2c53960f0ddc0cfa0f77395a33858a9907079773ad50a220cbdb49bc1d171250df83dd70572c4691eb280ae99d4501b289676b6bb0ad0e859b525752015bf5113e49050a8c70853470f2dd7e9344", + "generatorKey": "1d224ad4cf64a3db52b2509c5b63365db970f34c8e09babf4af8135d9234f91f", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskvq67zzev53sa6ozt39ft3dsmwxxztb7h29275k", + "name": "genesis_7", + "blsKey": "9006fc2c9d159b6890047e9b26c700d8c504e17b6fe476a2a1ac1477357c68eee332be587da425e37e22332348ed8007", + "proofOfPossession": "945ac6db93666aa21934d84c6ad897fe1acf1d208a17ec46b0ddf26cf6d9cdccef7db9eac682195ec47cb8e7a069bbe10706a4e1cce2012aadd311dafb270c9c810d80bc82c2b6c34ce236efac552fa0904b96533772f98e202f4e6f47c97f09", + "generatorKey": "8b65dce85de8ed215a91477627b365ec017a01cd5a715337f772ba42715cc794", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskvwy3xvehhpfh2aekcaro5sk36vp5z5kns2zaqt", + "name": "genesis_8", + "blsKey": "96482192c99ac4569b2d139670e566ca5ccf41f39d50b7ddcf69d790bcd556e797614ecb3dda2017e5e3ac2bab4e82d0", + "proofOfPossession": "865e6e88cf91b061b92f2d499936f384c9a3df52de5717661b66c4fd5150f1b171350c6abeab96fb905b6294ca7694420728022d84f4c31180f903a6ab8b5b8153fdcf65d46c8a018e65c0459e64c931b6544b6f00e673c30f2a82402fe8be3c", + "generatorKey": "20a50d60059dff36a6f6c922f55b018d288ba1f9df5120eeb8fa8e3745a800ec", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskcuj9g99y36fc6em2f6zfrd83c6djsvcyzx9u3p", + "name": "genesis_9", + "blsKey": "b244cdcbc419d0efd741cd7117153f9ba1a5a914e1fa686e0f601a2d3f0a79ac765c45fb3a09a297e7bc0515562ceda5", + "proofOfPossession": "b7a186c0576deeacb7eb8db7fe2dcdb9652ea963d2ffe0a14ad90d7698f214948611a3866dfedcb6a8da3209fee4b94a025864f94c31e09192b6de2a71421e5b08d5ac906e77471d3643374a3d84f99d8b1315f44066c044b5cdbfdfeceef78c", + "generatorKey": "80fb43e2c967cb9d050c0460d8a538f15f0ed3b16cb38e0414633f182d67a275", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskc22mfaqzo722aenb6yw7awx8f22nrn54skrj8b", + "name": "genesis_10", + "blsKey": "a38d728c1c1023651b031835818d17d0665d1fbabd8e62da26ca53f290620c23fe928244bcbcbb67412344013017cb53", + "proofOfPossession": "b5d455bb358eff87779b296f23a2fc9abc9d8f3ecb8ed0d9af3e23066e653a58b189c11b4a3980eaeaaa85ffcc240795187f6e8a0e8e8a2837bc20d485e1d3159c2d581614d72f94bbd049e5a9f45c0302851c87aa3c3853d8962ed75d140234", + "generatorKey": "671c72129793eb5801273ff580ce3d4c78d89fc8b4fb95b090a9af0a9a647a41", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskchcsq6pgnq6nwttwe9hyj67rb9936cf2ccjk3b", + "name": "genesis_11", + "blsKey": "8fd004c33814c3b452d50b2bf6855eeb03e41552c6edd50b76dee57007a34cf987da1e06425cf498391e6831d1bf6851", + "proofOfPossession": "a0e34bdc7dc39e09f686d6712fd0e71c61c8d06dfedbdbb9ed77c821c22d6c87f87e39e48db79aa50c19904933abb11a0b07659317079ae8f2db6e27b9139ce0830faa8dad2dcae2079f64781b0516be825b2d84689080bb8219a5ec72ba80f7", + 
"generatorKey": "be4e49ea7e57ede752ce33cb224f50277552f9085a551005255ee12a9b4ca68d", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskp2kubbnvgwhw588t3wp85wthe285r7e2m64w2d", + "name": "genesis_12", + "blsKey": "98f83f66e857d954d5c5a49403e5b3a622e1bb855d785845e72faf0f7dd03ed3fd2f787a38c57f6968accaf780fd41fe", + "proofOfPossession": "b3131f0229df11964daba47a79729542f10672b36db017002df90d2cc6a79c8b44d032935bd214bdf69a8db181e4315a15de71a2e6802442536143c3ace9886248d502d6f38f9ea5bad26d4cee729b909d6cbde541c35313598957ddda08de15", + "generatorKey": "56d64ef16324f92efce8b0a6ee98b2925dc485d45675b2012bbf6a96d7431a36", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskmc9nhajmkqczvaeob872h9mefnw63mcec84qzd", + "name": "genesis_13", + "blsKey": "a029f74eaf914e3dfd828502f224fff7311a964d11eb1c335eebadc38b5c20a98f79bfc53ccf6ee3630cfa282e88489d", + "proofOfPossession": "b5cd13eac543928db25ebb9d69dfaacc04a0d41924f2010a6f04b2457523a5a423a9c49756dbcb969a7b2c49ddcc7c710ada766fdddaedbff02f68e2b75108f111f4078d2705f06551ef524f201d50ac32c423d04a7e6e7c6c8a64d70c013ec3", + "generatorKey": "b67f0a9ad61ad6867b54aaaed6036001485d7a7ba13770aed786b34241f37cda", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskm8g9dshwfcmfq9ctbrjm9zvb58h5c7y9ecstky", + "name": "genesis_14", + "blsKey": "8e3f9dd02f46bbb01ec1ffbe173b6a28baa3ffaca943afe51c18dc5220256a3994cd0b0389c835988a64076b4e81c837", + "proofOfPossession": 
"980f00e7752adccb907eaea0fc31ce62dcaff9bf1c6b7066c5071829c91456a8d1e266cb0a9ef4916ffbd09295508a350d21e9123e5cc1c00d3ef65f5493c93c5b993e9768960d4210849743dc2b995657cb0aee7d46d6482e3545b89f06f895", + "generatorKey": "497a5b80edc6b9b5cca4ca73fd0523dbd51e41c1af5f893e301cfa91d997573a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskmwac26bhz5s5wo7h79dpyucckxku8jw5descbg", + "name": "genesis_15", + "blsKey": "adeefe5ec24b210986ae56ac2d1eea5b5447e38d7c9657d4948ee2d9b312a247ba40964a58c3fc14e5fd7137602e631c", + "proofOfPossession": "8ffe03e68c8b3ec929a4934d61091ac1c8f42446076a7ef6e8141082ebf71fd3153c35c1745619a08defb0ca8fbe583a15190f88dbd93d22d3c4eaf3fd60fa2d9cdcd8824bdd289111ca7d537563b0e2fa7ad06cad40bc2ce17277a63a3138b2", + "generatorKey": "a7340ac2220b35dd5c97e6ea45c48cfdfcaccc4c59abf9b7f316df8a1bd7e8b2", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskmadcfr9p3qgx8upeac6xkmk8fjss7atw8p8s2a", + "name": "genesis_16", + "blsKey": "a13d3a62d053b3a092d736f3c96c89fb982924b9cfd1e8283c4ced5a537732718e73c6c86c94ddd416eb94a753366b7f", + "proofOfPossession": "950583faae3492f5d15f9ad72bad982b2f513956cc1259e16e28ef2e18f7db3df1bf1cbab7350e390ac5a8785c574fe30878784e6c5d50668184c4c92bda196432034a7e092d9e62736ca543e1b7e594ccf6b81d37c17fabf73b846b67a0bc8f", + "generatorKey": "ebeb7f828aaa40ab6040e914b66b6f5d76964a0579bd29bf98c2641547f229f6", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskbm49qcdcyqvavxkm69x22btvhwx6v27kfzghu3", + "name": "genesis_17", + "blsKey": 
"80d7d0598d4e79ceea22c56d16e747cd5ef94469bd036945d14a5d1e06eb700f9f1099d10cfaddddf9e88ac4c9f1086a", + "proofOfPossession": "b7890264708b9d3341d90864f9120cd84090592a6bc5a419df94e86a638a0055e7dc3846cb89869cf46305611e49cea007711f35a5effd3099e56b5108a4103215a6ba9195c4694064ba661502e852b43e9593b0a60bcd2b567fc97565054500", + "generatorKey": "4ec3ad70d3d35f0d684960e7938fab016d12c6c7cbb8312a8cff776dbaf2ca4a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskbr5cnd8rjeaot7gtfo79fsywx4nb68b29xeqrh", + "name": "genesis_18", + "blsKey": "968afa71f5ba87783db371242b48962a93c91f17ec6fe2b52260c43b7db62462fc88de889445390024abbb1de1ff87ee", + "proofOfPossession": "b3a05e96a9fc1ba05cb80ba48e8f92e6d6d282408d77b16557dd0c8bff8bc963539d5a355cb1544e35269c4fc58f5c0816b4bc3e215d6441f06b9d2e6cd48ad5f08c5bfb35f359fe25ebcc382985bcefce0698bd3a89e655706e46e394c83693", + "generatorKey": "552ea15981e9fa54f2b65c409e8d32c350435893744fb9937875b1ec0e3025eb", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsknyuj2wnn95w8svk7jo38jwxhpnrx7cj3vo4vjc", + "name": "genesis_19", + "blsKey": "a0fb290e74bce8c5858dc1b615bac542d2280a477912ae06b8d4f07c6d451eae44a47cae6a7a1fb5cedea9efe2d4e5a5", + "proofOfPossession": "8b1a7d2b1566ce81c8ac2b8c88b6966b960462d0fa4e54554f53ab184c31c72c65fce904aff79d4235dd3e16e8eed2780e083a31a432e70a538de1b81d8a8a49d31bdd361f357d57fe4568d1b506492fc72f42d4b344ecfac2d560bbd2214621", + "generatorKey": "4325779e64521ded42c0e2e873c16b753433d0e7f9a1e046e27a0fae9378d9c9", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lsknax33n2ohy872rdkfp4ud7nsv8eamwt6utw5nb", + "name": "genesis_20", + "blsKey": "b29e90de05487e087cb37f34213ccc49edef8936aa15001686f947dd26b2e4c71b0c094c633067c75d3d0879c0347a45", + "proofOfPossession": "9866cd99328ae5d1a14f899b95782b828b404c941853f4d0f0f56a113867f9f44b177af5c6eddec16b42c405967e52c90e3c2b0acf4921fd7ad27bdca498980aec0d37923e95d56555190caed7644ac158b392af052a49a8d1df626ea3a5f034", + "generatorKey": "473d332bb27f1dab55191233884f37aaf17545b1883554b1457b2dfac7c02b0a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsknatyy4944pxukrhe38bww4bn3myzjp2af4sqgh", + "name": "genesis_21", + "blsKey": "b0d3f0d142131962d9ab7505a3ca078c1947d6bb2972174988feddc5d4d9727927ff79290af7e1180a913a375da9b618", + "proofOfPossession": "90f81a87982cb983aae8c240f12c77306501bf67dcb031161cb2787ecbecfdc0ca4e62365f750714b9b0a64c10411058105bef1a725ece1c0e7c45b7e1526494d5a02ceaa4f624116a91188e7ca2503e0ae17748b11b05cd79ccc204d20e418f", + "generatorKey": "f8d382ac4f19ffe2ac2fa91794b65dc4c03389cbb2ea65bab50379a12e0f98fb", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsknddzdw4xxej5znssc7aapej67s7g476osk7prc", + "name": "genesis_22", + "blsKey": "8ae81737f7b1678ece4b06db3ee1d633637da3c02cf646cdb0c7c1dae5f9eea41f2384fca8b0b12033d316ee78ea3e94", + "proofOfPossession": "a5150c19ac23dc15f660d9612be5f9591c1a5fc892e9f8b267de6bd39da84f254b6644e8c0f294900e5e9b7c9ecf3f260d902a56af7db5a59083eda08dd3ff083e2a07ba5d34f25312621f8686358dd2a50dcdc879eb0f9d50ff2fdc704e7d9a", + "generatorKey": "3c19943d614f67309dd989e2e1bdeade5ea53b0522eac3d46b9e7f68604a874d", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk3oz8mycgs86jehbmpmb83n8z3ctxou47h7r9bs", + "name": "genesis_23", + "blsKey": "8ffe1e957047e7dd979e8bcac9fcea9411ed3be947679ce26a36725b08da51ed2fa19e7f7c6bed701bf3e33a6f787b8a", + "proofOfPossession": "89177926eb5ed8d2be150884e0cc4eaf02a040a3ebb0af9df6922d8d7fc58da4777cc6591d3d43570ce6410077d087fe097cb30f28a164d22216859988f44ef88bc7f4a2134f882d044e4ee66d135a31cd063934cf6b4e820fcff3bbfc5b27c9", + "generatorKey": "b9bbcd67194a7091a517faf37a7ec0fda068c4ac0dcbb8ddf526de97e67716a4", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk37kucto34knfhumezkx3qdwhmbrqfonjmck59z", + "name": "genesis_24", + "blsKey": "94c8d9240de83f6b09905756fae29c2c3aa9092649776ebe037f20011b3bff835944eae63b2dcf6c3861f11d457a875e", + "proofOfPossession": "9900c9235a0365b9a0b5dce686903737cc4aaa76e8f9e47367954b07ee3a0c0ab51351cd746966556ddcc53e69eabe0c025195d1d3a6788d69c1820bd1fecc096eea09770fe43f86f898c6182ce3057fcd52b43ce096a07b4da3f2369353988e", + "generatorKey": "edec02268c216d131fa9ec045049e6ac1526f48da772a34b1536c88c5af223da", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk3dzjyndh43tdc6vugbdqhfpt3k9juethuzsmdk", + "name": "genesis_25", + "blsKey": "a1782a5f280f9894cea555d6f355c1f23e0581140c64f20ae469edd6ace7dcb6266227feecf002c2b508766e730c6f4f", + "proofOfPossession": "84e053bb01b22997e46ce4cbece0f5478e27cd49786cc36b1459c8930ea408e663bc725184197eb726fadf6988503c9b01be391ca3eb16587137cf5a3941717837baec7869896bae401bb513359485142778a52638429328f06a4469b7e21bb0", + 
"generatorKey": "9f1c361befb0ae35de28e8f0e25efe75ede78aa26c703625cc17e7fe2e7208f3", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk4nst5n99meqxndr684va7hhenw7q8sxs5depnb", + "name": "genesis_26", + "blsKey": "a1a95b1526c3426ccd03f46199d452c5121481cc862a43bfe616c44662b9a7fa460fcdc5f97072754296e6da7023e078", + "proofOfPossession": "942c76c56af0112baa7a11bb8875a2336b321e85de56fd4267e97f3fb142445648a54c97ed22e5860fe5b0e5ef240599028d4009d091ad96ad727914532e45ff9eb44303b337f44bf5ed3ac796e6e22a9ee29138bada893f89f3bebc1a4daad5", + "generatorKey": "71ce039f0e4502ff56ca8d33f7ba5ba5392dd7915516b2d87eb777edef454377", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk67y3t2sqd7kka2agtcdm68oqvmvyw94nrjqz7f", + "name": "genesis_27", + "blsKey": "a6d6aa277ab636486b7d879e90c541b4952264e18b8a214f58d32226fcc774a8e5bdac69223902424110cbda4ab58907", + "proofOfPossession": "a5b91b5e3881a36ea1b209f1cc09ab447e365b111e7529a88981e4e44c4a05eaee0507ff80460453e23187116510dc770d517e16aafc1de2aae2393ddd2e26cbe6fd096b65ba48cb6dacd0862d6c39b394117a596c0a1c9bae8d9b538d6e6dfa", + "generatorKey": "74f7ff53b55eda8fe9c11d66f7533c27714b121a5918a66c19b309e1c93dc3ed", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk6quzyfffe2xhukyq4vjwnebmnapvsgj4we7bad", + "name": "genesis_28", + "blsKey": "b422e4fa8ab196e0bcc49f956ab3b5c13dc14442864dca80118dea7329308e7f7aa7547df293c826a29ef4bbfe517778", + "proofOfPossession": 
"8ce0fe2bf47180e74f315fda7bfdb376a277f394667c88661dbefcc57100af1d0a06d36ef406f7abc0282a1cb8f5091505d759a40739b11b4a1fd0060e2066edd79ad417168a977f1a59206ddac4bbabaf70feda572bb19c17b9d9034bfe28b1", + "generatorKey": "b5308c34412c54e4b8358b5fca16396084004ee37c6824c1ad751cbe8e50e24f", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk5pmheu78re567zd5dnddzh2c3jzn7bwcrjd7dy", + "name": "genesis_29", + "blsKey": "809c35a2a1f510fb574a223474fb6b588daca95ab1b9b04f4f0dcdcd4581f05914eb1b9683d21997899ebf730d82a8a7", + "proofOfPossession": "a2fd6eca6018825969d8b9de58e6594149c5114cea9c27997f2ec67b923cbe562454caa5a5e956b3eb5ea0c5bd9b0196137d4646e21b51bd21503dde474d510f62654bb7ffd141fa3462997bc6662f2893cff7d917eb07f2985dae860723bd46", + "generatorKey": "62c37caa9ecdb3874354e7f780cb4463ad190bc31e75e552cb07b9bafc658f2c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk56hpjtt5b8w3h2qgckr57txuw95ja29rsonweo", + "name": "genesis_30", + "blsKey": "906653b7a74dc35499e0c02f10a9d092e7dae70e5376287b5533c7a52ade678784956e6bcbb67a11239bbfa977743a1f", + "proofOfPossession": "a5bdd92d340281c01d90224ca58a13cc429dc47ea9d2ef6226b023ff926a43ff0a50a82028e1fc20e9faa380136f5dde00a70d7170a8de3246e39b7787771e41271351dcbf4f88b6d40dac77b2e3324a371f9fc08d1fad90fe3e5cd61caae5d8", + "generatorKey": "d19ee9537ed38f537c2e8be0fb491331575f8e4050dc4a74ccee3244714d5969", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk5y2q2tn35xrnpdc4oag8sa3ktdacmdcahvwqot", + "name": "genesis_31", + "blsKey": 
"b8396076f1ae032b572145f01ea0a3b5418f226afb0496930cb68250ca59b16fe2fb6dadacd88132b9dcd19a07d7f773", + "proofOfPossession": "a096515a639c004e7aecee3e88ddbb572163b914de63b528db584b27fe6a0267eb95213ccbebea849a720f1f717871ff191a4cf52c9d0a4db57cfcf8f2453d22cd432a5fe64dcb45982abe84343608a8b22740f7f3fbdfe1000fede5f0a08db3", + "generatorKey": "4ae9069cbc0e2371b037342010c5ddbd9c6d4a8c8d0a9eae59bc6a3796866119", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk5rtz6s352qyt9vggx7uyo5b4p2ommfxz36w7ma", + "name": "genesis_32", + "blsKey": "8f96883db13e4f43e7280d8a58e7642228f46c375853a17e8cdb34fdeaf4e363a82678d2f54a8630218e097ba39d4370", + "proofOfPossession": "91a2efa4a407f63eb9157a4f4378bf6dfb4fc6d5d2714c2ee81f49ac90bc5dc3f1b72051a1fa1615f2e2d694cf17c27c1429e94bebc023feea2a405f7a8343dcc567636d15ac95ef84b1c673298becb766e036d9869e2113d9f4602f6e6092dd", + "generatorKey": "d1f10929b1eab8232be9df3b792496eb56bcb5c0a8c2fd04e3be1fab26c7980e", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskoys3dpcyx5hkr7u2fenpjrbyd69tuyu5ar4dgy", + "name": "genesis_33", + "blsKey": "a4f78f9b10c5671cca5aa2526708b95bdec56f3e404fc6c6403de83338940dfcc8d6836ba3d98566d314d34438a042d3", + "proofOfPossession": "91a1d0b501b7ab2caa5d240eae92c8c0ccbf296ebd3dd9d03aac1ca569f803091ec5ab57b7f6c34ad1aeb9aee0ccc17a1911c8e7a9ca681a6b803bf27e303f59dcfa32f678c4bb35189a8b7e0a3af43771ec841bd2ab32a96cb2eab0a1c2ad94", + "generatorKey": "3efa1c0a728a9741555b84ff1d80aedfcaf85370e1602890d7ba610bf33500bb", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lskoq2bmkpfwmmbo3c9pzdby7wmwjvokgmpgbpcj3", + "name": "genesis_34", + "blsKey": "882662250af65099ca817b2564576582981f23746f07be09ebc03ed6aa582a327d4156ff4a12851bce3ad77be854f937", + "proofOfPossession": "b73f34042d210b6cf0ba61b04e26bcb08e4d671a12df09e592c14c73ac55df09a01adf94b205b86a9ac9020cc719e93b0f890050891d9f8622346f45112ce502e26293a14c36501a8f1947c33fa38535d6eae6c4af6679296e76a105e899341d", + "generatorKey": "8cda7b8df8975d781e053882a1373d190d5f8fd7c13ab528be8597b5d06ede57", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskowvmbgn4oye4hae3keyjuzta4t499zqkjqydfd", + "name": "genesis_35", + "blsKey": "ac304b4ad4fdac88bf975496edc43af0e324120984d5a12ac073b3e3e80c593470b6aa4f10b9897451bd6ee6f569a2af", + "proofOfPossession": "b08e154f3db163391dcbef182a63ad51d56521951307b9bcc60f12c83babeb5eef80b6d8503848acf9bc864adaa82bd610e3145dd77debdfcaa8e1e15f13e6da1d5bcfca4234b46208900c6ce35d0147534a7abc728504d731f286edc31a3ae3", + "generatorKey": "f926fbec6d2e461af7c58d87754524abd26ab1f617d73348ba1318d371f7cac0", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskos7tnf5jx4e6jq4bf5z4gwo2ow5he4khn75gpo", + "name": "genesis_36", + "blsKey": "87971b8a0520e08dc8dbb8114de7ecd44e98844c9179585806e8a1edaae1190ea85e6471767e90074d87d1dfbafc983c", + "proofOfPossession": "ac1fa23a608ce0be52ada7759c4631a5e3c7828a2a622c718b67c4d8996eeed61c382ec319ff2c608290c141ef741ba013f7567bf95cdfb29295dea31adb440f5d856f5688fdd553f47a06ab5692ee5fb99e5a50b329fe4406bfefb924b5665c", + "generatorKey": "d5781773a9b07a569a0d87c0bf82103fd459a2185fc32f5c312a663c5bc65784", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk966m5mv2xk8hassrq5b8nz97qmy3nh348y6zf7", + "name": "genesis_37", + "blsKey": "b847749ece25a2ef51427de371b4efc2342fb38a2c5822b941c1dbf43c3f8dabf5dc0e1620d2bdafb597d697e30ab801", + "proofOfPossession": "831a557a972e0ed1a9cdab88a13fea899ce1b7e6475ee2d42a1a1faa09fe9042eaab3bd8b14f2faf4ecff84780b8db6719e8d6bc8917ada1f77182b2fb4a40b544c02486fe0394b8fcc72ac69fcdf3d6c0920469225bf0ad2e047fc68b9376a3", + "generatorKey": "875d9a84adcf997034d5ab6189a063d9817da3a6c8599cc46c84b70b5081b18b", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk7drqfofanzn9rf7g59a2jha5ses3rswmc26hpw", + "name": "genesis_38", + "blsKey": "a6d6315e85e8138de21f94d0c5c6f4c2515d493b17653156745155b25f9f121f6d13e7c36a57fa5002a9aa0a0b282394", + "proofOfPossession": "ac38044b8d84ed22d42da3a240b7c2dd16fbdf3b03655226b46b6eea46256a3ee33232771d67da1a4df6717476349647077f5cb29715333d8c55f5b6ba70c77af1944ac54c913445da29c99dd441e36d9def69c0e9709ce062ac70e4d15628a9", + "generatorKey": "71d5b4b08ea0b7a0ff95f779aec53590a3bcb5a87fc770334f8c9ee57fdd79d9", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk8vjsq5s8jan9c8y9tmgawd6cttuszbf6jmhvj5", + "name": "genesis_39", + "blsKey": "837e0759968b1ed95789252d1e731d7b127c9a53a74e86f3ca3d65d71cf666f2208baa782a42c45d4132630100a59462", + "proofOfPossession": "b97607b1478f17877b4c8042530763894dd7b79f8bbf5ca0883d08b94dc8a11cc2c2a73123160e3b01da692fb071f5fe0d808426604b5ad8aadebda9b02710698158254f6f1d822c2c9bae5c081101806e9220d79c547391e6fc6d8f26094dc7", + 
"generatorKey": "00110f493d122a73628a518842e99591b91def4ef9fbd58e1b6458950da5a776", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk8netwcxgkpew8g5as2bkwbfraetf8neud25ktc", + "name": "genesis_40", + "blsKey": "a3aa25a2385666122df82fa74096f30560c270b1ef981ff459e25cb5819d50a2edd8c315bf17a6a1af8d88c0e9325e50", + "proofOfPossession": "b543e0716990a65727b51489c90495289bae983d3a4439fe68826c2175b4396d37da0ff03910b369335377de097088720b77646a3fdf196e95c54f2ca6bd414327231996bc2dba0c1dcc7a77b8be10b84a4ef8947a0e4ba22aa09a6c025521e6", + "generatorKey": "fa7af9f8623b324e6c021b7a0899d980a41dd2de86c35cab530751eaa9e55a0a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk8kpswabbcjrnfp89demrfvryx9sgjsma87pusk", + "name": "genesis_41", + "blsKey": "a84b3fc0a53fcb07c6057442cf11b37ef0a3d3216fc8e245f9cbf43c13193515f0de3ab9ef4f6b0e04ecdb4df212d96a", + "proofOfPossession": "b3de21449917e17d5eadb5211c192ee23e7df8becad8488c521dcfb0c67df64a81561653d92805b4bebae9e5b5bdef8717f1259eaeb55bd1e7eafad3d74efe20181b4ac84bb7582b637e605fe78f10eb03b2a4acbff49809e86d89aebc6076b9", + "generatorKey": "91fdf7f2a3eb93e493f736a4f9fce0e1df082836bf6d06e739bb3b0e1690fada", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk8dz47g5s7qxbyy46qvkrykfoj7wg7rb5ohy97c", + "name": "genesis_42", + "blsKey": "a2f8fdf2b80c987ae61634125c54469928728ecb993bab3db892725b16b41ec48c36056eeee2a1c9b073d12bdf917684", + "proofOfPossession": 
"abded9f3ad588edba52b7b2a4b3ff25f630aefae0d7a91827bc1fb7b8cba36d27c310a7a58a4a66ed9a8d90ffc0aae6e17718b1fa3f8e7305498e740d531460702a7dce1e32c19e18849c786c26a30e29b464c7202dd64d021c1eef643de519a", + "generatorKey": "567e1e27c02293d7c190a1eb203c2daf1935a9901de66df73f8e4eeae6907d04", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsk8dsngwh4n6hmf4unqb8gfqgkayabaqdvtq85ja", + "name": "genesis_43", + "blsKey": "aa5174668a4743d838fa3742092c744c3edd4ee64c535ce2a69eeae1c5f23029acd74853410867d873076639f4ce1cda", + "proofOfPossession": "ad79b935bd503402b83404125ef11fab81f4c6bef0688798473e430f892704b653209aaf81f16efca9965fad0850a3971662f33c25994568e1434f4f46901caa1c002cab18dff7337836617c372673714d63b01ec4db098f419c027015aa4c05", + "generatorKey": "dd337fcb819073335382415bfdbf5e5b7e73126aafb0ac46479137328e72d438", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskux8ew6zq6zddya4u32towauvxmbe3x9hxvbzv4", + "name": "genesis_44", + "blsKey": "94da5ec9da5eabf2ab184de1e0ee10f63f721897475acd59c3c53adc51a9b39b0f4fa28573fcc309e576dba658425dbd", + "proofOfPossession": "a672d269ec605e04065fc0da8e6f520d0273b1c57a754409d9fb25cef1be67b8583fa683e27c0284c31105045f395c0c142d0648420b9b209fa88fa13025ba2b3887e04e3fbae1db6e5941ade41713a4384c139e47e72a68c964c4a5c0886d25", + "generatorKey": "563aa06b554beea30fc4455ae51e0954051a3457315b2370fde9c22d3233b522", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsku4ftwo3dvgygbnn58octduj6458h5eep2aea6e", + "name": "genesis_45", + "blsKey": 
"b9dc37e370cdbab50fe906b675551194e80705f5549ec07f32b95b85ec1ee1b149d156e649ebe1eac57bcc2ce9db3e56", + "proofOfPossession": "abefcbf20c53c10ac15054527c2ca691994f0b5cf60444aef49ba4e39312774eaa073be6b887ca5792bbfd53adc7ec3d0b0f6b34ec8a8f2fb6708d5a9d3de242f5fcccc3c3cddcfc5eb8be5aa13c333d114c091f594736e7a43d7d9212d0063d", + "generatorKey": "894289ef63ad9f51868d06e700c5dc9cac7af2e6601a99449134926cfdbb4340", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskuueow44w67rte7uoryn855hp5kw48szuhe5qmc", + "name": "genesis_46", + "blsKey": "b7c47fbb0d7e3793460949c9dd6120a310eb52de67f6cde55c022b05dd5053074c8a0e562896a482c787eb2eea82353f", + "proofOfPossession": "a265237ff848fe7acb4c84b6f68008ee7ec917a7a11c050f630b834e5caf22a447de94de0e7c52d03b18e003e5f9a3f2091cb5a78817ba42a7e19c714af47ad0b94824c5b90862059ed3042446143c56c4df011389eb42dfa2daa58df677d473", + "generatorKey": "ebe1d6189c7015d175414db9621a602b0912826c1eb1aab09e69bb33ca8fcda5", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskym4rrvgax9ubgqz6944z9q3t6quo5ugw33j3kr", + "name": "genesis_47", + "blsKey": "a5963aa24ed05e95d19fd9de35ae6f523aad987ab2b9897216091e798e15f5062e9734b11fcacd6b8f312162ddc10940", + "proofOfPossession": "8a1ae28d6d70bfa0dbcc694c811c05ac6e697a17f41d45a32e1cb5b225bd42de7c1043f4af3c17d92641c4d017569e2302dad3e32493294831da564a07154e5098129639deb89743d1146f8e01f9f6f32f382905707051467242b646d86bad05", + "generatorKey": "4514d1723eed164b3792f1950d3b1c7a1067441ba207cce8d9bdd6f436a119fe", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lskyunb64dg4x72ue8mzte7cbev8j4nucf9je2sh9", + "name": "genesis_48", + "blsKey": "870db2da31a9471077677bd9a7529ee7523bdd64fdba46c514e94aa52e940566479cfdab29b07c1573aff6ba7040c684", + "proofOfPossession": "acbe270292cfaa154f256a83c9bdde889a9205c85c5ff0f41dae586dccc7f29f0464fbc087a5c5adb3cb4eca3b95bc14187db64cccd24e98d3e75215b69bd2bd0b357834c1ccacbdf91556fa59a86d04d1fc8aaa3be2ae5256aea3bd36d26942", + "generatorKey": "a9b0c063fee99a903a55da57e3d16f069145e414b62e25dbbf218bd608a61f7c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskrzuuu8gkp5bxrbbz9hdjxw2yhnpxdkdz3j8rxr", + "name": "genesis_49", + "blsKey": "b1b4ba05e7116670be55b6d9fc28574d142824175a1e3d1cdafa37f193c342eba1a85d8520a9fd962811fe63a5a2d048", + "proofOfPossession": "99f7e39908f0cabbfd156c78a903d6968c455f5edbcb878525abe1217674d9745da87057f1fa93ccff79632253d5b4fd0c6301b0b9eb0e07fdd4c0abc99da0229ceb4a03b0da237657e445a7bbf6877689bfc027d65f24f05982dc2aeb34c72d", + "generatorKey": "d454f04eb0e05c980f6a3427e98d73493665860ba7a29eb915cfc0b8daae2849", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskrxweey4ak83ek36go6okoxr6bxrepdv3y52k3y", + "name": "genesis_50", + "blsKey": "8422c22feba709265c30a7b86a9ee9832d6b32fa4c9dc091c390e1b15e278f9009dc5d70868a56dace1ff622e9e634d7", + "proofOfPossession": "871ed33b68172b0ce40a3ec98d6fa9b3fd77245c2c1cb7f1071101cb459d53b05fc0168597148f976ceb1ded71999da8094fd8783cf27d1e21f9b965164573c0ca849210bd1e99f4706ca6f43636f9ea535c333a36c4267a598dc58c7c7fc108", + "generatorKey": "21120ef22b7df438e06b3862d3f0ab99d5704b3c61c45a544c64c908da8955ad", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskrccyjmc8cybh9n3kgencq8u7fh796v2zfraco9", + "name": "genesis_51", + "blsKey": "8b436ed371b7af11b31347c12321d90a427e9aa8d93275a27faedcbe2dd06c5dce1e1a4a03b0ae030e5cd0106a942cd8", + "proofOfPossession": "b1dcf2ff65ba4096611f392fb56d104754927cba14ec3d193ebcf7d6eaab062c7ab770c512e815c7d52c37fa9b8622400df7939f4bbeb8566beebce1b13d67562f7bb6a01f988a501e4ef691b544cd05796010b614014ec3036b171c7392cd7d", + "generatorKey": "bf9ebe25faae5a874d97ad1772ad062ca52f63e48d806ef641e025a963224200", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskr8bmeh9q5brkctg8g44j82ootju82zu8porwvq", + "name": "genesis_52", + "blsKey": "a8271f9e8874eebb6d66dc139e984b6a6c71d2a7e23c6d7061bab7725e9c65f2e2123778130a2acd278f155440debde0", + "proofOfPossession": "84a3aeb2cc8329afc63f40d137b017ebcffe6df9e55bdaad8249408d01dad5025f1c83faecb53955ba5524df25b0d85e180f0335d0b5ac8c82c7f5fd0975002fe0231a83754c0034b07175afc426b17978870f8326cfe4694ff723e08d0b6a61", + "generatorKey": "8062134a09cc464fe9465cda959b402a3d4506a1c44b3f5cba9661d42e912421", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskrskxmbv7s4czgxz5wtdqkay87ts2mfmu4ufcaw", + "name": "genesis_53", + "blsKey": "98c4f0e2b01f1b6ed07035fe46c17a40fe5409b1461a2b697afaf869e2f8c88b2db297b9a149208109bab2da195235c0", + "proofOfPossession": "8dad459d6b312d4a6767695029525e95f04e3ee083de85d0db5d818d15d32ef7aecb57f608c2c10355e3ca6dba8018e5192862d80f00fe1f71fd396d81d6a7649221c50bc8336efd12dc1cc13ee3c3898617971244af6a8da5ccd9224c9ea2f9", + 
"generatorKey": "07614fd5036d099a3caf004d46a083d12df2024fc03ef29cec22e58d1f78531f", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskrgqnuqub85jzcocgjsgb5rexrxc32s9dajhm69", + "name": "genesis_54", + "blsKey": "ad250adf40b559d765bb51d65340fe38de9e4cbc839b6e6509d99bb9bb3f89be1bbb96d75f709f2ae9e715e6e6ce38a4", + "proofOfPossession": "8943f42818d3c3374d43d1aa0b427436f4edec3e760f07aea2990b99eb3ef69952d580df862ad9034062fab57c548164143bd3b77d16ae74fd8fb84518983dfd015146ac9d0503c858f0022591345c077656e5af22cc78f1d35a02ad1e74c8c4", + "generatorKey": "55d4c0e745954f0fba9629b346055060418961e7edce58c77bf2bcfc7f753d42", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskrga27zfbamdcntpbxxt7sezvmubyxv9vnw2upk", + "name": "genesis_55", + "blsKey": "997583cd4f633aa5aa5e616a75d9edc370d5e6eb77e2418c13648b435b0182cdb7787c7ca91ed3939b403fe59041890b", + "proofOfPossession": "95324d44556e3c61bd307a40c2ef7f3d988e0ea561e5ece2d2809cf078db232caea9df8b35d8411238fddfe83a6978a70ae88e29fa5b6322b73f7fc9756daf52aa6369e5e69c5b2304871bd324e8125a698e360e3d5f1ad20136370b8d9808ea", + "generatorKey": "d2b31ed942359b0c9cb696cae874a2dbdd6e24915dd8a5882c7c042eac1e6831", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsktn6hodzd7v4kzgpd56osqjfwnzhu4mdyokynum", + "name": "genesis_56", + "blsKey": "a97efbc836dd4028813063912bcadb52fdb8e4d2ba04d7bbb477d2a97e16167c5fa6ba75e482cd7a7d476d78fed1550b", + "proofOfPossession": 
"995df23eececc27026f62816bfd07d71696e2dc5751bafb03d50bd9c66d388c562d6c1357300e4d51e5522edc3cb5ae217b3607795baa0209c6e63db01b4b7c28452c15db1366764abb9d886d0a908da07d3b7b2612e263d95721ffccefb4aa4", + "generatorKey": "6158b2a5b662ce05c7864dff4c2aecf6109cdea1be703a79147450b082ea242d", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsktas5pgp3tofv4ke4f2kayw9uyrqpnbf55bw5hm", + "name": "genesis_57", + "blsKey": "a77de9989b5fab42dca028637f401953b9e0fd6cd61dc2fb978daafdb5478ac77d67a37135c67a2178b44e5a35a1fddc", + "proofOfPossession": "acafd4f724cd7b9dcaf166aaf212122360f76c2faf4d146e8d0014653c0fe09f750690ea2b9ac6df96300301fb020d3b04c1b79965cc8929e18bd93190a366851033a901e05850770cb69fc28146db719f1ac232a7947ead59e8d584eb3ddb79", + "generatorKey": "8307181cf9d1f621261e8a97a5b3b77d64a9a1f589a2c14e42b2380d9c2d6297", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskk33a2z28ak9yy6eunbmodnynoehtyra5o4jzkn", + "name": "genesis_58", + "blsKey": "a1dff3e7486e27eb2bc99d4343b57e06fb8b52f8c7b6ec6d539889afcf0c221fbadcfca65f2ad7351beb8a51e67513fd", + "proofOfPossession": "b6447c9e317179a9160ea0c11c2ff49c11e0300332c2c0ec0bf81e936af231ffc3b6628da3e01eda821ff15e9a523f3204b32fd4fcce988c2b73b56609709dfd25ec9df9e33dee073f9d26a82d268569d117ecbf7985e012a975fa7d3ad5e4fd", + "generatorKey": "689639f5e3808cc0efd5f8d48ca6ee6f9a7a1bd5f5776832cc9b448cff5d0aa9", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskk8yh4h2rkp3yegr5xuea62qbos6q8xd6h3wys2", + "name": "genesis_59", + "blsKey": 
"95087210c7145581fd8dc397ed12ecc2eb703eaa19dd837d7c8c54cf625ba00bf88608aa89170d703c77f7dcf6707398", + "proofOfPossession": "b09816fd6ec0b666e1f61bde72069057a11fc78d7fe8b85873b6d909aee15d74c637076e149ff279c587efa4e6a468900e2c4a857bc55978ea292189737f95e7026514ec5e9a117f31b8339d8becf3af1bd2555df6d8f2372b54b7381ff355ed", + "generatorKey": "db1c7c22ee495ad3553394dca00c62b85e78b58e78ca68bfe5027b3346f6c854", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskkqjdxujqmjn2woqjs6txv3trzh6s5gsr882scp", + "name": "genesis_60", + "blsKey": "95acb59c54e53f09d7aac37c2db59c6df0ebb1e38120690a9035c715dc9862995472c72e9f48bfb05e920494dc17e9bb", + "proofOfPossession": "8798b4e143b15d10965194d0350d95c374d214d14f6a0c750a1a1699f1221388f01d00c6b708167fc7fcf355591abe370ed45c55306fdc372d26432cba8efc1f83238c1f2e669111656ba61b4bff391786713c28f7d1c6e717fbe98aec2dfda3", + "generatorKey": "c0aa7af3198f0e3a6bf35c5be38e0f181827735b1c3a635e8db05b80b3647054", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskk2vnyd5dq3ekexog6us6zcze9r64wk456zvj9a", + "name": "genesis_61", + "blsKey": "8739c54fb8452db4ff1857649a4144dae29f7bbd3275aaa8f0f2559095a09510e38bb0155bd01d01349e7f1392132e41", + "proofOfPossession": "b78a813e912849e2583d6e774740f2bef3115f1d23576d206ba15bf0c64404b48208e7b2b5becfe2386fc1ad686094251707a7bf8902a10b8ffd207394ad26b64f7a0c5bb7bfc737fd836b160bf16c4d14dcc343dbc8ff7993391795ded7e448", + "generatorKey": "7ff8b45c5f6239306af0194ee41e047669e33338be3f8e6c786d90fb905c8b6a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lskkjm548jqdrgzqrozpkew9z82kqfvtpmvavj7d6", + "name": "genesis_62", + "blsKey": "8d4151757d14b1a30f7088f0bb1505bfd94a471872d565de563dbce32f696cb77afcc026170c343d0329ad554df564f6", + "proofOfPossession": "90df1472d40c6d1279bc96b0639ff0b8ae8cef80a0538ef00b9fc3bf7816a541d2eb9349fb6a6f1a07d80504bdf105ac0726e6b01ef75a863cafaf5356dbc03ea1c90387f79d3adf15c8a44614d80e42e7a964df2eca83a871cd378f39513414", + "generatorKey": "b53ef930d84d3ce5b4947c2502da06bcbc0fb2c71ee96f3b3a35340516712c71", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskqxjqneh4mhkvvgga8wxtrky5ztzt6bh8rcvsvg", + "name": "genesis_63", + "blsKey": "abc1d1ef1f992a9fda45841079516169c879421f4260194c0a47e46afdb9f349c2a51e66e9f2ee8bf22231027584a6bd", + "proofOfPossession": "a16aa0fe3bfd5383c2fd874be4feb930f2c75f5d35d0e0ab314eb545a673aa1854ebfee7b15a026d5a9fb02842e54672149382f2898a0e12756bb949772b1316163ba774768c88fc90c2471afe94140d8d8f16974f2ebf050358cd98587b32ce", + "generatorKey": "a2b5e97ac5a5b3c3a7cd9b4401eca1f4e8da59fe567e229ea47e65bf40053402", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskq6j6w8bv4s4to8ty6rz88y2cwcx76o4wcdnsdq", + "name": "genesis_64", + "blsKey": "95274c1b15467d43a3b8a3a632a8fb7e1a2efbdf92559ef52ea6ff1b0ba1c7cc2f75ef357b2dc7f0130dc9c04aeaf4db", + "proofOfPossession": "a24ef42b04be7bcd65d8434b04f7118bf9566a0d3a36c732cf5b508ccdc12855754663bdb32c5d871eee8a0774a1331a14f25f3aeb6bddee7efaebd2214e19b7cca9f3d3bc7eed93b85b15f0a626117f24361d65688dfbe7267141f13d323d63", + "generatorKey": "6c99048cae450de8735dd410a5c8b0e4655afaebcc2c155503f890af51e067c2", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskq5attbvu8s55ngwr3c5cv8392mqayvy4yyhpuy", + "name": "genesis_65", + "blsKey": "957a970041ae9b29f33cd9baaf077f77049e664c8123b22fda3793252f71916c5df0b103ffad5cb75bdb2724d9ca3eba", + "proofOfPossession": "80d4fdac09ce195c9d685a751fb7cd9d4da7b9dc906348b4bb741ceb53f876afd0bceba75b36327a8cbd8bd3ca8ac2cc14b4fede3ce2cdac7f0bf0ad5e58840c64bdd0a0905cd6aa5da8acfcb33a931e469cadc27a42c2a04a62fd6ecca05091", + "generatorKey": "1819bea0ff11aa0cde16c5b32736e7df274f9421d912a307526069fa119100ca", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskqw45qy3ph9rwgow86rudqa7e3vmb93db5e4yad", + "name": "genesis_66", + "blsKey": "b40065dfa219e40c65c07d516158d722ec695abc91411ce57550c77fa2119e52b56cb74db7a1d805b631752e8f6b80be", + "proofOfPossession": "b7085c15521303140512fdea858231a040534a4b0c1dbbdb002c8df233634270d33e51c3699cf4956d165c0183f29a32070d8f4e00433ebcdfcae337a5f09f2c971ba97d5b35413ce032d2ec4084ed79efc917bdb75ded139fc9433df884a18e", + "generatorKey": "1314b7d167d5829fb535d15dfb5216e10ad2e5b6a349ae347aec77317b6aa73f", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskqg9k3joyv9ouhjfysscame66hovq42yeev7ug7", + "name": "genesis_67", + "blsKey": "86f828da4b3c129eb54d95bef7975281b30dd811f252b5792998718355c599aeca3dbb222678ee0af84b13f5af2400b3", + "proofOfPossession": "8e062f48ead9234b710dbcfebbb2e502ddff68e3d5be19a8e7e89b2141c76caeeae233999009f24f7b6e65f3774ef6cd09de9d5c0bb59a60ff6cb31b276f0172e35f89061f3c2d700543de5cf4d6e613ff6ba7d41c1379d6baefd844ef4cb517", + 
"generatorKey": "a9568912797914f590413c3156c9cff93c9c14193b01e7bf248195bbe8c1af19", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskezdab747v9z78hgmcxsokeetcmbdrpj3gzrdcw", + "name": "genesis_68", + "blsKey": "a03ba0f1d6bf9378681b9d96dbe8176cc0ab2a424154cbbe325fc279d02cf58bc15de966cb1e272312ba2b6db31a7f05", + "proofOfPossession": "a20a8edd978fe911da6c933d486cb9af770179ef5ee21ad869c4c35e63103cfc2ac17350ee2d35b4bbd487193cdb33ab0116fdf2f078f289fae2922f6a7e372ef8ea543d52ae74ae395dccf2dec2c40e6596c807a14c9fce45b320321f68c612", + "generatorKey": "44de3820f1a1a7351953d2d000f29cb7bffecf30582a8b3da2cb80c83b9eceef", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lske5sqed53fdcs4m9et28f2k7u9fk6hno9bauday", + "name": "genesis_69", + "blsKey": "92f020ce5e37befb86493a82686b0eedddb264350b0873cf1eeaa1fefe39d938f05f272452c1ef5e6ceb4d9b23687e31", + "proofOfPossession": "b92b11d66348e197c62d14af1453620d550c21d59ce572d95a03f0eaa0d0d195efbb2f2fd1577dc1a04ecdb453065d9d168ce7648bc5328e5ea47bb07d3ce6fd75f35ee51064a9903da8b90f7dc8ab4f2549b834cb5911b883097133f66b9ab9", + "generatorKey": "b9e54121e5346cc04cc84bcf286d5e40d586ba5d39571daf57bd31bac3861a4a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskee8xh9oc78uhw5dhnaca9mbgmcgbwbnbarvd5d", + "name": "genesis_70", + "blsKey": "929d5be8abbc4ffd14fc5dc02ae62e51a4e8fff3fd7b5851ec3084136208ceac44366a7313447858e3814ddc4213d692", + "proofOfPossession": 
"88e7331baeba342eaa907cfd7a1b5bc839a70e78b0535d68c40ddc2e4d5157f8d1ff55d29243fe2375fcfef5c3a2133e0a0d11f8b58041278a1e9a3a9e7986f906201df48987e8f8eda2e6ee4452fe58b54805e2ca4cc256d8e42083b70f79e3", + "generatorKey": "aed740da1a7204422b92f733212398ce881c24a4cfe40edeea6a59a0f6453743", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskewnnr5x7h3ckkmys8d4orvuyyqmf8odmud6qmg", + "name": "genesis_71", + "blsKey": "81f3810e7567ba9e1aa9fab7d5914a1f2ac8b11d952872b398930836f80395c934bd6e71c291193458de7de4382c913f", + "proofOfPossession": "a67d9d0708496d13f45fa3d3940954bdfdfa69814554a5618a388cab03a5e82210171f06b72b03966c8a5bd8fe3b235e06de2fc4c45333395c8e10dba086a4f50efe3a7f87f741346c07b22de2ba49eedc521cf53fab31e2033175ff3ca00f08", + "generatorKey": "bf5f4408df7a1cde279b3cfe7ba6c2e2600a4bb90d883b98ef8048ec344221e0", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskwv3bh76epo42wvj6sdq8t7dbwar7xmm7h4k92m", + "name": "genesis_72", + "blsKey": "8ae82e86c2ae47fe55b3db422b5f6e8a8ecbf4a33a0e910b4cc53d1bef0d66e3d19e8474a97ba58e31798c604758b1d5", + "proofOfPossession": "9215a181382a5769652e3818238e58496ca1c80eb6282b000708b2c9c19464153fcc8a541d8aa32378186b61fdb2183d15828ffa20e49a0dae0cb05e8c106f894a7ee7190c6eb60874477da236c05a275187bded6ac5a9c98656eb2199f736fd", + "generatorKey": "f99c543eeba441fdb22c673fa81878269c3b69a6366d8d51fb6890f2eb3118b6", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskw95u4yqs35jpeourx4jsgdur2br7b9nq88b4g2", + "name": "genesis_73", + "blsKey": 
"a58edccfbcbc35d6f9fec1535329a114cc5a2118945098c0f201345ab7de78d36a32014dbe701faf7d32b24f7a696d9e", + "proofOfPossession": "999cf3232240944ff9a14e6c4680fae450be8c0ed43fdbf8f92e7873b5482f88229768fdcfd86e22767ec1df3b5fa2fc0b08202ee4a343bfb19c8c8eabf74d44fa73c4517ad0a102faf4ae6fe87cd766d860408b51d31dadcc5674c92908c7ee", + "generatorKey": "e2f80871a5220be51352427077f6e93c2294d88be6b731b535d2ce9371274e7b", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskwdkhf2ew9ov65v7srpq2mdq48rmrgp492z3pkn", + "name": "genesis_74", + "blsKey": "8c5b12f5b7aeafb07e14c5264e7f7ecf46b3ba0e6f12619e19271a733e06e913044ea2e5c955eef3567fcc2d842bc24a", + "proofOfPossession": "82237a5371179107af8c53ef19bf3e0d055b70ddb689763e0a8ac6d82884d12c2155166af4aa92b66fa64b6a6d2bbe7602a118d597345dc100bd6983f072b9d8da7bd0699b0f3cb51f1ec5a9f2e2feb76030125272325e7f5885399f1d26c5ac", + "generatorKey": "cc83f488c03e58d083927601658d234ffd12b5cb6fe3151206f699d031dc4161", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskwdqjhdgvqde9yrro4pfu464cumns3t5gyzutbm", + "name": "genesis_75", + "blsKey": "a397bb33263b2850758a1b144401b741c1278b302eb8d27be6c61363d9cedafcabe05fbd7d9ce5e75a7078972d397e9b", + "proofOfPossession": "b22ed60a951702ec7bfd85482e59703af76c4c79fe2d3a3b81e737d53746543587d2932fcd5559d56f6530bfe48d23f5093aa30f3e299733cb56151175d22e21895ada290521908536d71480f1066bbeec7ab803376a4a81e4d7ec3bb4d71dc0", + "generatorKey": "902b7ed4708c476c7f0e96825cb06f95cbc86953130575d2c4589d8e3dc2f69c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lsk2xxvfxaqpm42wr9reokucegh3quypqg9w9aqfo", + "name": "genesis_76", + "blsKey": "81f7700c2115434acaf61e88b836be11986476751d6c02617d1087e7bb45798ac56929cb5f71c890c6159ff4d71cd1b3", + "proofOfPossession": "8bc04a899be3a7ac99e2ddda6567a0b01e21aaea8daf4848821e8233cbe80610a2f670922865f424e878add1de8c978e1913f95308a50693fbc88e991e6bcac3bfef8a1d03f89bb4dfd9c991cbf1c613f85203dfacc4376057f085967f2a7283", + "generatorKey": "621d52ac19aba86c4feef94c67ae62cfa3f6ac192177ae37be2e6b3205449c0a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lska4qegdqzmsndn5hdn5jngy6nnt9qxjekkkd5jz", + "name": "genesis_77", + "blsKey": "90f87fd2122689c54bcd8fb859c5b36d4b583272043deba66199ad181ca2c38cf48d453c46ec881e03d2b7e2e63e3684", + "proofOfPossession": "add6eb668bebf90fdd80b01cb83a31b02577b200c85845bd5260d7851c02d21aaaf6d040e6d6f27a8690c9598f92ba240cdbb6d7896d7a777c484d30ab48d71b1aee1b07083dc5d11a94416c4cf85e33ec3899b40e6222ac888104f80b8d96c5", + "generatorKey": "965e86fdfcdcd64879efe23705506faeb4dfc4244f93d47f4bf444966d2a0f3d", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lska6rtf7ndbgbx7d8puaaf3heqsqnudkdhvoabdm", + "name": "genesis_78", + "blsKey": "a94d3cbfde92550eccede718499df12f33a8ec9a4b386e4ca423161d667862f45fb06397b12dc6a6cbafc14b1cfad26b", + "proofOfPossession": "a474ee16d276d3478e1b7005960d41c0e271652f29c3178230b7fdf395801dd62196294b7695b3ccad63887558e0f27d0b121738a42cfe9acab07e6763577ad87eccb5b1d0cd725cb4a32225e79e864c238ce3c56b6db8960ce9fda82828d5ba", + "generatorKey": "f8252b40a65be6f5f6d0be446da5ab434bdc0a921fd0956b0672ea4a218d2d7a", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskau7uqo6afteazgyknmtotxdjgwr3p9gfr4yzke", + "name": "genesis_79", + "blsKey": "aaec4e157b19c0a3f2965cc636f5f82cef9b3918c071e2c6e50f57ecb44587d58139595e8f4c1fc7f76b2f7c09b1b6d1", + "proofOfPossession": "866a031b5a2a6b0525053b2d870487ac2fd39cf2cf18ecf462bc19afc5ef52f129cf88624fac73057c5375004492dbfb0b8cacb906b3a7daa4d7edf99f10ab15a90b3b328e8ad6701e838a88351fecdfb5b32eebeb80fdeb8c0345d1b5257d7b", + "generatorKey": "00245e599fdad13ed0b064c069c71c73caf868a4635c0143963a529807f8728c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskayo6b7wmd3prq8fauwr52tj9ordadwrvuh5hn7", + "name": "genesis_80", + "blsKey": "881fa9b753cb2f89d267e0615cbd1ad9664d331f21d89cef2131686b0af55112fe1ad4df7f2c085f78142e75d90d2cab", + "proofOfPossession": "898471d3356573d6445906d973f1876f1e38570b6dc9c875c88138b302806c071efbe327f66c6646f02c134c3b1b019d0227bc83acd0ca10f65adf1b8fad7c9cb383909a015fd1d678c6272e5317da58d45b89fc1c954641a61169bf1c1a1728", + "generatorKey": "5ec5a5a2c91414f5cc5e3354b58671e624bc88a39fdc8f128593daa06545d6cf", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskatntynnut2eee2zxrpdzokrjmok43xczp2fme7", + "name": "genesis_81", + "blsKey": "97a4b205ac2b65a2f17ceb49a763393935021629068fe8a8c299e49b986e79ff8cc959a7343b5d00eae2783b825ffede", + "proofOfPossession": "8a86fbb8e59ff0de4f2d717ff3c7b0f3f9cb4b14f97deeffb907428666005e613b02cfac0bac4714389d898236de2d5a02df536b511675d2cbd37dcac6dc33bf4cf2d9d43cfa710b3c695bcb8cd29867477ccf3b1e5b9e3afaf7d8d4e50930ff", + 
"generatorKey": "ce6bdb7380fa027c46edd15a072bbabd6b60fecb0e09589e20be560b333ca63e", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskaw28kpqyffwzb8pcy47nangwwbyxjgnnvh9sfw", + "name": "genesis_82", + "blsKey": "b279e1a3a5edcd1045682e7029045b70dffbae55c49b14391b9f776750193269b4fd1d9f0807d9ee66e264e08ecd97cf", + "proofOfPossession": "83a5128e710b91ab91f7726223120b389c1f77735c9c1d408c466b7f0484b020f0d2d50edc36d49e410141d8a509b132059142e250f145810eefce03dfdda25aa84214d30cdfb6ca11a929337bf53dfe4c675117c06e4a67206119ed1e2b2b9a", + "generatorKey": "bfe46727c386585d8d59c02efbe48d4c1a919ff07b87267156ab96e10ac730b2", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskdo2dmatrfwcnzoeohorwqbef4qngvojfdtkqpj", + "name": "genesis_83", + "blsKey": "82b478f1b884ee4c152490afc8b233d003745a58c236b00ecb3cea1022d59f04bf225266bbe5b0a5aa7da0a771a66acc", + "proofOfPossession": "ac4d05f93e3c374c83ab9cec2a5c67dff8a02298361584267968fad8f391af083b5041a020ce7a189fd8fdbf055a265c04f55e80a8dcf06e7b4e3358b347743f47d33bd5ee0cc4d4213995c46d6d4e1a61be929f571c1a0fa1c7dec805a85805", + "generatorKey": "bbc7ca5acae1d53e0a44a212f4c77c7601ace0e489d936c0b6f26a9fbb03601e", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskduxr23bn9pajg8antj6fzaxc7hqpdmomoyshae", + "name": "genesis_84", + "blsKey": "b067f711431b1bee09000b1c27fe39a29a5603471a6993d47bf56ece01a17fa4b00e92da90d80689ed2635e7e0f90891", + "proofOfPossession": 
"91f3d5519f94424fd59c120c05d9f2f34d8cb39e092e2a354f5a7d48e7f2e23b6a21b39a7a131954320d5dbeb0a419f10304fb857fae695c180f9dedd18ffa73082af5a6ca0c62c273915cd337570ecd8649157c8dc8836d758fe1e51f4faa3f", + "generatorKey": "9b4db295e88468a37e49445443fdc364321d620dc57afe8a5a14f07ce0717055", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsksmpgg7mo4m6ekc9tgvgjr8kh5h6wmgtqvq6776", + "name": "genesis_85", + "blsKey": "96aa1c639724f5559fb1ebbe5d218511fe0fbfe6681190cd953677c6b63c0e17ac5d9f09844845cfecbb4ab4bd5a5749", + "proofOfPossession": "82a60d6a2432fd15c7697094a89ed34a30dc2daa2b460bdb0fe3269362e1d85c79a3d2aa9ba3ffa5b1e80f983933c96f1402e95d34fb656d20f368428ba93539191319c70e6cf6f15c5cb9df9235d115d06e0e00d7a1bf64db1433ac6acb68a6", + "generatorKey": "f17b9b3bdee2ef63c8fb52d85ae07516133749a1d659bd032c3a078aca65ce7a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsksu2u78jmmx7jgu3k8vxcmsv48x3746cts9xejf", + "name": "genesis_86", + "blsKey": "884b03c63f8d095165b67cb23131ca1053cbc73739549aa2ee21ca0b2b925994855dd46a81ebc3dedb309ceadd013f8e", + "proofOfPossession": "b4879cd844644b1a21f1676bf671854afb1536c5a330c1fef26b2669238efa373f70815e01028506b5cf6b75fe77e79e0efb6ef74e8111c7f1a189d4b0bf4c867190aa57e670b53dff5951a29eaaceda788ed674acdf33eff228278dc61c3cd2", + "generatorKey": "37df5572ddb12b67b9aa5191ba9baf9d76a50307fbe188924766225d86958dbd", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsksy7x68enrmjxjb8copn5m8csys6rjejx56pjqt", + "name": "genesis_87", + "blsKey": 
"8a08bdac4af80e0d37ce01094440a82a7e5ac9ec893f9a7870d26a4ec52db8932f36384bc7c3d3e03232ddb7bcd1eef5", + "proofOfPossession": "b999cf63290a85f96f0f78326c0eb24c3acce4c2307e1a2f1d621cc75f621ccab510e42aade9b6347e95661475230fbb059cd9e4e22ae17ac73dee58a370159bc6b525ab579de9502b761010e97f6d00f60ddfed05e76a5df3dfe33866c1ebe5", + "generatorKey": "7fb2d69906c5076fa314a4e817ce424bbd4a7a21305cec93a12d31a1589dc90c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lsksdfqvkbqpc8eczj2s3dzkxnap5pguaxdw2227r", + "name": "genesis_88", + "blsKey": "84912d2f185c2058be9ed201d970f435a408c8bb3a36c430f007b69632efb2f663b51df383be6eedb80c8768a70822bb", + "proofOfPossession": "aafdb397226d3a4a4cc3b7ac906ae7e3601310bd5d0e20a0682364312937e8e3e0c3b5846a53ee536cac2a2b3f556bff06c65ef24a32495dee9d38ee5b2012113d8f032d8dd0f3f5d9af50dbd307d0e7f66aaa165620d5292da91306b0a39aad", + "generatorKey": "21f9d60315c1baeb513b5f7324a1211723d36948b64806541b8855988f86111f", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskjnr8jmvz45dj9z47jbky9sadh3us3rd8tdn7ww", + "name": "genesis_89", + "blsKey": "8ce6c9d2ed4f223635e3bd85476f0d56cdbb5e4090ae22b10a7fabd08d231193cf6d9c4f5b400eb4b310ef270811e424", + "proofOfPossession": "b896aabbcc1a165adaec26feb72fc580d4a6512dd09df40b4333381d2536b5ac36d22e91469a976ae446a6291792cb6a141013baaaae12faff26d06c6a6b722a28635c72d49fcd50ac910ca01d760e80892fc5757a18597cd1ce7f16dbabd195", + "generatorKey": "25ae368be016caae7066a6ce9f2ad8e4220d328ffb860a6d275d878f4882c70c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + 
"sharingCoefficients": [] + }, + { + "address": "lskjtc95w5wqh5gtymqh7dqadb6kbc9x2mwr4eq8d", + "name": "genesis_90", + "blsKey": "a6e64df0d2d676f272253b3def004bb87276bf239596c4a5611f911aa51c4e401a9387c299b2b2b1d3f86ad7e5db0f0a", + "proofOfPossession": "92ff87e4dfebfdee0e5572e94f62c483a9b4465eada10c3a6bed32fc92374dbbe89eed00117ddb27bfbabc5e41d90d8a0701fd215caef0233eca660d7a0bccdaf064356edaab13aff404aeb5264d8b68ab0808115e09ef541168364806a62d49", + "generatorKey": "633e1696edbd9f2eb19683c4f7e0d4686fefb1a15772a1affdeb49a44d8c04f2", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskjtbchucvrd2s8qjo83e7trpem5edwa6dbjfczq", + "name": "genesis_91", + "blsKey": "8c141e5d769c22ec90122f42bef1d1e7af2d94c1da6844bd313fca2ccf0543eab5f8c6752dd47969dc34613801dfb293", + "proofOfPossession": "9681aa250d714befe61d71f239a9b4c09ee102addb3a5e2c884074c7ba763b5c21e53aa7b12518d32c9b874ba1910e7a0bf0bd23ae99f57f6f464403b1151b3521a7a369ff94118a436e6aa767bd462d9ca491dd3e253862c21ff078878c354e", + "generatorKey": "3e63c0a5d4de4df114823934ceaa6c17a48e5a6650788cf1f63c826c984c0957", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskhbcq7mps5hhea5736qaggyupdsmgdj8ufzdojp", + "name": "genesis_92", + "blsKey": "ab0bf8a74c846dbd47c9e679ba26a9c0e5a7a5902b4f66cee7065b7487eba30262e4e5f0ee78d616d007021df3fbc945", + "proofOfPossession": "b159e28ea39b1119e4018ea19777497e1d3c4a58d1c2ecc22aa5b2efe60572cb32ff30bbeda9ce28b235fb55ab15aec206f094f37ff9a78a0931d55799c1c74a19bacfa8a4172ba078d7cad4f663a4708e47981044b1893c712c3707196451fb", + "generatorKey": "29e5cf287cb9c12b2bb77ef9dc673728132f9e3affef2d0de0d7db7905937435", + "lastGeneratedHeight": 0, + "isBanned": false, + 
"reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskhamuapyyfckyg5v8u5o4jjw9bvr5bog7rgx8an", + "name": "genesis_93", + "blsKey": "a2fc837b51e6dd740fc1530e6713b0f8c04e646e91da849517901f24d9bcc78c360223f1ad3692de2e96444008a67e03", + "proofOfPossession": "82d6fee11dc1561ffb5f36bf07acdffb95e5c329f7adc0b8937bec191350d7c4a158c7592a179ed86b9c0e20159e903100495fcd3fb5bee481e053775b232f8e0fce602e8ec6edf0fe8ba90c06e6215d7c73e88a626d2fe63c6422826489d72a", + "generatorKey": "d051790a70ffdf5bd80dc9cec003f8261128be1fc2135990accb13caeb3ed588", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskfx88g3826a4qsyxm4w3fheyymfnucpsq36d326", + "name": "genesis_94", + "blsKey": "93bddb296ef4dd5c832486b4603c1ed13805d2df1c6c2f95c8af4ae38467f1e741c1c2fbbd5f8e927b54250bffdf8536", + "proofOfPossession": "923415dc1db9b46715d284bd2a3f12313a24c1352bf0dfcdce2e0e0475fe0343d5cc9e463d5f04b99cb367e30e89f1371280d5897a0103658d710b07f8d9d3d8754043241a753dce60f2bdadcb9249b334e6f5a395cabfdb187f2739b512d46f", + "generatorKey": "028a30837b7eec19b02b06c3c2f4065290285e40a4870a677664fee3fe76d9be", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskfmufdszf9ssqghf2yjkjeetyxy4v9wgawfv725", + "name": "genesis_95", + "blsKey": "96bed36ef328566d826a6f6b874ce441ad34373487b4bcc2d48d76f2dd453e418935a7b60578c43b9c4dc954e9331a3d", + "proofOfPossession": "b4d80456953b5111777a74931f5691a6e4c0bc4f4d552aeee9ed1002903b366abab12e2d596a4387933ec676058ae64e15d7b322786d19744281028753b621ed7d49b6e6bf87983267d3208c3dc5da983d845a7a2822da4a085446172e823b28", + 
"generatorKey": "24bab6ba79973ffaa8569af2cb69b8495d20f0c7ce674814ee0615d31abe9607", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskf6f3zj4o9fnpt7wd4fowafv8buyd72sgt2864b", + "name": "genesis_96", + "blsKey": "92590fccb8c847a6957213682bb798d7d18a368515f070537e1f6cfd45d8dfc50863105db9d46189b92c0e0d009fe09d", + "proofOfPossession": "b0aa8214fd746ec04d9cc97e9641a7ad796ed12ef08c9227b5358cf3bd9f049af2ad5376055361c34d265e5d0cf3518d05113928f487bf17012d6ec4deb53e5112b72f2e4d8dc8eed4f68514a9c6bf735c9ccb9dade32ed589bea8e677135302", + "generatorKey": "8cab5125c910702b66a83240cf836b10a0f2dc3000536799300ed8f1ed9a26ac", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskf5sf93qyn28wfzqvr74eca3tywuuzq6xf32p7f", + "name": "genesis_97", + "blsKey": "947456674b5616341cc932afb30e42973dd17582a81e5fe958277efc828535cd7c9c778410c52e069ed23e4cf629814a", + "proofOfPossession": "872ce3383378215d3be299f32196e9cb2ae1f9e06101afbb9e7709eafb37eca8548f156bbdfbb120c2d06fdbfdf5455107f2c818bfbc9b4e9f5fb4c50f79b24f5fc84f9e137b286d71c3d588a7af684d36bf701425b25ece2d9fbacbadb58f4e", + "generatorKey": "d05b69bda8b5cd103c620a814cbab2f2a131dcfda6bd4cd568155ddb1afd423b", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskfowbrr5mdkenm2fcg2hhu76q3vhs74k692vv28", + "name": "genesis_98", + "blsKey": "b57835b4d3285a134730de7b29361998787c2b4853e7a5e15032b516335e81c0797a51d00e032585efa05c27d2345a1d", + "proofOfPossession": 
"8d9b7510b3332a22635815b809c3e1ef96427a20f15b3f41112af74a9aa1a401d83d625dc5081f51aefee7591d52afaf1451e78e4f3efe29ec171b8239af73fd87b2e8a1aaa8b701c3e5bcb0d609f098738d29e0af57ea010953297c9c9e19d9", + "generatorKey": "5812017e0d25131165ebc256f39ccece115fb58ad5fe0766f78054f912832d6c", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskf7a93qr84d9a6ga543wernvxbsrpvtp299c5mj", + "name": "genesis_99", + "blsKey": "a7283bff41249c3d2a0f065a27448a4c5acefaece74e51ec432c418c4bc8e6f0eb60160feec4729b9c0b933e9ec5e528", + "proofOfPossession": "86f1ac081ee08568266dc39727540a5d50f03e544f73d9a3ca60d87cfe9b6718832e07b2720d42e0e818c5fe2d45099a0774af1e6b123b41a3eb7eb3a1443d248a535fe9ef93f0027a8e8f44686dc33d677b79251c22022675395a347d0f3dbb", + "generatorKey": "ac34c0731cddab10726e634cec30294f831af045a0614733ac683ccdb6bc7eab", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskfjd3ymhyzedgneudo2bujnm25u7stu4qpa3jnd", + "name": "genesis_100", + "blsKey": "96a70c8b1343511359f7205313eac8c73b2838e25eda58cf8c13fa1d2689aee3df70522bcbd36e0bde958409b80cc8ee", + "proofOfPossession": "89564da089fcc38e4973cf34b5a8abbe8e822bb59f05633156d9dc0b10f2aad8d4621ea66023ec2a10d6d581927af3bc0746cd8293ea22c8db0068c127d38c4c2dcfe777ffc03e773083fd0036894cce7c2596301381941523f4f2ae97bb79e9", + "generatorKey": "326cb34aa214c4952f646d93af8cfbe58ec74db76db54484b5a23918cba8743b", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskffxs3orv2au2juwa69hqtrmpcg9vq78cqbdjr4", + "name": "genesis_101", + "blsKey": 
"a3e2b645a315827618e58c1eb66dfef3744c8111a0c7b0e8535a3ec31d78ea2630646fea1da5609988c5d88997d663fb", + "proofOfPossession": "b55d1c525f96bba45cbefbcadad16279c9f61f790dfc3e3c824003139f9994200079faf573eddb863c6ba1fd9b7d7364146e3f20579b065355c75691e06be2c7304fe48d32fbfcb5ef38f8ecaa6905e9ca6a7c1124c45a6ab2b06668cb3decc9", + "generatorKey": "4e54056fabe183ab645962cf0b70e658d0eae506c4ade8756652ca7f76733227", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + }, + { + "address": "lskgn7m77b769frqvgq7uko74wcrroqtcjv7nhv95", + "name": "genesis_102", + "blsKey": "8808cb1e4cb5c8ad18ad4a45e35388af4099993effb9069a28e56c5718944a3b4010ec1ef54b4faf4814fad854322468", + "proofOfPossession": "890995fe98a83721b0069aee00c2b264239b3b833b71f64a5f48b4340a969fbac1ffc0664264fbf5af626d37fb3fe6d403dc7ef0ec195cdab82e7615d73ad7a2d326a761fdcf18a6a83efc4f502c724a10ddd89f8b6981496c34b1b32f512781", + "generatorKey": "0941ca2cfd9b1e0cc4bf0dbfd958d4b7d9f30af4c8626216999b88fc8a515d0a", + "lastGeneratedHeight": 0, + "isBanned": false, + "reportMisbehaviorHeights": [], + "consecutiveMissedBlocks": 0, + "commission": 0, + "lastCommissionIncreaseHeight": 0, + "sharingCoefficients": [] + } + ], + "stakers": [], + "genesisData": { + "initRounds": 3, + "initValidators": [ + "lskzbqjmwmd32sx8ya56saa4gk7tkco953btm24t8", + "lskzot8pzdcvjhpjwrhq3dkkbf499ok7mhwkrvsq3", + "lskz89nmk8tuwt93yzqm6wu2jxjdaftr9d5detn8v", + "lskx2hume2sg9grrnj94cpqkjummtz2mpcgc8dhoe", + "lskxa4895zkxjspdvu3e5eujash7okvnkkpr8xsr5", + "lskvcgy7ccuokarwqde8m8ztrur92cob6ju5quy4n", + "lskvpnf7a2eg5wpxrx9p2tnnxm8y7a7emfj8c3gst", + "lskvq67zzev53sa6ozt39ft3dsmwxxztb7h29275k", + "lskvwy3xvehhpfh2aekcaro5sk36vp5z5kns2zaqt", + "lskcuj9g99y36fc6em2f6zfrd83c6djsvcyzx9u3p", + "lskc22mfaqzo722aenb6yw7awx8f22nrn54skrj8b", + "lskchcsq6pgnq6nwttwe9hyj67rb9936cf2ccjk3b", + 
"lskp2kubbnvgwhw588t3wp85wthe285r7e2m64w2d", + "lskmc9nhajmkqczvaeob872h9mefnw63mcec84qzd", + "lskm8g9dshwfcmfq9ctbrjm9zvb58h5c7y9ecstky", + "lskmwac26bhz5s5wo7h79dpyucckxku8jw5descbg", + "lskmadcfr9p3qgx8upeac6xkmk8fjss7atw8p8s2a", + "lskbm49qcdcyqvavxkm69x22btvhwx6v27kfzghu3", + "lskbr5cnd8rjeaot7gtfo79fsywx4nb68b29xeqrh", + "lsknyuj2wnn95w8svk7jo38jwxhpnrx7cj3vo4vjc", + "lsknax33n2ohy872rdkfp4ud7nsv8eamwt6utw5nb", + "lsknatyy4944pxukrhe38bww4bn3myzjp2af4sqgh", + "lsknddzdw4xxej5znssc7aapej67s7g476osk7prc", + "lsk3oz8mycgs86jehbmpmb83n8z3ctxou47h7r9bs", + "lsk37kucto34knfhumezkx3qdwhmbrqfonjmck59z", + "lsk3dzjyndh43tdc6vugbdqhfpt3k9juethuzsmdk", + "lsk4nst5n99meqxndr684va7hhenw7q8sxs5depnb", + "lsk67y3t2sqd7kka2agtcdm68oqvmvyw94nrjqz7f", + "lsk6quzyfffe2xhukyq4vjwnebmnapvsgj4we7bad", + "lsk5pmheu78re567zd5dnddzh2c3jzn7bwcrjd7dy", + "lsk56hpjtt5b8w3h2qgckr57txuw95ja29rsonweo", + "lsk5y2q2tn35xrnpdc4oag8sa3ktdacmdcahvwqot", + "lsk5rtz6s352qyt9vggx7uyo5b4p2ommfxz36w7ma", + "lskoys3dpcyx5hkr7u2fenpjrbyd69tuyu5ar4dgy", + "lskoq2bmkpfwmmbo3c9pzdby7wmwjvokgmpgbpcj3", + "lskowvmbgn4oye4hae3keyjuzta4t499zqkjqydfd", + "lskos7tnf5jx4e6jq4bf5z4gwo2ow5he4khn75gpo", + "lsk966m5mv2xk8hassrq5b8nz97qmy3nh348y6zf7", + "lsk7drqfofanzn9rf7g59a2jha5ses3rswmc26hpw", + "lsk8vjsq5s8jan9c8y9tmgawd6cttuszbf6jmhvj5", + "lsk8netwcxgkpew8g5as2bkwbfraetf8neud25ktc", + "lsk8kpswabbcjrnfp89demrfvryx9sgjsma87pusk", + "lsk8dz47g5s7qxbyy46qvkrykfoj7wg7rb5ohy97c", + "lsk8dsngwh4n6hmf4unqb8gfqgkayabaqdvtq85ja", + "lskux8ew6zq6zddya4u32towauvxmbe3x9hxvbzv4", + "lsku4ftwo3dvgygbnn58octduj6458h5eep2aea6e", + "lskuueow44w67rte7uoryn855hp5kw48szuhe5qmc", + "lskym4rrvgax9ubgqz6944z9q3t6quo5ugw33j3kr", + "lskyunb64dg4x72ue8mzte7cbev8j4nucf9je2sh9", + "lskrzuuu8gkp5bxrbbz9hdjxw2yhnpxdkdz3j8rxr", + "lskrxweey4ak83ek36go6okoxr6bxrepdv3y52k3y", + "lskrccyjmc8cybh9n3kgencq8u7fh796v2zfraco9", + "lskr8bmeh9q5brkctg8g44j82ootju82zu8porwvq", + "lskrskxmbv7s4czgxz5wtdqkay87ts2mfmu4ufcaw", + 
"lskrgqnuqub85jzcocgjsgb5rexrxc32s9dajhm69", + "lskrga27zfbamdcntpbxxt7sezvmubyxv9vnw2upk", + "lsktn6hodzd7v4kzgpd56osqjfwnzhu4mdyokynum", + "lsktas5pgp3tofv4ke4f2kayw9uyrqpnbf55bw5hm", + "lskk33a2z28ak9yy6eunbmodnynoehtyra5o4jzkn", + "lskk8yh4h2rkp3yegr5xuea62qbos6q8xd6h3wys2", + "lskkqjdxujqmjn2woqjs6txv3trzh6s5gsr882scp", + "lskk2vnyd5dq3ekexog6us6zcze9r64wk456zvj9a", + "lskkjm548jqdrgzqrozpkew9z82kqfvtpmvavj7d6", + "lskqxjqneh4mhkvvgga8wxtrky5ztzt6bh8rcvsvg", + "lskq6j6w8bv4s4to8ty6rz88y2cwcx76o4wcdnsdq", + "lskq5attbvu8s55ngwr3c5cv8392mqayvy4yyhpuy", + "lskqw45qy3ph9rwgow86rudqa7e3vmb93db5e4yad", + "lskqg9k3joyv9ouhjfysscame66hovq42yeev7ug7", + "lskezdab747v9z78hgmcxsokeetcmbdrpj3gzrdcw", + "lske5sqed53fdcs4m9et28f2k7u9fk6hno9bauday", + "lskee8xh9oc78uhw5dhnaca9mbgmcgbwbnbarvd5d", + "lskewnnr5x7h3ckkmys8d4orvuyyqmf8odmud6qmg", + "lskwv3bh76epo42wvj6sdq8t7dbwar7xmm7h4k92m", + "lskw95u4yqs35jpeourx4jsgdur2br7b9nq88b4g2", + "lskwdkhf2ew9ov65v7srpq2mdq48rmrgp492z3pkn", + "lskwdqjhdgvqde9yrro4pfu464cumns3t5gyzutbm", + "lsk2xxvfxaqpm42wr9reokucegh3quypqg9w9aqfo", + "lska4qegdqzmsndn5hdn5jngy6nnt9qxjekkkd5jz", + "lska6rtf7ndbgbx7d8puaaf3heqsqnudkdhvoabdm", + "lskau7uqo6afteazgyknmtotxdjgwr3p9gfr4yzke", + "lskayo6b7wmd3prq8fauwr52tj9ordadwrvuh5hn7", + "lskatntynnut2eee2zxrpdzokrjmok43xczp2fme7", + "lskaw28kpqyffwzb8pcy47nangwwbyxjgnnvh9sfw", + "lskdo2dmatrfwcnzoeohorwqbef4qngvojfdtkqpj", + "lskduxr23bn9pajg8antj6fzaxc7hqpdmomoyshae", + "lsksmpgg7mo4m6ekc9tgvgjr8kh5h6wmgtqvq6776", + "lsksu2u78jmmx7jgu3k8vxcmsv48x3746cts9xejf", + "lsksy7x68enrmjxjb8copn5m8csys6rjejx56pjqt", + "lsksdfqvkbqpc8eczj2s3dzkxnap5pguaxdw2227r", + "lskjnr8jmvz45dj9z47jbky9sadh3us3rd8tdn7ww", + "lskjtc95w5wqh5gtymqh7dqadb6kbc9x2mwr4eq8d", + "lskjtbchucvrd2s8qjo83e7trpem5edwa6dbjfczq", + "lskhbcq7mps5hhea5736qaggyupdsmgdj8ufzdojp", + "lskhamuapyyfckyg5v8u5o4jjw9bvr5bog7rgx8an", + "lskfx88g3826a4qsyxm4w3fheyymfnucpsq36d326", + "lskfmufdszf9ssqghf2yjkjeetyxy4v9wgawfv725", + 
"lskf6f3zj4o9fnpt7wd4fowafv8buyd72sgt2864b", + "lskf5sf93qyn28wfzqvr74eca3tywuuzq6xf32p7f", + "lskfowbrr5mdkenm2fcg2hhu76q3vhs74k692vv28", + "lskf7a93qr84d9a6ga543wernvxbsrpvtp299c5mj", + "lskfjd3ymhyzedgneudo2bujnm25u7stu4qpa3jnd" + ] + } + }, + "schema": { + "$id": "/pos/module/genesis", + "type": "object", + "required": ["validators", "stakers", "genesisData"], + "properties": { + "validators": { + "type": "array", + "fieldNumber": 1, + "items": { + "type": "object", + "required": [ + "address", + "name", + "blsKey", + "proofOfPossession", + "generatorKey", + "lastGeneratedHeight", + "isBanned", + "reportMisbehaviorHeights", + "consecutiveMissedBlocks", + "commission", + "lastCommissionIncreaseHeight", + "sharingCoefficients" + ], + "properties": { + "address": { + "dataType": "bytes", + "format": "lisk32", + "fieldNumber": 1 + }, + "name": { + "dataType": "string", + "fieldNumber": 2, + "minLength": 1, + "maxLength": 20 + }, + "blsKey": { + "dataType": "bytes", + "fieldNumber": 3, + "minLength": 48, + "maxLength": 48 + }, + "proofOfPossession": { + "dataType": "bytes", + "fieldNumber": 4, + "minLength": 96, + "maxLength": 96 + }, + "generatorKey": { + "dataType": "bytes", + "fieldNumber": 5, + "minLength": 32, + "maxLength": 32 + }, + "lastGeneratedHeight": { + "dataType": "uint32", + "fieldNumber": 6 + }, + "isBanned": { + "dataType": "boolean", + "fieldNumber": 7 + }, + "reportMisbehaviorHeights": { + "type": "array", + "fieldNumber": 8, + "items": { + "dataType": "uint32" + } + }, + "consecutiveMissedBlocks": { + "dataType": "uint32", + "fieldNumber": 9 + }, + "commission": { + "dataType": "uint32", + "fieldNumber": 10 + }, + "lastCommissionIncreaseHeight": { + "dataType": "uint32", + "fieldNumber": 11 + }, + "sharingCoefficients": { + "type": "array", + "fieldNumber": 12, + "items": { + "type": "object", + "required": ["tokenID", "coefficient"], + "properties": { + "tokenID": { + "dataType": "bytes", + "fieldNumber": 1 + }, + "coefficient": { + "dataType": 
"bytes", + "fieldNumber": 2 + } + } + } + } + } + } + }, + "stakers": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["address", "stakes", "pendingUnlocks"], + "properties": { + "address": { + "dataType": "bytes", + "format": "lisk32", + "fieldNumber": 1 + }, + "stakes": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["validatorAddress", "amount", "sharingCoefficients"], + "properties": { + "validatorAddress": { + "dataType": "bytes", + "fieldNumber": 1 + }, + "amount": { + "dataType": "uint64", + "fieldNumber": 2 + }, + "sharingCoefficients": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["tokenID", "coefficient"], + "properties": { + "tokenID": { + "dataType": "bytes", + "fieldNumber": 1 + }, + "coefficient": { + "dataType": "bytes", + "fieldNumber": 2 + } + } + } + } + } + } + }, + "pendingUnlocks": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["validatorAddress", "amount", "unstakeHeight"], + "properties": { + "validatorAddress": { + "dataType": "bytes", + "fieldNumber": 1, + "minLength": 20, + "maxLength": 20 + }, + "amount": { + "dataType": "uint64", + "fieldNumber": 2 + }, + "unstakeHeight": { + "dataType": "uint32", + "fieldNumber": 3 + } + } + } + } + } + } + }, + "genesisData": { + "type": "object", + "fieldNumber": 3, + "required": ["initRounds", "initValidators"], + "properties": { + "initRounds": { + "dataType": "uint32", + "fieldNumber": 1 + }, + "initValidators": { + "type": "array", + "fieldNumber": 2, + "items": { + "dataType": "bytes", + "format": "lisk32" + } + } + } + } + } + } + } + ] +} diff --git a/examples/poa-sidechain/config/alphanet/genesis_block.blob b/examples/poa-sidechain/config/alphanet/genesis_block.blob new file mode 100644 index 0000000000000000000000000000000000000000..9a23b2e52bf4c0bd5f7e9cfc6341f17c52b86a40 GIT binary patch literal 30475 
zcmagmRZtvV8>nsE-JL;#4DRmk?i$=RxD(uhOOW911b6q~Zoypw1Pij~`>XzXDQZ{k z9HerPlZ(~UbN5o)fNq20G9~-5{^EUCz-AWin+hH$4=7KG2(G$Hzy#$|C$8- zMHJ9sL-CM^$6A6GR)Fs|d*VHt>M}sHRuW7u&!XbTG65kAfJIkEy>7fb*w6?aI!9qM zfd=>aS5ix1lr;ah6h);?QyEMf%oyAT5OQ;F`K$NTXK`JyhY%T(0)mJntYF<8F*6a0 zh-Q_g(gg+%3n~my7zikK)~>cj_MfdCOsznF zp@9)BkAfpWIyt&xOo9Hf4UT{XUeUMSCdr9AOs9F1{2G@HGgJkG0dHyUVD4(|YQ#*y zTx^x1siCMEX@*4{#E&OCf2Pl2MukibZ)_K0gk&Wr*ZAfEEj#Yef+s8dy!6!)k=*zz zNjRH#*%C&kXeDrHPJ>4eHaJ20rcetaX*@0$I%&KUpRJ5X;G4cM&AmLb9jyvB)uBBc z(<~ss{6G!kJMA@f_!R#9dO>4TNCnEGNEiplyCc4&;e+OrA}ASB*^-$E&N z($0+1rpv@C;IgNX&0siPPe2KcoubXKzzoI>CJpw#ALIXLgrvHL6vO3JG&CsZkR=?o z)sez&{?Eulz#JoDspjW)5M{hO35YhGswREa$7C@?D@H>dpHtk7Fz?PmKAom$8M9p6DHKzmljl@YOg(uG-i8XI4lkt z5a7Cr0C8~yYuBU{-J^p^)wv|@7n-l2j0w9ERfcgoZ1K^!>N0Z^dN41J4XiONt?rI~@Ar36{K_@R-Q6_YC zj(C8+x%}=?Ijy2)1)WYbJ=o@u_PG(pNJ08)-z%L?B5zlZpN4_D-2sXU0R2@3HqRCa za3?eikgt>cW3`8)J+t-uxQE2Rtju_$nRip%*m-r!liE7RfiS>f7=9#JQMRKmwc)8Sb zxbhCeqtLA0eg4O_e%@}0G5QsMowB(Lcbt_ba7m*+Z#CIlZn8oWntvbPv|cCF#SboVX+gRd^)Ie z=mZnVjdK&H{-ET=^{?OD?Gep|a{Y#Z*j0gr@&4uAJn9YH-c%#$DtSgCKU({>{TMXB zlZ513EhU>_i_L3MJI;P!XDq@VRsGYQ9kb@`_lneNyr{7Gg%%&@*YkD~mB)O~>R=!M zX=uP2?pr+$t4XDZr+;B7aS?LjiwZv^8-A9v4 zoQ3zh<)lk4`kI$7*E!7%v?A1mTx@86E>&OXnwZyT?CO)GurOx5L2U?)rm%hP!~z1o zPLy2mJ4-A>(5XMwNkHG-8Lm_&334QC%1X>%(?+d*FruR#TH-ui2#+vOQMrcHMAx6< zuK#D`Bw!Bfkn#V7%3f?%zy=#Z%Xkc4PUC>~Tpw7Nr&9WbuxCcSRjP1qGIOv|HNgcP z4$FevI4vJBsB?U{@S3IrEK)&j!Kj^HEfp1c9q9?xgGZMwG%5oYHqEt4IUg{-CeuNT zi=n*rb;C)EUNYg72H`kc0=z!b($==N3MDjCRZA6I?=QNb3^YQFQz*OB^@e|vI1oTg zE;~PERlu{0`@3r3O=Oyj-dpm&;=ufJxC&YBT^I2#7Xc zKQcHvC(})ka;I)EKLhs$pVW0$hJO~`1$mg=S!t_7jRz>A9VpE_p_B%7{^)PM1MCdp&&$)d0{ z5DX!={pf*@ki`|zdO>P0Q|9w_QQ&drJrX%)qbyD8a*kjJuKlRMU}2`CE`A^YP=ss@ zo&CmHd^bw|R}VavCOYqNO9gPz@)XA0;U*aDxe16(t2Q;=O{dp0& zadTc+kCy#KW%Frl^DDz-t2twt>FnF@JlPdw_w;M5j zM|qat35iXcKxa`BJ-CI+cn1$DVootGS%Hwp>}X8vTVabH@y5ySt+R{!7$6|l({;)hBzoJQz1MC%Y3(Y#vM@SkT4rrv$6+ir2(o@s5smOA!ij zaAw?bHFD#|d5w0CxVLbROj<{Z{O9i-+1~`m+&P%0u5K 
zC}C6Z0vw`H;-YA{Iso&b+jH1i4B9wYOH9e_(=z*IxB8F}p+w>3{M>5GsftMB6X^wr#hY(AMQyY8@)s_d@!58r`y%~nm^zVK8>?`e*g$~h zqT(M@%16fMquSz&^!h}DfzWwPw|a28ZkyC4F#^sHNSamu!_Xp0aEl4Hjkuppe=Z8K z+~2JvsC9B_?-gqj6a`*_Lm{py-|krmQWeloiP#KJtNNq$0cGF3x(E5E9Cl5So@f{& zq_aVn>m=sMpjcvTyk zksGI@f*ra6*T>`it8<@OewnP-Ja!Y=I#~?D;?a&`X_n6HSd1g&+$K2wn@&toj^aQj zJH`;y@9jYryfacy8s*o^5KOa5MHWR1NfTSA%h1#sN4W5lT$EJh0pK=y(71jlCU=ZcSl%g!Ke46bJ+YGy41>imxvrWAt8<*|(1=~~#yrkxa)}zJZ2-A( z3iQ{fYWBpxGe+N+=&I7`m{fQ)Copuj9cV5Y@$T0I{&W+yHFKcwAPkmz>T4rO~(i5Xb~5|~pb)E{UWGe}Px8;?g(1EYW7Y(U?W zkq~B1(b;SwD{y9y7)Z;)@j5`;!9x74gSd@8pd<&~S2)+)WDzfaL!?p?_#|@e&oci}tE7K(>dA8u4x=LB*5% zj~3-i$I)PDY;O~G+o0$y4T6*xMm0>!{FSf01WC1cp>MT~H)-Rq&9cj#`F&9FL=*VY zVO#tP8mXjbDeA&!cYnIqibB<1kadX=!6X@rqs8fFrmZxBCw>RT{wpCywd0|LZ8dE}U>5}%9q z>>RoF+Pbl`O&>ny{g&MEMy*crTp9g<6ci01MLIJ)lT-hLiOl^E^E`3W=07AWsCepg z;y0nC{o#BY0@%s;CuGhqBRR{l!EnMJr8cPQ_l60P;N?x3Er$&R;JkX+y(K_*SBuFE z)~NlNJf+CY6i>R~XkqzeJhj{DtG_nr+?J2Ga=6}qht4;9ZAAy5Q~gy(QTXW)rew%o zyk~vZ3)9mP&XBnh1ydHxHSm$Dm&{~)wHp#^pn)=_Gg~GO1w~h&;*k zG;gb#B`@UCpA_CvqT3|k#rftd3nFZLNY@cUPkK%CLCvCwfNnewbHSBL_-{LvOl7#q zO$Gx!Y?0&jC+2O9kY0I=sOkScB7Y(qkRi9!lKK!fB%Pu1 z?w1_Qn&9>#o2g$x5B;6wWbJV8>i*+JAt^=3b7^fIfNc)v!t+EhncTPt8wtOXVjcU= zc;z$Q{-X{Nfze3f@k71krtMsIsnRDo3-P0#>*RP8!%W|tDIqH4c%WvDUt>}H*Yn)i zxi>gLAZ(if`?VT|329guN9=47^5lruDQdRz8`HXzqThEQfFD0u;Ion?bM{K)&UWX? 
zreP5uH2F#YylqZ^m9cEN!Uv=n*!4ib+QRskK{EMU;@`ptI3kSikf7rE_DYiCBz3rv zmJ>ZT`h1d%e$IC+Wtp{O!|9?2x2kf^l$R6WYkTfQc(cpv=;;$hZXAj0MRjVpJllm0 z7xH^Tu?8YnxZt$5Ci^atAvRzzw$Q_ONQYQJObh8i1k7VP{1x7}0hK1tY?d4+G$Z0x z@gaKnHXlmcWXs);9`{@=Q{-STc##kqF;ynNSX*e=uMY}ZpL8KXsHDuy6??{7_XZ=*yiUM7 zoWOga?g@oGqz~cI??MMms{Z8voto>TP;kweq?R5B1e~8iP{okhW0IDgK`F}|RAN)z z#+JAg$YFDi#OPjAe9St@pphR2vebE{%8!kP4@nY)Bf5U?kf7qp^8^~vpr$APD$-cI z(dfMvgbu<&GP_3Dvp!#}JX~+0GNFDwyMpLzgmZUrMRHUkHxBu&)(&1zo3>}SH-7f} zuT&iMq>QIszb%%|ND@jR+<|F_i%JN-e>2t7mUVjFJ65f!&+63vy3xOMXmVJ#5wgNT#xvbLIIlji7Z$MDtd{4grt z&WZEW1n%K0qYK>4hn1AM=g6*ND91dJ^_EEUXcE}aI^%nX{9o~u>0wj#i`q>>-hR%Q zR~^K_3YrK5igh303`m-^mOM$KP7Xdgxwwg@9;`Z1{PqNGe$cxQ+FYU2m3yPNN4Vv) z*|L&^Zfd@WS{?@o)rpF0N*Bqeos;i}q3+l2Us6J6K*S2ePbXBRR=Id#sWD2t<@^+b zY~Z?;`+LqioO-YW%-KocZd;8MioEISAW$e@00NwULH!cB?{G8kNR}~M?O2+(trMqSJXJYu4>WMW#P82K+3(+)BypF%rWdD^y+&p9MMK&I6TH7V2ZoGR+PO} zZbV&i$~#IQkUEHKB2y`;C6ID6e{($`rNBH0cHbdE#S@stk)QV$?Vpw`Is2D?w%W0k zWx92(wMt6JlamrbS2|dygsER=@zWx6x?PcjOH0U&^IsgsjPZhXte0CiwGVVeWCqdY zPqL^p0vLyU68XC{Db>e{ijFlKP7QMhJ2SykG5sVZcL-HF$&>{)r4&?9P0Zz*-)2Of z*iABghAfz9lBVuga8F^c(+#S_U@1B!fdG?!-}0QvhHx?U!~4K-anmY%#C{UDebsT; z*0yK!Z}cCKx^0Szwrm%1JKe;T&GO9cS`*v#{~_5x#S?F#)?A{{ub%O{hY780I`MGz z5Jkwqk9X zHqxwV%Td4DKvS2|jcot|WRGF8I#4K_Yq{6~XoDM_S@yCqu?RWUIvqA# zsbQb-KO&2v?GXn}IiueY^Kj@#sqj=E0Pm2X;`x>uAy6l%Bz8!H9Ho6|Mox2U$EB^7 z=O44i{&I4=Yg$U>Ir~>OjF~JB-lrg2%!%AMA^)6728`$%(L76pVH96tg}4h z5vfdNe1}2*pJsqc-V+Y?_0D4Q6mFj{7VWXzX%EHg2~gO``>WXcprxrjc4f+CNZpVt zZS+~Q#c$4)upBbXm0M#b2jj#I&`SP+r>#3_p`bh7;`5?RJ=N@J>?!Sgf!ggNt)LoC zNBMv>bB{%HM))Vgi}yE9QF>-JlUow;9THSLh4q9g3;wi6 zaIst)$m%De`6i1IO8AR+$>&J@5wyqCEW{4QK!#{{)%Lkok*cFMPQj7@V+(#iu~*); zQez7~mtZTo9=gTm`zQp`F#a(gY9yF)B=L_ha&|`vbozaW9h0Oyc}``5Z}?LELIt55 zizf#IH5Oj94(@r-M;YwIKtRl@PwYdQfhP$xcW@q;06wzOx20a*Mw+hBw8m+NO|6f} z0X5%aGrOFFk)nbT2rv2B#o(Xskf7qJWR{bif2oY_S;zzSxHjw_ zA?dI?HI*x&wMKqX>2x^wYn@+1Zd@COtt3!q%0oPUi3c>TQdux3OTmO~?HEGE7~!ip zHM|ULT2|(ku6*nWlU*&PTSVK%GFb9E-5M+1-ZsKKSS8k*8WUyzX#y9IWVdSoE~O^x 
z-Qf}nwZw8AcHU3Gy9)$7QvEx#XOtwkAB+umlc21awW+(qU1F1N!^E3&z8~xPfV9|P zd-O%HtJa{p?)kBiZoafemHiG0DxUB(!YF9K@7R&G7IcNfvw6kz5cvczZEcvNyz}yH ze3Vi{GiP`Icvyw-nXR_xbbfNsnUpt@#}}yr(-DO=w;xAJ3?tNuLF1!LK)@74iXTldO@f zjZC_J=~6y%6I5Mh^7SwLJ0z%hTBN?@{Q8uCsUqsP>-_VwkDaL@ie8(%Rith95fF>py8dyh%!?)SrL&ZbP}1h-FzTrxHAtVAT5V~jTb$s z;9IU!VyaP`dXNfiS+m4HTI`6#t;)N14zB+vE8HmFLmBo8ncVU`3>^9~d9&D)2xU#K zcU8p*6)+T?DMis2o%m{Q>D-WW=_@ zjk4mkrRN&wLATKq>9GF}2`ZlAMHa)*Bh1(x(&XvEq1!$c0~tPCa~91nDjM}hKQKt` zF%SD~w4Rh9%E}QCX(&L?la<_1aA_#n+ysealzoJfAM5!_{E z^&qxfB~Z9?{^HJ_Dg_&$U!R7}CJ>|Xx2er(*!xei?4W!CyTmH(X!g9wu6_2Ubr_Zd`heHPsbVvj&WvRO+ z96Z~9qk>l$1_Hp)QDp=$fUYJ{DiNZ`DG9B_mx_=77UBrav{R6aO58snz0Ub!gz^|X zUc5Gu?^LV&Q$Dw*1qQ8;L=eq&I!a{l>p zKXx~6ab8VqM9(p+p)%;0KQu2s}4 z9<&B)HZ_k;c&7*#L=Q6?p)GueltCr>;!Y7UY6|?+4u&=);t6k$mM*jwS;dEBZxp$F zVD5K22o16WAmGXS@xg?TRnZQD&>JpeC9sfqb~CJBzd{XG<)mz1_v0RqAAI}X5GiE< z`uRdM6M_n&$E#iIJ0z%hO67!liiE~thoHZRPN6sWUlq)eIgac|JdWS;W8!mQUv2WX zUMkt5K%K2ATw|<&_FWq`$;2~oxBU#O8p(+88FvKz9=gVsrDIfyr)r1%?qq{Banx35 zpnrC?9q%`GUWViT`IJ-Ls5f(X@-(-k_zJ)ylpT?4)}}M{H#R!_hIR^BUs&0Fn+|_u zA6eaD?A{6lB)r^T7n~;TCEartK0g26-h!jy?6!wFCTcYZ zEG?s%o7koMbKM8dYdD2pEeBLR5pP>eyqb#2I!2Vt+PVwX*-Vf@-Uo4`e#Fn|AxJ2RUzsL)2C?3eG>F*ThoNNzXGt*WsNvny# zSuVT6TJYTdB3dOkuJUT-=u~EhXkmxKF#JPW5p+A$b!8QvE+r_28c7Y+qz3q26ZFU7 zxh&PF3QoBo4aek$|5Jv9)wYV*@nN!0DtgU*XXC&KoGBcV7GN;=o zgFX}V$7_O`pv(1g{2EEE7ok^KUDcPqI3Ys`QU4A~y!tk?a5o9VL$Mr|@1P}oOe?Yd zMXhT1OaKwpmi;X18&i{0tZufzj_BX#iT?h{^=HGDF5NtSHUV^bcv2lH&ci|hzeJ2_ z7y%$4QZ(|?wCaaSSS-vT*x-6rrT6uLF+qlS0(+v}OS_ox2V@BScoCOc5g#v7vf|&G zLIcLMGqiU|Q1LVm1hgajb0tZ_%ZzS(1{9#3s%~1|P44SM&_Qn~tHH`gp=i>i76uQ8 zpE8~Va|nSRn$0N!9;9Er$K^ws?gHU1{6}$1@`#{OO%cB;>erALN^K@@qWG##^Wpu5 z+%h9_z)iNI|WE!oa;mc8|!Zk47SY=tv{&Y6Rvl*HoK9M}TKj64i6sgci5lxab&78KqKVzs@Z+vfi408R zYUWTttj_IHCecc>tZK@_Vk;`AW2EwIwxo&8U^&KtKi=IiannLOIjKmxh&D+s+g5=- zlM+}nE`sMca$JdjQjqzGywNPh3G-nmj#fGsZ#nbuUq2vYP|T@~qzxi;_My(UzxM@T zc%n|eLxPGY9Jzj#rA!LV!_sG^L^bD7m2boBrA=}W>sYpkF&WxID(j_!w$>NhwdStP 
zNq7i!cdhKAhDe+$$5#4e^L4ayGva$9^oYLE)h%(gw*AEL^$#ese?8$vgH;XVier2! z9awmNP4W}7is^HQs@P4Lvz>?>p6tC7m7R~+nol~V_E0m=QRRF))vB7+1)ZF;)}lZ_ zS#6K$>+$z^UX4|)-5E!i1c^PYCdN-|-DJ^=aK)s)ACU26a6-4_6Zm*Nj&40$=*J_( zYQpc3pyH|f*#>=xZsF9W(X{eRqosgFLy4@HGMW-&H*ZKo!G$`-+*2?Ab<=5 z<(kg#mixCYx5?a9MaF`Y9RKG5U3Gc0QMlDejjE5xVG=AyB8A0ltd}_6E|h!y1w^oS zNKo-ai#g0WH`kyyU`q8;kgXty`hPE7VRg$7s9x`4n6@+cMM41j!B<0~=ue~BB-1aG z8`rI0znOzZ2|knZFg4nvNvS9LwqczK;0t!#FP>Du(0eyK4PqQ&4=>PjJ!OWG3nSkr z{gCTw(X_qj*sc?>)MWB$$|X&YQYKNnOK23Ig!VEzFy|ayE9(vn4zH_D-MMnq3UD%&&JaxI8ftfRohUu+@b(JAZ?Y z2@0y~C%)3w3<(L(aMj|TPQBVc?(3~q1g0C(>7!iGaI&0+7Xw#tF`c%7Yh=&Tv zG2K`h6FmI)1>Oye={E(zS2uYu_F8ARngOahFVAq)uT)ce)=Kvjzhe2OfY5P zCnS}QCG}TRGKGNx5-@oKazgrTsh{T=6DY|(AXASp(iK-FQ9}?r{_0vVeYp>QsC|b7 z6;JxW)lMxbp~JKj$E#$oxA3bve4Qi#owfJVgZ0kCNHjxzT5FyRAPBy#eidJpqler$ zwn`$~mW4sq($ZR)R@E@&N>T9l*UixBHG`m7>^a8!>T#_*xV~OUjomT(wm`gnd>WHw zRIpc4=NBIPpv4liM6yA|5jKYxjCG=3!QKAIFx)f@@fw@hqKOr9`JDVmAOQBW(gLJC z{CEBuFa^onuQLqS5K29Q*I8%e`>~{+YVYSj|2JEgcH*fCq3wUo#!d;a4(S+l3;#2-{PajGTS|j}NgA4t{YL zf{yOQZfWr5`neRuX37yozi{53p9#97!dB;JAZDBj-essR1a5@^S0IH=o*I4CgUqn_ z=9>L*s=vH^YmvZ97kB|zk*v&yh8)qpg7}oct`F<3xnDy*;rVAy7ov*f%g6v|W?;VO zP$ESMwJnvlK@wWoSS0fE;rTD^VV~=c=KwbW(GM$`b`uUcH2Q^YA`H&rd>Mis87(sZ z9THSL8E#)El}vey$_+o;xUHP`J1rcXdhCeAV46jX>467NHhcQ|mXXXQWeN-J$BRUQ z-jqmTRS?&3;Q!po>@D1P86c1zhf?%;|7>SAFzd>5je@?qY%7{$Sp7b2sGYdU!z)XYQ2AurAGl3PB=$-grRF4$B80fL@*1^$%Po34tD|s%r=m$ zh;n2iWi_Ra9ev4P+i;*vFjy{%p!bDhtOU*YeL!X`ELssbB?on{N>h1< z1Qk!2|GyE=4PlxfX43?^u{G6~mc7|FsHOIw9|AsGXGSW46c^IxU;ktafq>82^+io4 zzo3W16yD^4XLo2>n|WPMyk1yelxu@eC>cH=bKL8FBQp6ZXGia7(>H2k%EdC@J!)}) zil@ZyI6c#-LUcWs#xy_}U#<1`i`^n|-@mlj@|Q-bs9`_L0%wjPZ!*uHin<(mFYBNf zsF}Z~ob<^a`(sglje$X7d~e58qk!cQ3x;LRkNW$E8%+EO-S?rbRAxx$+tDj5@dXy4Xl!NRzqOS;keYt<1xcud{M5PrqH2ayA<^7G!?^P77=SI$AFiHD>%tz0HM4cqWE1%|xnk1n%Z|5GTd$*FH z;>p{(Ih1ywKBXJY787gqhxr$+`Y_ydX0MhWSu=%1PTIEqTxvvAB>7aer zbetE5o+gX!JHkyDn0T!`(&5;EqZVz~rWN?=ZG$gET(AyUKd!w7!!HXD3a#Q%=w(BF 
z26a47Xvrd715yql%(2c4zoQ4Rw6JAIpWW0D&T1IkEnjodsW)?z-e{Zy0c%9QP>$ylKEryK~9rUb+1#OsLWI9jnFYuWGO zu5&R^iP5*kJ(GT=h#_UPNI*BYEhdzE?!xY!T*NB zYkQ-ww;P9whez$IN5pGt7+b3s*kI=L5m^l7dF)-zAP4= ztcZ%6A68ufs#C;OJ2rIpimVi#%Yyl)ra;FoleJy5_j3;wE)u%{ByE+3!e6sH(6G`L zmdiYnJ%(3DYOU#ytCs4R%fMWxS{y)Tn8$^$6yTLWbXG^KM~}6iQ+(qlg}snQbwSeJypK=!j1(xX%>8BURL1W z|C#Lj31StdqXV^mnB>@zkC%fT8poO5`2(`lOKq);$%-uC(XXFJl@6+xgRth^MBVMAUqQ?002vU1CFReE+eMKY|$JZCp*W@e^ZXv~>?T1QkyT zwLBrVq@P$pOF=Pf=|6u0#(!Ih8aaO%i{A(l^M)sw+%7qM7>2Jfkw<%EPj1&hI|GWh zbWmMBljh4}a!~i^(T|!o$h@4xtW$c*F_XPC8g7!NoelH`;UpJPuwU}n2(Ye=cu#FN zF8rTPXUP=tn~*AL46Nep~i^MBebEEBh}tyj}}0vIrpe+NMX*zC(hFC&fDr zOr$!K_J)6@tp+y011iTtK8T{8X_YJK6O(g-oxxQdtqWUJVe!gzo1lYlvX}B@FtN+W>xihi}vNpn!Gqn;kf7rE31+^P zkH3dx-s7k8S$m26%v!f2L4;`EO?fyVA3aC4(XLscId2)diN%fan{8kh=*9!5kFAOc zi3N@EauOQMk}IqxD~Ckfz6M4gd(CDl>NF>)(r=x0n_`cn5B~M~J1Z>mH$^Zp!H%u5 zU9O_5AM|O2Wqx`bBK*HuXU~K~&b=;2aXXt0&M|+CiU~Y~kv)Z0-{E|R1Qk!%1I?-& zKSoZYLQ2aY5T3nvob@URZzQMfh1X zN2eb7v#{{_zz@4AgHqa$dTTz;4AgQY@i1ZvAKvfdxsU12;VYD3z8mW01Qky%U+$m9 z?j%nu!tB(wBV4c|{om+bgJX+v5^4&JHahzTf&ggHXA&W9BkkKU z`l?-$fV5p}jR&~brQr_2vh_f}p(z^wGqBfH`GSGtXZw6OKK~k91vt*j^_Ay!&?Ca6!X*(*gW|kza$y}tfW>$7(jsdCO6lCv#Fj^pgJaB|B=hx+jm8`$lv$nbm|C!R1Lwcq%?O0l!9s6!2LB*4J1^8vi z)%7n-hBw&C7Ug`S-i!kg&UK39B=7Z=XNCL9h?-Kh_tMbrR3VeX%AW;vszrysXezy( zHpGBcgQaGItbs(fkhSl!q6>L-k~=+RUZAuU)o-Li>61(g`?wBz%U5`3wmaMJptF$HeG#&KcDo+=5xFTE zdqd%cur>o8E#exjPL{U*Fap^R$i^I!#;>m1Yc&RfQmM&XG_SVl2mc|tK*f`ot#?$> zq4a|FY|sNdU{onQdb@e?(N;NQ4#6~m`&B8Sq_8fExd?g*HsX+tu)2+% zu{_@Q#hXxXgr@|>vsTT(Y5;kiQ9(uPVdhO^Hc{-A!tMF>Z+||fAObS_Lt~%_bri{W z-W;#6M;HL{ZDjb-!+HL}g}CagcB3@(&X|0k=JFokYcr}B=s3N4NqCjO1xl1I@t%#0 zBK=nc5oO`j9?Is4jZW^J1ee%Ho6E(1*|2aj_L#Ru z1#`*p>YXo7)yq)Nnx7lAXDw4^p;=wC=6jXnFa>b*m(5g(AILp z3jdMFnkSbsLGpZFv)IQfKnN`V{x= z7$eBFB1iD0Uj|B_v5S)Z{(x+WcWBk5z>JQL+AM!;m+Uh8om~A62`ZkbBgU}4YN=O^ zhBZ+-BZsKC@!JAklmtZiK>@-`WQ*OIoRBYNK1FdXUNOc}+I0({SADVbg1CWN*5-`d 
zcocEkGIn_*C`~kUV6brYqrJ&y*g_Lg>cmk66rOmtpI_?n1h9j@JpuNhyWx;FufNa--8fq7PkYmL_@ajIn=q=o3|N9OJDxRXa^{I{yj>X5< zUNtq^Jgf;_GQ4Ev^qy;QcHQPp&p8`baD^@>(i%V3dL?gb`y9|ibJh(7xp1=5W1ga& zWZ=`Da#sw>ywKXiBcrElqcl?ld79PB)UpG)yMON%pHcoZA{>BmuRyH3E-NOk;$A`g z$#p=*dpZx|>9*SO$al$N*;qJQWsbONLi5WgE!^-05U}$A=UhaNxR0qS>f~{E%uk-Y z9DUHt(#SFUE6JH!(dPrQt8B|;$rh?8axH-__@vA?nknJkyINeJ;)&72_}g4vQ|pg4 zNTnFyKPB1FhT@3vIR~Gbz*P|D7!yh{e}7ct;zl!If+2*iPnO&`n8I7l88-p8CPwWN zzgrL)TF(hIi{LJZ5H+++6EZ_jrh#K~Tb|hDSS)8_PuJxRk%zaC#%|CH)@l9q%B6LA z)o0o|6P3S_lw`2%dgry-m>EOWh1D=XJCx@KG1Xut5HRueHljyRd-`jlJGgl)h0+T~ zi$mfs9ieINY)s@R*@-hNYWfVb5KNSMrtt6;;3ewuysQ74<-J%Gw z#KD_Ida0wC89~3=;a~{UbmiCTbH#6?PQR@ELtQ7&GzK33OmgFT#zlk_T!=HDmr!X& z9Vgy!yalL&v}tVPn%L-Wvy5bs zx~sI%WbvV#OIU$_U~Ie2yjvi-Y-EG>md^NoE)n+t0r7PH^G-2Rb^$1jhAk#Lc1IG? z!r6tMZszVrYzOh#4=wz#UBWAB(3*DK$ay4w2>ZwF644AO2BLdpBQtkPDl>LAR?RJQ zYe~q;PhxANrGrU=*~X7x(DLt`5^2nDDUUp>W;SEe;_xva)!>R?lKzdvWsum8^><%J z5ko#?{@hv_O&I?(g!!Nfxm7v9Wvq1-ni>Ff`3f2ZC04D(uPC)sx_FegtTve{xU0k` z^Qh+PVzgz27SH-PGcacBc#_LFRZ!E$`w%cF#lNO}{q9{YE>Q8rKLpHTP(=>PO@3?9 z8EmkZ(`I#n5;!gXoKb~Gl;0n78!N#tc@{weIorH*AjkqDL(Wz>@6iNg!OHh6{Eqt(e?Hv|Bs`D+ejS^|CX(`g)+rw8{fw@9@iICue z5Ef2gaE2gR#7_RfknM~+<0pyL3u1Y8dwNyuK*yb-BX@t?XF+b8CQ7 zDQP}H?p8e29*8RNVI{|kgh1!y6S&rUVXk}b3mZzvY6kCC5>z~83jE!hi5e&q=ZAPQ z*?*YWO3%K_aXZKtQs59Dh5MXQJ)L4Q6hBxk(8mUL;GUm2*#Nn5Z(1aMS*M?T3hg%UwwS`hBd{-_lged)a@Y zv2J?OMgM03`KQC2FnNlY-9cOXmQ%Z&;bG^Ppf{Lg7OcC1$zo+0jRey-{ z?*sj1CJ$bZ^Ulyds@B<0iSUIW9v#DP@}$G}woJb8bxkv%H}GvB33Y+C(4V0B7{Z~# zE1Bl9uBY(MZcRGyYX&b>|IkiFm?%}cLi?(@fr%%s{X>kYY=vnkSuI>TfBb<~fnA2k z#~K&4=tS~0DuRRbY^$+>@$@wC-H_dy}n6 zBpD%_Y_dZdR^n?WQbe|ltTf2T9%Y2G3Rx-RAeDsY{GRK%p0CSuKmWR3|J>IH=l#CN z`&jK;j8McYo;zGmUKsXBM89{MvbCsZ8+Fu^;o=_2Sml;-cB9%fVBBRJn!G7WVI7FX z{YPm;OYj}^2=76;%f25@n30Y8u0>tpRJ=&-(behS8LC)KbjzKbb9+Ycr@P}RoG6p7 z@=?M^M1rrwOZ&U&f0|DoO(+Y=s18qjNN7FqA+TLg-0;Yl>CmO@nrF~nY?oP~tgMNx#J&hC} zMTg0M7BeTBG;}jK?Ek)J@OpRqgiNFE`sk3#A8%?EmrFgd@6()4hx93PUdlZ7=hw*4 
z9HrzQ7SroC?tAMGQOK#~0dsPZ+Ilye-5y#>(@ac*3AKz$f!cv0-{Jl+R{nZQtP&n$ z5t8-mRSsv!!oDK$il-`QIyZm$9sNmXaW7$#OJ0-PPmNpCkIF@x)>uSEujM(ioKnI~0F)z|WgNG)ccQZT@mbtd6x~uDCaHPpo#|dN0 zy38peg5Tq5oOKdxxov70XVxUXJsYRXmXw9N7A4?qQj$7N)ggKC+;>naXWnPC-AnrI zcmCjqpY`^iB~*f&jQz8My%*E^CgydR>|}e`BgeEo5ucY*6$d}+(| z9iKZ%i%ju519ctU=H1i7YE&n37=Ltx1Q3=^%|?&PH$7i>&8j1|;XLq?L&Tz(4i}le zM!_uRDF@!Gu6oRo`-;RXo(iX549AZiU7;kPW*6L5%6Vu)e45QAdFa}vXD?y)&AbZQ zyJLwXWYNn4atl*i6iN8??2TcAtb1De!x@Z+NJrN<^kSq z7|g_KBu!)4P?5*FMoh9DdGo1@1q0h&Av$%g9;aE-`&<`4vFgw z_wY}cp5xLlR>xw}^{P_7MmXo*&^}>we5b`hn!}E!EqFJ^9N|KyyuFMM6gjHB_p(&m z*p;UGcVm~@BWq)yUH^SW;uTM7F}wYJzD?t-%I^)9+>s2sbdB@Q42kbjAHf57Af|Ak0oE7eFYLUBU z_aZHaTtmONd>)s~`1XlVJ)XzrSyaZz35T<6+-pv5KfNP$wwdMIawXsFuDc1i@W%g9 zQ1=S8A|#3=tLby|-@Uq!NxSX&>s%;9$69vuP(`EeZSTb5i}PMJs(WSzIhn=1V}bD> z{7NTfR@a3Dp1yPUv?Gt@?UcCs11~vY z!K~Ji6v)XsV@q50`da+3_@&T&MdB4t3`KpZ&Tm>8g8|Gb0(drKaz23W? zYZH5!EFrIWwxeZmN?Q8Ui`N4yoA}PagFbsrP?bb-?Dv{kyt-p)2l-e8m9uw(DA zwqx(Ep`XQ;Ek2jMk>ZH_dFkp%Ge&2>V5fsSR98%My_nP6YxqkpK3Y6454K}}#^j8mh z=c&@GRX^=(I5xr?JeOY{igGTb;K9#Aig&8-^LQN7(QSEAn&`SPW5YyWZ%K=J?9A&R zC9LpuW~!wo!qsra(7hBZqHgTAN7in&t|P5&x$;PHY_#k=z z!cW)Y!zP$vVX-GnWEHs&%kbMDTOWv--_(9LP`38v%jQ19JLq!e`57{!qj!wc5^ggb z9H&pu?jjAH&Gu07=_iL*kA35Qc2<91k$A-uJKyzwS)WHhAN`;{ zE~Ci7FJDtC|6V-l)WiF(v!@D#kKFBXMf6Oy#ZMXk>_{r`|vBPt`eMm=@Zb% zo08g^Z_Aa$md}1pgDp9#`9W1x<=}7Ek8|U6RQS%oUexU5_nj45W1mHRyVme0&rVsD zWgj}`wk-Yku7Y0O*UV)ChU?>kjkT|>G)HM-$O(72EuP|d)w&&Nf~nt*;T+^V$4Z#W ze~>(_5zza`xrBm(!HRO#h{0L@3|WDgHWp*Ab~tspK1;kZHN5rL)R6O))ynLPKH23v z-?j7fVoBBy{JPG*Rco?Kt$gt@Hdu7AGZnjBGH1T8NW9{SMVyua89Ymgj1pQ6YXb}A zV)x#RIiIMPQtuY+BR&4D=6Acl*iewmgsiP1;bc`Zk4+0}Ut?{%do&zdr) ziOR;0S&81!@aw1zP`2QnJ62f9&oU^Q9(6W5i8h!TSI_(O!RE~?r%n)8G9Q)WF3fe= zyT=f7+|#wtss5B$Mqbhl;ek3p6-0^&1 z?AlM2F!wZF%7e!*i%}O*y7e{pvQA^)y>^lhJv-jGeMRCGPYLz!Lfr_D`qN$WLA3%; zn&t}g6GikLdl$$q`9*!tC9<2H!>t5f?A2X|kW(ftOr4d+4qdZjq~0KWrq^XIV0@T4rTmk$AqiZ zODDHe=30b*l{LLgzP7~5bpB&&wrPhM_DJT4TLvwLIWtj;K->DAdQHP@=#=5rOS}^C 
zbiFp>@$JjF-G81OBAe&74DtOMW9K0;uj0TH<`Df6i^cRC1W|eIOFRAF{w=^D z)WFruWR_Uk!_@s?h+}Tf=rGvcS-n~apl|4qs=PjI|?=EVFy zq8_WxD0PhQjhqN>@?EbuEs6gw@$uoenWt2(NnT}CJI&BWuPHFz8jcPoB)%NyO-=rL zba2F9<2WCiziO||dVN{XUF~Yw`OVyv?#6MJn}SL;j6+wj7@`=-@M|}gSSv)xTAF{5 zT}TwN(#VUVatd(Ro!Jc-Z8%Wmn(eYXr{^#0Un2>^J?7qrE(lC+?R&}pD4vEv&kZ3H zk1*FOZc)0knq{&IvQf`oHxI=HtbaPGrWq|)_?$GbRXIbi%V<%CO%*Q&Mo3IjlnofJ z2Kyy*nIsj=Hoex@GsdngF4GewD$BdpR*H+6xgL*7R%;I+jhR2k{IQxR(1X=aE0ie8 z@HzMLb?xW#jZtaRPJ@_O)wZ!T)d=%oA)YZE3Zqj;#KV;FR*SFpd2P^r^*==>57&EK zUTRxna2>r)XV3~6^B`M8D*1sT*I(cA7QGU$UHeD>LC9}?S>8hXeT^(7yyCfQM)&q* z=jnnJA0}s!7ik{{V&o&4abAWuxZHP6j!d>u*wb6S8nTUkB}z*}kTvxPzZg**hIafs zoAffeZlr1!mIZkZh~r}K3f9QQZq#RyJX|j`3?MbMZP633vOKh55J$?OB=jSy@d%-q zXY1!0H3P;M65@Jsohw5>Bhi!gM!bXRc~mwdJ{;XpG=G)JkO+TuS7N~5MeilzSR}2{ z7w$&?czr^Wz|iS|B<;xB@pqZi_@vg#Gfs_iMONx=Vi?Z4zwUsUssUd4_o zRJXQjJzHvGr=VVU*qPaKoNY%-`9P5y;x5{kheds6T6UF3l#Dg%UI@hOD-y4GY69h+ z4GNhvotFH3O<0~aEigpnjNwB5^m*x;!!lN(O+mp1{j|jVk*kj_1mDY=bKn;U)BWkI zWw1QGu)lO3X6F%(sM=Y!QE(YN-CZ!_xKLh>3>M<`)pmuK zjvNhBD)-%qs`{E;T=aslW$tyw2TEL;Z=CQ{`NeO9l!n3~8JVo3Sd1wn;SCyoCWg!3 zn@u({dV3kHYhs3X$sK>)GTLGc7jr*YWYv&~{<-v&n<6?AF6^4}vzoeT`-;RXoR#-8n*$9=asOt;eNlb%7Htd`j}Cxq|utd&x5 zPDMHkc(mLXG@`ORk5e6yj z?)Mwj*(X%_VhpVpSCer>!SM+voIP4povsD(v-1!M_$!&qUZ+#7!(vKhn|#?nmC*Zm z+UqV!3hRfn;Z8obJuOq}tn%*p>l-Eeik!i3N&b?(bH9?bW@C)ak%pH2r3J^UH*$>B z|0WRPB~NPMp?{c;8!(yu!Wc4r>+Db#Qv6~yX4)6a^oK(r`svK1*9DQDK~L(>DG`(} z4|U?Bm7-1b9!LLeiP!e+ldUN#X^0OKsmUU>mogchpr&6h9vQOZEU@8S!qR^j4!s+^ z@tXY9yRhVMJe}hN&Tj+@7umG01SkC7jyBmJGp$v~ZD!v+;tF>?heiM*%xblr7-{!%c zwvp)jAMCgBCux@K7pzn3@+=y*yT=CXg1UZk1=0LfQwh@`X3BbS4?__2XVZ6i{I-f) z_D=eBUkBu=j=7jlW@y}Dk~HP^r9)DGHcoo`Z_TRW@1{wndJ=MF^=r*2JW8B3YY$r-~t*66fYw z{qi&D&xnyUpELd~DN}aSiWImj$R4f`G*>zBrYnald;iq2KJ!eLDmAP2&0dqEmc#aD zxt)i?w)*@K@x%?>60617OYs2akHmQd;ZFkwq zY6%0uP%MU0vs-j0o%2v@BTeI|>ClVO4XPiVc6Ns!q~G})V_>}s1jYa& zFgFi@L30R9WFdtL+{z#{Ns6oDs}2)s!| z;1M4Juig-NCWgSfDg+)Pp+x9%2PHzME(p90LEte3e0_kxb$tZxw>zLyg234U0-Nm!>`o)For}Q!CjuLW 
z2<)&SuqB1S9u5MV9ti9bAkbDL(1)UR$haZU>7WhB8h~1wEs}?d)bEPhWlzR!ZYrVw z=c`R9NdD)WUs!J4S`4rJeE2^kC}?035;-6V3Llt+#1Ke=0tqBR(FBs9kb+4_T!ADg zxIhw&Wd})6n1LiH)<6;za3BeaI*}Bxn#p5;To42^mR{1kEK#f`$_$K@$p-kTC^G z(5!+aXkcLyTJ;4<(CES>WPV{1GQ=PWnq-g!jWbAsW*Q_xgAJ0P>4r&YuL>kVa}JWA zVFyXj#KR=CYXXv>*#}9m0RWO$T(8VVo@HW^?NYCM1>IM{kc>=J|NArgTeljcYq#&p#m6|JQ$D1?<-Ji>5ylVSWF1JDlOz zLP=dEvS*f=Bwv`4LFlR4ticPoQE?6sT&Re)wRAerI-)QCnuj@COA;#mAYuMjgn%1g z^#AvF$O2N9l&;K)BhVU)#W!d_Z4jA-!&pJ@6*~sp!Zs>6Yo;rMX@ePq+W?$=u3ETn zU>kbL51e$OLJbT_BLg8fgNMqye1F6hE&^O&;J`4z2*A9-2=EZltR23XyErjW zwzK}?g#m>Ch6YAZTnml>>E!qsBOCO?A~*sTczN$yt0X7x5S`{t@@s51%wQ!92E3)Y zgSo4&m+?hUy85J@$ys_OkBP1(1xrR3nXxTA`W;|Kp z=f$sKalEmsioypo~M7aKu(e z3b*NhG8O{nXc0>_KevNO8m~_iy>Mu8tT}r;zlg@Jv0vyo+Il+sleYi z9?Ur-a^u7t|9YG6TtDW=#&@ZWs@F#JbPlyqD*9Vj6Qcu%V_G0fc+%owx9 z^aT)%nrD2X&5yl|EqwGu?|A!^M5TeR#@_^MzjV0moK$EcKH+niTZB(xwgra4;-CQm zt{Vst7e}ymjY?77I+#=)i{gGE`3lOIu*;F97^g!P|H)Qdrf)(H=ESjq)kseK3w}IY za{rUD5->NCG87a3{rW|Dt(|Wa*{=VUt~3Dh)Bc6&gV#=E>50N}+7E3(W(vm!{1Vx$ zIxjbJ<3OKL+R}?6XJ0&ss;-CL@3Vi>3#{SrGgKH@hQpRRTM2ZC1513+$%~ei30^+tuTzVW4hvfT99Ge^r6avjqa& z3C#lJYbF0!?cr!oZ*CoTlNgxQ8tOv|P(YyFZXA3FbtFMSYI6`(c&>{)(mHA{Lp4|oK zPD6&=xRHST4)(^Joa$$o)J>b};6mV*O`ym#F>mtExbo`=BMOapokBSzvkA9swgUth z81OVoN2^fF@9WlgvoOl0oZI?q{+SyH6~#yRr1ffgnjycVgxJU^MP|{(Wd*dJEr5V4 z?@&An&8pq!e_U(lZI&1#U-8!{n>umFSZM+mHR>|{ld0ZSV4-pFDxN0LQZ^9fyijJ*w3$F(1m`Np}pZEIhnK3Eoyoh!jED&!1U3@ z|4+t2z#KM7)^C;*!0@#Uc}mR4^^ewA>kWl!hOc3}6V#hfjLSnHRCKU>%eSPTmGHNJ zKbL_%XVb|s260t`SvK4$((&-B;7Zz%R~Kv2D71txaR)aTEPFz89a2F)tD~v=XmW`& z@P4hqoBbNY;(y^<6b#;iA}^&wFdw$B|{K)~1W zpBMbj5=#(t>QA*2(06x+%N0q290?n;5_8wIk*n`AbksvjoTm%nVFoHH*KgHPb*H#% z|C4bNFo(9w_n3oOi2DfvR!J+0m%RX975Inbb*;DQc^ zWkGJ7mX8?JF*a0qP16n*p`bQz)JCtCii*63^n~icqstZ&nE?x%=31$o4;WjOX(z_T zP~QBy?xaO8nQ%&jaGWgxUKe3$Yg1*t8YQ{>T;zpV~Y8)+nP6v!e^}i zPsT;S91rtMpN*Yq6%XZ~Ni_tvGyt{j>m#W6)@)!$XQgoCW34jLzqRiKh z435spbQ7f9sq4+pz`emIbe+|>?p<0%o%M&3|C8+mMy?+VtXa&FmRv*MV*3IKP1w}P%*F?fpH 
zIMmP~d@`Zta|;uLOa|x8w~>NP)eS9N3qp(j7Zv3{way$!Hl5|E|Kjybl1JT>MPX?m z7{1;1p$9%f7MDlq1*yGEn$Ouqg2$S7OXQf1ur#X6If5Oy_MrlUg_({z`GEjH5wbCK z_8VvM-3a+#J@6QssJzEb6~IOFQ!JN63E0`YjEH-)p;ok=L%aDp=-B#*pkX51`F}DV z0_HmLiLrkCsmn!?31?hvq0GFZQmpMKD+g3#0tO)juL^g4iGRHC`Dusk(Ab*v=SAek zO?jc+TJ{$eO{XzUuM86{=8UDLGjCgYvdhTs>DO4vq38vn_OEM=B`d7(uX`=B5ZY#s z@+?~kiH#dTXHgP8xcQ2B2M;M?PBAW7fp3r5QJC1b!WP})4HI3PXBYR;KtSL>v7Hgi zv2180DC4Cf?mtE-v$b~hu~1m1LU;_8`ECeHpavmfHnPYyt&LtfPwYOwQD1K2xWN!Pfp50-o`0m zQ}6;DB2i)^X}H<}b0OQa*jWtPI9Q8J$!$|I`=z(~kPsn6VP*W>YD=k%iw0N_>_C7e zkqbsz(y2B$wb0+d-vaWUOFxEx{Y$BdFRe!SZsslWP9v3SE%_m!?(^HD-iL&ChD|pF zb?E~QR3ULwP<`B3eZN%a5$Wzi3{N5VuvG`o=MmjE_MKo07_a2uE@UmWvJqoRex+j;rt)^Q^-{CC5zk)(z9 zBMp=x=?_8{M_SA3^8aKRWJk?9A2W<`llu_o!wV0GMmkEspXW6{Ol zaLzvnq!d;AEY??X9Bv{#53z6)ms8X_V=sT|Y+2#!#75^fYlg%qXzX_~8Jso#C*W!A@ioe@^t z9oJXRyRMk5cA8Y+{-EE&#!y3FnX6o?DglUSa|6kjCKB2Hrk-TuxPsq4lC7H@++HG`iPy$EK^XlV7gv&6%`@=p=m96t0)z#-Y3FaQw4mo;YprAgp3&GNRKpwwN(rn<{~#v zM+G}{1Fnt5`&Z>Yv-~z$tA6YvvURc;fW@O7!O|?5-mw@%$hl2${5O@Dq8!PAOm>VR zsNd6#EO=+6o;1R*mm!#Dm5MBi_AO0pjV?n|YYgGSPjW$;kwc>S2h2kX5Fq2N7kEC{ z(lA~)>iPmru{uv8(o<~|A;?Vhl(3wp#rsZUxaOx>hq!C2U3dK=-EEp{4SRk1fd(p( z%8OUSUO6)O&??Oe29gs!b*bGL-&C!+%mV)KhX%f&!{nXq{Yj&Qzr|c?)+v&GmJc@Z)xa)v`pX>;|n*39Z(>8$II0gFa zQx$vS-)W=oi*%LgbWAEdn&TKc+YU6BjCl8}0)M)QTAMgfcn}6kJoUAdcHuld73~a^ zErP!FkI=2)o3TR5`olBin7G;%iETISCy(DX6La+hl;87xv$f1^j{^cuPXmG@%Y@5~ zg^#fsZ5<8VQ)QvOI%vDnd(E(=u4(VyX)LNMabhtN2{0jQV$lR9=e>EK=RVLtEfTmm zr4ys*_4|Z;z#ivNYQpeQ+`0qOT4Gw(fCT2$3H1kB#x&B?`ugKhWdFz?I2+LSS_Fid zQ&cva$TFPSBL>ovaJ&xCcAyY{Gx^BIG!jhXaE)7XEFv`P))e|PiOla@SZ7j>qRSWN zjD1@bxI#2m?BdphlzwYrvsMdih$Gcsf+;Lunz3o!c4!@l);RQ8?5p2~K7yA;1cXwLuf4*7pE68Yr( zw>8XDpx&91PC6BsG(w7d=D5>9Zrso3*GOsd9KI2rEWratS>8sx{T6lsxi7RJUU9xTiR3&(iWnrp2iMZHChn5t##a_HBpD{@d}DGkKw!{PjD7bG(<6S=a@ zX0)4&oWF|J`^Ryk^ZMy@oSQbJ{qi?VdIxXBfrgWVPb;uLrJY1Po8Cn-u(F(s`@@#lw(~G=jdX_69tZ!rI7UyK`myx znK&|1W|nZBla#|x>s7*Px&cOpX^2U5u3r4?sdwJEZH7#e3sN+ zW9!DwHg))zw!XXh86ioaNG*FGy%wA24V&a=D 
zm%Bk1jKW6aOD(j~y5P!$zpf9=ExUM8zj@Ij+;8I{B=B+>ls4j#8^=Eogr(EBd@Vcb z75+o-!E6rRWw}NFV;6-SrB2{L%Nm_dcteG7itDY1aupDm<_H_!hhSTU@FB;ZZzoc zW{ViBJ27ui0U2_04XF=deWG5Z`}%E*{0ADSMk0@Jx$bK%V}HxR ztO{-~u$lT5bkpBSPSgzbtn5Et6p~VWdoHP|1+dNHTzH-cCX*W%VI$#JP^@9!883gP z+key{A}|_GJbtLN+_0U^E>Zd7L|9YMqGy4W7 z2!w4lV82$wFd+>M<%pRnLY^4*Iz`P^eq&m5QuO-{1n}c03w&0xWX@iW*xBwl*)S~P zgC;-eo3qUcurihnQ+TI|fnE0ptj>>p86cCtCEgM~z!72mKm*lC+$%|nlhomQT29pH z$n!}u`Z?dRlx5bA4X2A9+=|LMQ(jJhukE=L;mt0uqo+?Kxp4%p7uBiZ(o82d+_$Z9 z#cGIL;eyke>g>BjhM0hXm_iTVK^{?>JcEIrES6zU)Cw12>0p6+rRrI*Q&n) zd8~Q;FV#lcfd@|Ef3#BYiEstfi|$5fh{%%{+9D(7=MOYcjU*2*Qz9P;*}}(bX+G^_ z(eswVQ^|NZbV3FIzzO-@zUqjd9Plwr+HrsWLoDGPW=L+F0X9OR3u)ex+pu;J8qdWw zE?ee)^tk70nIZ>!!Ha~@fT=R^#o9u{er-U|`lJ&HLM3H-w%9Yqx+fTM`gI)M;RN0T zbx$btA$<^!eiu4mLiHE_R%)(~Lcuj>l3IE!5O975K^0AAk4aj32Bj=>P=QT#8}rkp zKn|O8I9m6b;=j<63>yAnAWNNBqWsuk_>d$)IIQdUfd;CPJWrqz4QhJw?;?$b8;zc8 zLFgbXB(rOTJ?ry@io>-=DiiA0vnz<+1~_*IS0qOza^r7XYHi?kv}t>Gdt+x?f2Crn zCuBVB`fRauhLcbV;SNmOT~xl|`!`WNZCZyb^Jjv0@(>lA;%GZ_P3rQ9ID2xno_ShF z*j5P>HehljhhR~A7Ey7FCed8kh+8-95Y_+zKZrO9FKeneHfWBnb_`$L&JQE=?VLD2 zjpH7^GP=Osysw(fJx6vWLmB3YthYp(N29>H)*0Ui+W*x^sU9{}pQzmg5Q@Th#?VNl+6m`F5|B@0q10qHkek!3dwbI24ON~+DE$5dQWIfld z+~0HFq11yNV9rhgck4=oP{d7VJAp#kJP_dg8}bK=1jL)>XbjtZt6mWg!(xI4PTi7! 
zlgIg`gqX)WO)WoS*KM{`epe{{Rzn~KU4pQI`U4GABgG44)ZUGp(zbqKIq?G`zf#D} zwxc4+l(c8bsh)?vlD<%t;jY+fF>hFErf>LK?M$Zb?|@2;mgJSIrY#< z+-OT8g7)_0WuZ?lpkdzvC85-ISi;cF(bhbWRs3293^50zMqR(xLS-Sa(ek}Z70 zEr>gx2b{sGi4kCC9d2Rn%e!fSM9qKUv7r}r3SoI_31vyAU{C-S=ZoLxaB(8j0tj>f z_AC8u<<(BFD_V55S-36BkaDjywLm}vb2PgMz4~4=N0iYh4v+CSFvVS8E6ScqH=@p1 z}-?E&E`YSF;&*Ol)@Y5kSjdu zc3EY)NPl$4kXFXO3hFNqmF+J90n<6hYW}lvYQBt7TmNJ*S+tB|g_YN2zl>k7iaRUl zSijTso%Q99w9Q}* zQe&|4s7|fp&OiIgilrddmbK~cu#5MupxH>OimquEn4(j`MfAoPc_>cx=VUrpmsY+%c+LaQQm20 z?lEZ22>)bw@&3jtN>9&Za!Vq9pn+Es@&>^rq#alsCWc>gM zPcl%h)pXYnGH?awTvv-ZS#=h@%Jf%iSiAA$Fl%lYgZ z;Y>Mesa@SkOkdw`$$*f*SQL{6GWMNH~?rCxkoYjL{&UPi9)?!Re$FUmRgO;ZM5-7s|AO ztbQVzZ?YI6gui(geU8*0L9>x2A$BMRGDN$p*3UJHRP8mf3YG*IoACRIJ@T#<8k_LB z1e?iq(9JgAM<9@f@Q?XWBfyj+h<}8VvpY(l)9*v~ao*jdwSM;qNr&C3 zu}lfACE|-phr_{N>-=hR`DpU0@^N?!J^;k=4i?Gwh`vR3bEegs3`j{6S#OJyIlitDK%N|c9#&S zMV9N(^F9LJT_E6*>ff0?qa?xoKunmM1ZDY*P3;}-BAaw8Cf=;`{b=_)&0?MH(HFt4 zN`vaU`^S2^`Qj>7_6HiMM#9nvBcTCXF~h6P=n98tbBgKTCwr zZ}i5&@jW7!UkGzC?=^iM^*8GhQ#J|+R>e^bLL4>2U*^?ryTe!W4hZ|^jlWQ}BPWYV=u zm$LDjpsG@nuYch`&_FfPEcGSl_ow_z6;Z!k=U`e8M^xEVtB0a0O{6aD1f7eiO z05cuc4QUH$y9mwTpxt=_4evBYlLVE;mW!}AWaNR#y;Rf+;%FtKHq!}+v2J=7xV3$e2$?YU%07v9p~RBAEdb|)h}Cz6Uk3Dg1fA&9>kU_ z1PXV~U)(a2<1fn(mHnutqdH+e49gt68msp`4$(|G0wa>n^?ml+P+A^td z+MCv-DFrp;{I8nW+@{>OlzXqL0gm{B_J)#xAtyGa=$!q4b+6@jnrBOEwc~-}bSFbb zK<2Sn)^-iy=m#38M!x#P>ZC@IENex4VjyuP_QCM|uJ%(g^k!7Re39tR;u;WD<_Q$- zHxOYp1qUliHX%1|afhvyio*@_kq;DAgEfs%SZw+c)uo>sE&$h}Fq}u!+xwG+0tbZ; ze8rln#`*6na^N>AV>#WWvWN(A^qE!qaOmKH4vB!pEOpm}gJ=6V zDtLtzprID38J8#cKmu zE^}t$=V8v&|7h(0w^LL0DW6-@0)yd!>8Eh&4YYBuQ|)Cs>8;L=enU{Ua{l>JA9fdR zab9&yc=s`@p)zPNF1F=t9bc?P-GCJ_0+m9OS(PrpE6($36>LZ5_KU}!YX$X+2d&|*$C~^FIR(AIVs)O{ckqn2j8|gNJ<%iem)vnBB3$ZLFg}{lj!ySR|T_Vj>9_=k7Kv|nD`vnR~x)7mrAxM zP-m+O*BHy7Ib{6?nRo{7wx3~T0~rxMF`JP5moKR z?kzw-!pr@2!D-T7(miM49G$qP>tk2yj(XUrF$RXuw&?}OJ>lx#Exa~l?yZ{y6EBjBI%Fjgm$>t$s4J0 zou=DNaQ{YbT)weh)hT14jqloPr(GDWzoLiSDw_UZZO6Wz)=E4cOB9~|i@e~v;(@%H{!Veu$@aiCGi}A9w3-N<<&rC` 
z1<&noq7`!EO0O1<4rO+TW_BnH!#||uLAQgQS5{%^Qi5Wr5!6tPYJl(6L4O>cOH++1 z;gk!~a7=FaKV?W*ZHp->NyZbE5rNT{<89uvd2)XC<)U(RRxqt0>0gD96Ul;b!Y-4C z1_IXE0>*;kUD?!vMf6^}4E6k7F>I=9DkP;6xL9Ld=D6Q!fkw6^*vcFw`^=J{XFv#e z1vBW5548WAjjR}stQeMGm*XjHZ(X3>3Ax~y^m5$Gz{=J@{!R<_cVY0U$f)9sT1pKPugokfDUAe+MOAb(>kZn}p$^SO&{?&>S|ZmDu*8RylMg zfQV|#eij+W)aVqWn=P;-`uBOfuWw@Q*|51&H; zjkq+e{Gk#W19J#Au$EQfeSKg|kRhJHo@n>dCMNt&`$j)j#HCim$BUG#__w;yfHCb1 z?E?)|BTfAQZOHyyNs{m~BkP|51!$+L8by zLZC-cbBcfm=~wSD`EQMPfp8c8Be*~Fh@eqT5x*+xSCbb?Z6t4?_^M9v;cY?gKNeiW z6E*w{$88n;xt--Kd+R;bB278a0%ro9GYds2iN&wkzX^^RaVol*zfu%)=P~CB+Ns%I zz{_&No^}&aXfYu$rh~@_wj-%6tI4r$V;hZ|D0-(wsL>)mA{sl+A=s$G%wuX85A@xC zpn+1lF{R z;5m*QSK^-(WIiHqG)r;9eAw}$<&K40&OH3rcUm-xIkl0rLAcI7)YqBk*R8OWN}+jJ`Yiuc%{f%%TQ_@Yl^no2mMvmThPIH(dMT%^@x^wnzH4<79t7PX zD>|tm5+}>Bl|I>g9qHHz|DFgvtZ#I6OI)RGKR$H*1Ip}QcUaLtW&N1qC|^oD7M@?D z{P>Jw`s|@9c4OvD2O@_jd(U`9$0N4plTL{})bw*?8Q)Hos%BL|2j`5nC=gIu({1{C z{5_smV?}Fs+7Tu}Vh^j4@zZJ-S=0htF{$r6EuIWc=$3pOAFtcdt$P#wc$ipC_yY}8 zBelO;p%2k5oH{icm!E006tHM0adk-qh*qAs{v`E@p5~PpgaArkQfbg%O^foUq(LLr z``rwY&*|V$c5Bs2_wVRv9OR&<=bTLUzWcZlsIYqc}g08^D-q@oB#EKWC~u> za*6``GS^)`lATMQbt(&4vqIsnwI}hj?JmqcqN|&F!HDuoCwL8b%09#Z1k9;lDb{T1 zr=!}iQy<5;%5a*8BL2+JX~CZCZeFRPk@=72Xe+@}Fl|F-WRf8XBg<@VVkP}Q8V9IG zzLEDbo&JQU(r5nJY?E{RmBE(vNj|$fp)Y>B){r_b9)8WhDKt`ZyjO-G4r6o`P zYd;dQ#};!O3D@q?Z;5CfBpJ6(cEz`xoR24J`fR+M{Z!sMF$2-Q0X=X)02v0#HJ#rr z_m(ZU$?R2m#=Mgp|L1;Pb$PQ9xRnTv%KvCXBv_6_3Jcj-FR{FxDEIpFh+rRRpc;u1 zbC`8*sz$HJlXsi+wbscnWoPi4gaGtMu7X6-pF*=qre7jAu3fu+ zGY4&PeQcv`4#X1wf7i_;@I4Osr_il0;z&OGlny2S_$_ynJM!rw_A=lZg zX?xMWT`ORz$>h_ROPU_3Orm&~&>%hm?PZMChy||L2Wg8-cSfxTcE*sK~>%O zSK8`9A>nDR8r;*#SNq3(y_L#f8Q~c4m3aswqOVV94LlmH8kC!#YL*P~P$4;{8Y-fL zhyK36yMZxnQ4oA}lLuq3adxZjr>gbx3`6}&HMwW4bWgDrgJ|;^bdbpeQx<+)Qt4Py zeNt_9PV`{0L~4>VAXr1xL# z)Q}Q7OgVA9O7?gQzpBI6N)pgndp|u`?>r1gG1R5CY2Cr~_CRXvj@q{d;_c(pm@J`!y^=b= z@Yn|}{4`4>8$cXpb9lj6BkB>{?TZM-O~VkcwuvbkUnZB&$$ta_U_UF(L)ycC=dT7+ zkj(u$&2SB&)Gc_Obw<7)L+YtU`c6wb@zjLzegLltICwQ5?TMF+wr~AF1Jy|7CxlJm 
z2AxIoHkAc6;aBdB9QpRInGWXg%b6{0LJ^&WZK;b!PCtOhhu8-Pzqt!Rn^0o6G_#tMW4tGfoBXGF0aSH$#ETkisTU4Zi9@W>|c)O@264UtZ#x zCGgS(Ucgl(E3%;>hqbRDJ|(d0!@6tkSCfx>{+ZQsv}Po0KUmv>z`L0XhSp!m1#y;lTg7 zgV|fS_cA~rJrsU?{0V9GLa8547m~^-kAYi7RWLcCW6Dg}Pb@b>< z{_45|WrD#{Q8>LX6k`SGtk^p(V}8Mkz$r=C_q%RJ>&+rJA5@yk2O6kGO8x&0bFK^1 z1TmW?(2cIDzBKR6v_dVm{qmS8^~b^SH7ag(+Y6rX_LH*re4W++(J~CK1<%fUov7|; zaJr`aoUI)g2HXg%YXp=#vMgr92TT@vI9$Q@=1DZj&-=!7cW~>CJyKe80a*^FtKh$y zdcUdm7}RoFnw7UKg-(^it-#JU)qQ595=e0&eg6GVwh##TtX)^sX!09+C{*E19(Z<# zmbH=B>BQ@W^+mZR_=J+-otEQX=Nq2MPdPJkN1MK06I~{j`Qf3415_hF{f^T!jmkyW za%oHhgz?o{wqEQOhePvhA-BV8ZWRL!_ zD7(hMpfJ9-W2#oba)<%LGUrF#`r!r>e?s?ta5I$|68s+oG9Oe=Q-=9?1nqXzGE01c zMF<*ODfpJQvIkP*kKQ23Rp%cBy{tcz{VR2j=D(c^ta^dY9mC=gJxmExe} z_7E}MW}$B}L$daEpCLYwB-vcEy!ADj#1w3itOS$(G5oG7#0pi+0d89)z0Y}Z+u8qs^Roez%2DxAzuzp;74TN159u!)|qtMHS_zdWHp3ss-xCW#g zLYQNn8Gc6(U}A$wcY7Nj*Rg$MT zkW>1;%wSC`*&w+g5a49|A;%2AA_g05g2M?IdcM)nA?V~nnbX%`c3k6Ppc12RjeREl zN)b)UXpw+!Zd*(!_r`-8>da4bKRtMe`|mjNjNt_|tm`iGzQK<};kCWd*V~On#lxd^ z)g$6HHH@jz3#>PD`j1u&<$3H~#vlj%L!C1h3CAuubNa&t$qA~FFrERWW(kI0Z+Kc} z-$le%c7FJDu)39%TwQC#r{t!f?MBsL>rD}Zv*!e0CSo#*fCl4{L4pZ1GCSG=pcBNx z*PS=$M#L0a_5j;Fy{kcSs9(Wwhjk5|lRMmwJ(X6*$)9jxe|AMcl0fYe)CG=!A;p%+>-_Z0v>+p8=3Gh+g#x*?vX&bTko=Q zcM8SYdD_SSi`S2`d0(}k2*0{g<~k8Z0zWah2R#(|(#C(d`E!D5Bt}HV%@3=#0M#k{ zstp^uYgtwb&t=|xLsOuAm&w|$$@{sR3Kxl80Ft&+L*cL49q35RCYH+_l0Al3duom8 zkE`aY=*z%dwb(1fAAMb(I7K2er%5vZY#a$-m!P3K{ytQcZQc+`kc?79Fo0pawncgB2i{tBysB3Z-2)A#=paDXjP3|k-i`v)TmoT}# z*B0jMAtM8hsjYGr#RiFD*D^fy)7QgVT?l*KA2l?h`;snLNx|0_570`ePkqS zdeoz)4Kgq15bLC#a`Z$GjfR_~X-7T1K^VzJBWn^$Y)}(-|^F{HF2J zo7r~%^Q2}y?5kob^Z-guVsopQe}`1gB7X!}1x$eey9}u4DhcExSA#jUqT!l7bKR%Y zae8bG$cOl9l`YHO|7bTn>B{~K^{>~$3oHW2J+|qQv>#}o8cFd^0~4vrq`l!^Zmot5 z@PNv(kPo7$V_M-#`o!d%U}tbu3;9;5-=w!nO}RWO*a~{P3$(EV{~KRF+e+7sv}qz+ z;b+TQys7lBwl74dY7g$!Q|+fe^*C9*qdTHzM^%m{Tl{I;B7|&Hs00QvvJ9FRA%aAs32I{Ff4>zfmlF61a z3Sd=@t)I{nz((I`l{2jaP^abrb(iXSW%~=lkroA#A84Q&`2}XanUBARWZvzk@>zS4 
z{LEUnJwb$M-%WWaARj$PwZX1Qp($?(x{<|=G0rxy6LcGa)5liDgv5fzcsU6PX2}&+ zla)gvZd(PTkGW>IZ!qVX2=U zhY0@`>&%((w{x${5!{X@gLBLuBVqy%-||4GeK4__rY+}#e>8AR8~jtn*+!VzZ!D0h zV)s7tVDYi}DfCXOUKMvvz|=vxncf&mT$VkBR^Q?LKm*lC=mX7)96v@*gF;H@eVS@e zJ@OLzwQ7SGZMySUje(4+QGqscD$fgq2RY2Y)s$fMItbG!CiKZB49}Q z8JwWSHuP^74lv(rf6ftMdM&hGE{R{tH{gN>2$~TXUS3##<3;#cG)E>M`LnR_`M?jm zD1%bkj(V#9+sm%uNaA6{6h6G)$8#UmoyAuu#r$y8lM_@UxqP{Q7P^u=tq8MISC4S* zy32v0BI;wEtVX?FQ}lnMdJK*&#z?3sFk0#C>j?s&L7z2**!8ryWAqifBmrr=mTC`h zuS>%nf+g$zK0{M9{%2s1tMUZ{$FH`zFns=1wsLTsm+LFf>xieFL?#=5F2;uwbph3? zFI7ASWmy;U>Lhn+I_0vBiD_EPuP0Y=;+GnXK{ihGp;^=bYph|RPmLe{!He6C;XayK zX}my53##96xzZ<@X7({1^yaVd&TMzK-$66~mVFVjeRjKU`C++98hbc1*S~%Fn1Tq%=noBnBGi#2-+6PqLLXrO#J3S) zM-S)u2N&Y1ui6dL&^x2@y_!pVfUixcUZ4%%szu=y0v9Mzy2N`nGK%!y;Y5^$lY1x| zCpJ2{cM@D;|IvPX(KDXF%Hr_(CypSX$H*PlOMIY#Y9#0;2*u5^HHV<|scAkzmJ~Q# zOPoUEQlFj#$N2!6GQ!^B;M|*l&#t}Ux5rJN02;9t`(?wz$=GAw8WqeY!>f0^JXr@x zs26>Xu* z7bO8ven5cm64_#RI_KM$QlFw&7O!YyDec;M(94wAc|qJjEo*Z|Zaj)uZ5g|~VU$K1 zIxtwcx{;n_Gi;&pNOj`K0t!z&+s`j`cmmkLaZiA~@5oh1HHOh>juEo{hxkYijc{M2 z{PH=wb*WXtI?indN=HI9PsuXwIYH(Bn58awS}5{TM{g-qg6Bz<#b;g2X&D|}h?7B@Gp0hTr;0m2iq}6_`bxPjW_Bo(O(X1N^a^Ym9$2>(l$-t*Q<<4l7Iib~u zM@CQA25F{n@-(ZL$t4GJcmJMEKBN3+L^uHBUV&IwZB}$_`MrYplWV_<_f#Il(`}XG zk?*3#lCf}<$}Dl^xaOA;TDYMLAYkVK&bf#jaUWAv)XC%Sn4dg(De9n!rGaDScak%; zqR%_6vvkvB(H5#GVl{y-_@vY~iYej4J2hOO8j04!_}f%fUE_~6K&2SqKPg$?isFdz zIR~Gbz*P|D7!yh{e}6>d;zl!IoZ%Z?uPnK7Fon08Gj0NGb+p}IJ8oNOcScmo3E0>m~6`v{VOjP~`Qj&qv z>z&soV`dCh7goan?GT?>=sFgB@W&s(nB4^%m{kz z{a_H&bouwnbNQB0hhNtIp{|o>8Uv4iCb@AP;{rkoF2tG7ONcb1juY>_oxIm81213D zA=f;Yq&i2~OcK+ChR(dBIdj)u48>n~-U3uX+BCK?O>A_x8Ah@Q-4$ABviK0rMXbO- zFt(j%-p!C)HnKr`i)Vbl7KyuofOtCpIj3kTy8x8bVEr1$l(AuJHB4x?nSSl;Sb^TV ztanWjo@46XAIG*FEM-8&(XVT;L*-jReib9SPqo4LCY+d+Kx zLkl}>lkkchu%;a|avqK!#Qt%+NHh)l%8u%mjmX^nS&^}`zG7~fTSG!tb`n!9Egeh} z%rr1)2ruRpv~!v(65_=kWQ4629$xrw-Doq>9LIc-)KD1p=B z&l#0?MEQNuw=oj@l4s#0kTXp?2ZEfS?TpHWQ;Vq6N7VJe8lq-imYPo ziy=f1GX!OKQ^E%e*WRJ=BRX-3t&~Wsjf)ZX-X6Zv3e4rwi-ZIhgs^Y|1JeY_B6ji* 
zhHPit8NX0W$ScBwpdJuoQU`#5RKw7h=d}fCuge`SXl2K$Uz`1uN=b78a<}5C_CQpD z_f;D$5(4eyj^kSIg}Uy!&#x;Xs~LP)HBgO|D)4u0Bx<0HpC97MWdC7eD>?fv$L%0r zNP$Ca;c0SfRAZV}%P?3xR^1~<^fY5jZd~}~0%yj3Rw|az2|5*7qgBNC5vJ3 zey5ESBh>2Lp2z#M^H_|8>A&m+bG?3`foi1eOcC1$zq{Z%RezB4?*sj1CJ$b>^UmNt zs@B;riLm(~9v#CtdD5YKTP9!l+QwwnV!GjzpAYEV2&JLUkCkb`sT#9x>mef7(FdzSQ+;r;R9@9|*aw&B z|7h9;y!$ioz^>(xy>_J-vxaQL58L$Ipc)A$Ep1X8V~Uuu-1@BFL9lG0z(z{8lyynQ zVr7f)R|VshU~V&j9?r-Ui_vWvnZEr0spGuEss8^rZg0o2N5-)^_9j~zM3OC=?7dgY zN@R0+pRfDAQQPA%w{fvY z9#hH&I5Qsm5iX39dW25>eO`URlC>C59dQ}YRycn;THiwQlB-EyLw}9I`60AyWB~fp zaPeAPd0=L3XnZ=l>FAVi4=2AC^{oEH!^q$C$+r{>N$pzc>8QEjFq7_r+uevM=Vi~wk%cGCzIk+WYWT0TVaZHn7N3I|dV(m<10x=;msn>1JvB+xH1aCH z2D^;sQSN@=mP(POl$00-6*o@Yx8jbp)r?eUzsu-;gCbFBW3=yIeWgQ8FBVX@qR-A6 z*0PZc8hma~4tuK(PX_N&oIKvWe2=U2%o@$aTpDAzjIiuBh2-nV5b*^*LySc&2Tdxk zvBB}j13mL1t!f7|6Edf6gfeyy2cySQZEgn+OEW&qx_){*J+X!_cnOCMIm~%HJ;eyT zwKHmn%hTBGV0JMG!Pn1%wdfLx$P~)`k@g9BN>+VbfhiLjWfYiw{63d4aN~+J)HG7# zx13k7^9g#>mfw{d>!Iu7S%pql28GnMY#*EZw8D0E=M33+xHG%fU#*s6KfM}8+0P2d zV%-Q6N)wLVEm+A2B{P|PJd83K6ryiAJi!X9f+5EyA@kxBS&Hb2(*^j|!q^E)U+?eV z42J9tVJ?2jH7r)57$;h+A0U0pEfR1fd!c5dN!{kGY6CN;-C^Y6LHR~nS8X^upXnRN z7_{-9t;VR)C1fK5A~@W1OH-B!dIkRmD#wL#*WHhLT!lYwvHPblsB7$tWL=SlnnptE zeD@o|cGW1^;w$@VL`E<@4fQ+3t003i-=Qp%*;uVC53HNkstQUTzbSGnwEu|O9;;)Y z$jL~kg_dS+!;Neff4yNz6{ig2@UuOgK1`^@lfJL`1~%kRlRbUjifzbYgmq-{w0Iur z9B@xWzxKAL{Oxh+$z7tZ31+F}E+?mZbmlFI*?RPJBsskceb~kNsJrZ@-T}jbSM1?8Gc0?Y8t7s zIVQbv?bHg%j+QP)fEmirn`s23I~WRIy@4L#wGM1pPs2u2FCP-I(fw{uQ6$q$irb7>NUyV!ff+` zRFt)EoOfCYGguIJ`(H#EQXm7D++213NSjf)Y$#LFu_vzm)!Hxj+EmB=!t07O)HIS( zLT~ws?MNrPdaK2lIh^!@q;=hvH2zbvxKB^L41~JOI2%9g&duTNCe^xTLp@YKOh_TI zJ)7`3Wv`v<%i^qYRl#|_5}sRxb0jMqNfyh~Mt*r?B^h~_1=Zh3Ql&)tT_`X%jYZEM zIl&HXm0r9RlcFW3?S(l0+&40MznCaCN;%>TLHnH3YBfh8Qf5DBTL_2k2V_VueBHC` zJ9{u~xa37eE}@b$WzdYtdmT4w;L*_EfBy4Axbop##>4Ftz9GWfD!%tui#@JLLro*k z?_&1I$KY!|st>FY5*mPN4faY%X{YDzG7k{)BpSR7H=eK$-s)VK6U_Yn1zkRt#jGwO zb6VNzE*pC!jM|2egJBTBj61Ybtl6>~_zMt$ZoY;j0ddK@y`AGr!opwPeHh(6LcL|D 
z4O?;(grB&O_zF8hYSgZOTBu&_vGpk?W z%l^HxyZU*qlo>q;_ENpqoA-IVq0r4IPG~=g7k98X?2Qi>S0t_5x7JYG{7oNio~ zJc|5p)I&;ln^pap1&y|oa`=RD^k4TA{gJlNta*PDwrC!jIx9+mb;8pB%j)_SX{c!= zEP+pe#t5e0k~M}t*6k7g?G&yC}bZ2Yt@q0)s-a`U|<$EnIoFGo2LPNpy^+Z(qu zrA?<+oIO*S|B@-PO1v4K%WKL~G#NHVaCBg(|b_!DlWzp`JwRme@9IA~7+C2uUL zAUnO}7;&_BaEEhQ8o5tof4X0mK=g*e8}c+#pxeuz#vpKX=n2(a=_slwfW1Qe#S+y( zg{yVpP&Ryem4#S1?(7+X6Q^-R+?|l#zo)j^Gx^1*QtzY-v4t@DHjwjdWa-_1rAeWt zk-~L#>k?P26yjP*5s6~0&}JWx&r!p3Uo;k9ceW+)#JbHKd;S^5TM?& zFC$hLx6XGVI_{eq7G0rl>u#Bp=MY_Y*b)AnD52i;Eo%pj^zj^LYvTtK#Tnu#T=a8h zqY5Oe+<-N)KjFt&q?MTSY$Z5Aoj?>ha>6@Uxp2HJ3zRtVv=q zO9?JvCw9BLx6a^pAoaGcEo&L0eD1ml?{fvu-bQa}Bj&a1Mb+#y?F{>%Bi7^-D*>eteVcAwN zv7o+B5h5izRN_dTA8TlR-#2>yr%Z@rsv7>^2*YNC9z}*}fMa=|j>lyW*3{s+8}f=Y z)HG5`_^U`Q%&GZy|GHlz$J@5GqJnrH4eOx|oQIwfTX`53t82(z-zlmQTnvW8vwl=A zyV~GuY+eQ_?J9a~&MP04AO;y#{n3u?4tPP$-a5AXpzx(&)6Ux=t{Sm3ol?q*4jkmm z!zU+2QTRfWdTH&wse0mI!B^**MVSTa<;9z`BeTONdqEXE+=-qUlHA zdb#*;nz(qmLX1Fds3PdKFzWD_oM7)F#} zXh6_5Z50nrXU7km2k~4c3i%NPHr`ejD;M4~Eq|o#o9{R>Iv<-JPsec&8oCiZ;8;Al za%%rfBQ4|3hg`Z&A@(i~5o)W7<)RXz5p^FrCStq~zTA{kj1()X$M)@#&Q$N$-V~ve zMSW#YW9cSKYm)l-dnPgICKRr=eb7+Xf$whaKr!N_#qArb`S}d&Z$u=?^>|}Pt>2^k zT+8C?MC+*(j1i$#&%El!{!`4{e z==Vzgv{*O&V9>5X#TbU{9kRJYv5P}FPS=ux`$yWrho^45F0m?&ry6O2zcfTyi!86K zeWXy+NTLDB$A-S!g~{&Zwmk1rKaWOmIcH>z>jo(k0N51EU5Tj); zy+&QrmxNH9dpftNXZ8oBC>E9bIgj!qqZ2t1V$p}q*;wfZ$*w&YBWsyG_uG8xSZTNC!ZHp+Nz7Ux>B&$m<;h{Hx8B~;!yt~M;_WW z7b8AFWh@AS)f#IKa)jp33!Bv@Z7GfC=+2d-|42LJw^MmE$?Lw-c_BS5rK8aFjw9-d zG}JVL@D-~Y=Q1R}Ew~WCEl!*28_07KniUrt1`H1>D=9QL zVm0J0xVNdVg-BI;oJZ7r%PA>-hu*pNp=t^rnd%Y4y;Q099UWhbJ1{egb_Nd9CqoY+ zW+x|owAG<|m^m~=YKn-Oyuh_SeyV*!7Ru-NS6a=4uExELGsAw&Fve!iqUub){$FgR3y7y?irbcNpe};Yh0+^ z$bK0f<5IXShg0V(PHh6?`$#!$4Ud_;0ki4Aiwql@P=4GRcVeZbZ#3N(d^hpaaXl*; z9mu2=^R|L&bChM-qO?pm_mYqp{;_e&woaY0HUWO@^eh-0UQ&jlk4a>k;IJ~$HV^tQ zrBHWgOSNr5ZjDenCW0g*{0u1`{nP%bd zSFUP9%_DhV>u=^!hIrGgMKjhrT^xF<1(Uw%t@@^ADA&p0`<+A3%H=ylfmzz~?&-)^ zQqQh1JjhIe501Lf!R?Dzy6<0e#F1!WI_}$$+Od5!qEIqk34qfp2;}moM$}t 
zu%d95hv{YDXzBFoCM-F8?G@BmjS)Wkjo6d?wOwL|y*XeXNXph_XkSxhZ96Etjq0wr zr;Y7Wx*S?+WneK(=(?^y@_h}btNfdn^@D*Y-X6l7f3%&<>q!o2J%+zH^5p8}Sb3Rd z5EpV)8)_hVjyK4thUhD&DObI@qV>E|AuUbr@;y#E*P5dq%}Z5j1?V>8(Or$oI~1qW z+QMwMsB^at6a; z_=*F(=NXJdU9H5eGx`(nW)2B{^jTO?rDZ(-9i?eZKo*8hKm+athEn|V1k|hh?-M{T z?cXP$Ue|x00D4iO6i{Wla25h=PZx(r&SYoI1YF<`M23g;Z*w!v)2ExE)eBVrkf0Cv zKS;m_29qEG|APbw0+YbvRG=bY5h;)a7JGt8P*En31Qt&MNnnv9kOcN30!d)aAdm#M z_5n#?2_BFHcFF-sU_~2{1U8-lNnn8(kOcN|0ZCxJ6_~UEv_*kQQ27#&1a=vLNl-Nq zkOVgE07+nR4Uh!(zyL{LtqPC?wv+%#VCe^t1a@eENl+yPkOVe707+nB0+;x^3CddmNnm^lNCMM9KoS_P0h6Gt3XlXwKY%1KNdY8*!2}=)^!Wox zplu#V0$u1p5@_lMl0c6%kOW$e!6eAR3nYPtSRe`XmjX$kT@y$G-H$*LXeI=cAa5R! z1X|sIB+xkqB!NaOFbVQC0ZE`u2uK25H((NEVgZssPY9R6m^3A)DtlE7;bAPKx>0FuC^J&*+M zz=0%ir3@xP8(JU(Eboe0@rb1613a`lE78b|0BuB3I6YA(5FvZBO6Z&$o>Pw Ci{ ({ + name: `follower_${i}`, + script: './bin/run', + args: 'start --api-http --api-ws', + interpreter: 'node', + env: { + LISK_LOG_LEVEL: 'debug', + LISK_NETWORK: 'alphanet', + LISK_PORT: 7667 + i + 1, + LISK_API_WS_PORT: 7887 + i + 1, + LISK_SEED_PEERS: `127.0.0.1:7667`, + LISK_DATA_PATH: path.join(os.homedir(), '.lisk', 'ex-pos-mainchain', `follower_${i}`), + }, +})); + +module.exports = { + apps: [ + { + name: 'seed', + script: './bin/run', + args: 'start --api-http --api-ws', + interpreter: 'node', + env: { + LISK_LOG_LEVEL: 'debug', + LISK_NETWORK: 'default', + LISK_DATA_PATH: path.join(os.homedir(), '.lisk', 'ex-pos-mainchain', 'seed'), + }, + }, + ...followers, + ], +}; diff --git a/examples/poa-sidechain/jest.config.js b/examples/poa-sidechain/jest.config.js new file mode 100644 index 00000000000..766e9586f0a --- /dev/null +++ b/examples/poa-sidechain/jest.config.js @@ -0,0 +1,41 @@ +module.exports = { + testMatch: ['/test/**/?(*.)+(spec|test).+(ts|tsx|js)'], + setupFilesAfterEnv: ['/test/_setup.js'], + transform: { + '^.+\\.(ts|tsx)$': [ + 'ts-jest', + { + tsconfig: '/tsconfig.json', + }, + ], + }, + verbose: false, + collectCoverage: false, + coverageReporters: 
['json'], + coverageDirectory: '.coverage', + /** + * restoreMocks [boolean] + * + * Default: false + * + * Automatically restore mock state between every test. + * Equivalent to calling jest.restoreAllMocks() between each test. + * This will lead to any mocks having their fake implementations removed + * and restores their initial implementation. + */ + restoreMocks: true, + resetMocks: true, + + /** + * resetModules [boolean] + * + * Default: false + * + * By default, each test file gets its own independent module registry. + * Enabling resetModules goes a step further and resets the module registry before running each individual test. + * This is useful to isolate modules for every test so that local module state doesn't conflict between tests. + * This can be done programmatically using jest.resetModules(). + */ + resetModules: true, + testEnvironment: 'node', +}; diff --git a/examples/poa-sidechain/package.json b/examples/poa-sidechain/package.json new file mode 100755 index 00000000000..edb14eee5c8 --- /dev/null +++ b/examples/poa-sidechain/package.json @@ -0,0 +1,158 @@ +{ + "name": "poa-sidechain", + "private": true, + "version": "0.1.0", + "description": "Lisk-SDK Application", + "author": "Lisk Foundation <admin@lisk.com>, lightcurve GmbH <admin@lightcurve.io>", + "license": "Apache-2.0", + "keywords": [ + "blockchain", + "lisk", + "nodejs", + "javascript", + "typescript" + ], + "homepage": "", + "repository": {}, + "engines": { + "node": ">=18.12.0 <=18", + "npm": ">=8.1.0" + }, + "main": "dist/index.js", + "scripts": { + "lint": "eslint --ext .ts .", + "lint:fix": "eslint --fix --ext .js,.ts .", + "format": "prettier --write '**/*'", + "prepack": "oclif manifest && oclif readme --multi --dir=docs/commands && npm shrinkwrap && npm prune --production && npm shrinkwrap", + "prebuild": "if test -d dist; then rm -r dist; fi; rm -f tsconfig.tsbuildinfo; rm -f npm-shrinkwrap.json", + "start": "echo Run \"./bin/run start\" to start the app", + "build": "tsc", + 
"test": "jest --passWithNoTests", + "test:coverage": "jest --passWithNoTests --coverage=true --coverage-reporters=text", + "test:ci": "jest --passWithNoTests --coverage=true --coverage-reporters=json", + "version": "oclif readme --multi --dir=docs/commands && git add README.md docs", + "prepublishOnly": "npm ci && npm run lint && npm run build", + "app": "./bin/run start --api-http --api-ipc", + "init-genesis": "./bin/run genesis-block:create --output config/default --assets-file ./config/default/genesis_assets.json", + "updateAuthority": "./node_modules/.bin/ts-node scripts/updateAuthority.ts" + }, + "bin": { + "poa-sidechain": "./bin/run" + }, + "lisk": { + "addressPrefix": "lsk" + }, + "oclif": { + "bin": "poa-sidechain", + "commands": "./dist/commands", + "plugins": [ + "@oclif/plugin-help", + "@oclif/plugin-autocomplete", + "@oclif/plugin-version" + ], + "additionalHelpFlags": [ + "-h" + ], + "additionalVersionFlags": [ + "-v" + ], + "topicSeparator": " ", + "topics": { + "account": { + "description": "Commands relating to poa-sidechain accounts." + }, + "block": { + "description": "Commands relating to poa-sidechain blocks." + }, + "blockchain": { + "description": "Commands relating to poa-sidechain blockchain data." + }, + "console": { + "description": "poa-sidechain interactive REPL session to run commands." + }, + "config": { + "description": "Commands relating to poa-sidechain node configuration." + }, + "endpoint": { + "description": "Commands relating to endpoints." + }, + "keys": { + "description": "Commands relating to account, generator and bls keys." + }, + "generator": { + "description": "Commands relating to poa-sidechain forging and generator-info data." + }, + "hash-onion": { + "description": "Create hash onions to be used by the forger." + }, + "system": { + "description": "Commands relating to poa-sidechain node." + }, + "passphrase": { + "description": "Commands relating to poa-sidechain passphrases." 
+ }, + "sdk": { + "description": "Commands relating to Lisk SDK development." + }, + "transaction": { + "description": "Commands relating to poa-sidechain transactions." + } + } + }, + "files": [ + "/bin", + "/npm-shrinkwrap.json", + "/oclif.manifest.json", + "/dist", + "/config", + "/docs" + ], + "husky": { + "hooks": { + "pre-commit": "lint-staged" + } + }, + "dependencies": { + "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-alpha.7", + "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-alpha.7", + "@liskhq/lisk-framework-forger-plugin": "^0.3.0-alpha.7", + "@liskhq/lisk-framework-monitor-plugin": "^0.3.0-alpha.7", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-alpha.7", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-beta.0", + "@oclif/core": "1.20.4", + "@oclif/plugin-autocomplete": "1.3.6", + "@oclif/plugin-help": "5.1.19", + "@oclif/plugin-version": "1.1.3", + "axios": "1.2.0", + "fs-extra": "11.1.0", + "inquirer": "8.2.5", + "lisk-commander": "^6.0.0-alpha.7", + "lisk-sdk": "^6.0.0-alpha.7", + "tar": "6.1.12", + "tslib": "2.4.1" + }, + "devDependencies": { + "@types/fs-extra": "9.0.13", + "@types/jest": "29.2.3", + "@types/jest-when": "3.5.2", + "@types/node": "18.15.3", + "@types/tar": "6.1.3", + "@typescript-eslint/eslint-plugin": "5.44.0", + "@typescript-eslint/parser": "5.44.0", + "eslint": "8.28.0", + "eslint-config-lisk-base": "2.0.1", + "eslint-plugin-import": "2.26.0", + "eslint-plugin-jest": "27.1.6", + "globby": "10.0.2", + "husky": "4.2.5", + "jest": "29.3.1", + "jest-extended": "3.2.0", + "jest-when": "3.5.2", + "lint-staged": "10.2.11", + "oclif": "3.2.31", + "prettier": "2.8.0", + "ts-jest": "29.0.3", + "ts-node": "10.9.1", + "typescript": "5.0.2" + } +} diff --git a/examples/poa-sidechain/scripts/extern_types.ts b/examples/poa-sidechain/scripts/extern_types.ts new file mode 100644 index 00000000000..079c85ca4c4 --- /dev/null +++ b/examples/poa-sidechain/scripts/extern_types.ts @@ -0,0 +1,42 @@ +import { GenesisConfig 
} from 'lisk-sdk'; +export interface ValidatorJSON { + address: string; + bftWeight: string; + generatorKey: string; + blsKey: string; +} + +export interface NodeInfo { + readonly version: string; + readonly networkVersion: string; + readonly chainID: string; + readonly lastBlockID: string; + readonly height: number; + readonly genesisHeight: number; + readonly finalizedHeight: number; + readonly syncing: boolean; + readonly unconfirmedTransactions: number; + readonly genesis: GenesisConfig; + readonly network: { + readonly port: number; + readonly hostIp?: string; + readonly seedPeers: { + readonly ip: string; + readonly port: number; + }[]; + readonly blacklistedIPs?: string[]; + readonly fixedPeers?: string[]; + readonly whitelistedPeers?: { + readonly ip: string; + readonly port: number; + }[]; + }; +} + +export interface BFTParametersJSON { + prevoteThreshold: string; + precommitThreshold: string; + certificateThreshold: string; + validators: ValidatorJSON[]; + validatorsHash: string; +} diff --git a/examples/poa-sidechain/scripts/schema.ts b/examples/poa-sidechain/scripts/schema.ts new file mode 100644 index 00000000000..05a672db1ec --- /dev/null +++ b/examples/poa-sidechain/scripts/schema.ts @@ -0,0 +1,39 @@ +import { NUM_BYTES_ADDRESS } from 'lisk-framework/dist-node/modules/poa/constants'; + +const validator = { + type: 'object', + required: ['address', 'weight'], + properties: { + address: { + dataType: 'bytes', + minLength: NUM_BYTES_ADDRESS, + maxLength: NUM_BYTES_ADDRESS, + fieldNumber: 1, + }, + weight: { + dataType: 'uint64', + fieldNumber: 2, + }, + }, +}; + +export const updateAuthorityWithoutSigSchema = { + $id: '/poa-sidechain/command/updateAuthority', + type: 'object', + required: ['newValidators', 'threshold', 'validatorsUpdateNonce'], + properties: { + newValidators: { + type: 'array', + fieldNumber: 1, + items: validator, + }, + threshold: { + dataType: 'uint64', + fieldNumber: 2, + }, + validatorsUpdateNonce: { + dataType: 'uint32', + 
fieldNumber: 3, + }, + }, +}; diff --git a/examples/poa-sidechain/scripts/updateAuthority.json b/examples/poa-sidechain/scripts/updateAuthority.json new file mode 100644 index 00000000000..ed0b1c351f7 --- /dev/null +++ b/examples/poa-sidechain/scripts/updateAuthority.json @@ -0,0 +1,28 @@ +{ + "newValidators": [ + { + "address": "lskzbqjmwmd32sx8ya56saa4gk7tkco953btm24t8", + "weight": 20 + }, + { + "address": "lskzot8pzdcvjhpjwrhq3dkkbf499ok7mhwkrvsq3", + "weight": 20 + }, + { + "address": "lskz89nmk8tuwt93yzqm6wu2jxjdaftr9d5detn8v", + "weight": 20 + }, + { + "address": "lskx2hume2sg9grrnj94cpqkjummtz2mpcgc8dhoe", + "weight": 20 + }, + { + "address": "lskxa4895zkxjspdvu3e5eujash7okvnkkpr8xsr5", + "weight": 15 + } + ], + "threshold": "68", + "validatorsUpdateNonce": 0, + "signature": "87a20b81bdcbc7a4f228bc00894d53d55fbb2c53960f0ddc0cfa0f77395a33858a9907079773ad50a220cbdb49bc1d171250df83dd70572c4691eb280ae99d4501b289676b6bb0ad0e859b525752015bf5113e49050a8c70853470f2dd7e9344", + "aggregationBits": "1d224ad4cf64a3db52b2509c5b63365db970f34c8e09babf4af8135d9234f91f" +} diff --git a/examples/poa-sidechain/scripts/updateAuthority.ts b/examples/poa-sidechain/scripts/updateAuthority.ts new file mode 100644 index 00000000000..c1b771aa43d --- /dev/null +++ b/examples/poa-sidechain/scripts/updateAuthority.ts @@ -0,0 +1,105 @@ +import { writeFileSync, readFileSync } from 'fs-extra'; +import { codec, cryptography, apiClient } from 'lisk-sdk'; +import { NodeInfo } from './extern_types'; +import { keys as validatorsKeys } from '../config/default/dev-validators.json'; +import { MESSAGE_TAG_POA } from 'lisk-framework/dist-node/modules/poa/constants'; +import { updateAuthorityWithoutSigSchema } from './schema'; + +(async () => { + const { bls } = cryptography; + + const client = await apiClient.createIPCClient('~/.lisk/poa-sidechain'); + const nodeInfo = await client.invoke('system_getNodeInfo'); + // Get active validators from mainchain + + const paramsJSON = 
JSON.parse(readFileSync('./scripts/updateAuthority.json', 'utf-8')); + const genesis = JSON.parse(readFileSync('./config/default/genesis_assets.json', 'utf-8')); + + const chainID = Buffer.from(nodeInfo.chainID, 'hex'); + + const params = { + newValidators: paramsJSON.newValidators.map(validator => ({ + address: cryptography.address.getAddressFromLisk32Address(validator.address), + weight: validator.weight, + })), + threshold: paramsJSON.threshold, + validatorsUpdateNonce: paramsJSON.validatorsUpdateNonce, + }; + + const message = codec.encode(updateAuthorityWithoutSigSchema, params); + + // console.log(message); + + const snapshotSubstore = genesis.assets.filter(module => module.module === 'poa')[0].data + .snapshotSubstore; + + const activeValidatorsWithPrivateKey: { blsPublicKey: Buffer; blsPrivateKey: Buffer }[] = []; + for (const validator of snapshotSubstore.activeValidators) { + const validatorInfo = validatorsKeys.find( + configValidator => configValidator.address === validator.address, + ); + if (validatorInfo) { + activeValidatorsWithPrivateKey.push({ + blsPublicKey: Buffer.from(validatorInfo.plain.blsKey, 'hex'), + blsPrivateKey: Buffer.from(validatorInfo.plain.blsPrivateKey, 'hex'), + }); + } + } + activeValidatorsWithPrivateKey.sort((a, b) => a.blsPublicKey.compare(b.blsPublicKey)); + + const keys: Buffer[] = []; + const weights: bigint[] = []; + const validatorSignatures: { publicKey: Buffer; signature: Buffer }[] = []; + // Sign with each active validator + for (const validator of activeValidatorsWithPrivateKey) { + keys.push(validator.blsPublicKey); + weights.push(BigInt(20)); + const signature = bls.signData(MESSAGE_TAG_POA, chainID, message, validator.blsPrivateKey); + validatorSignatures.push({ publicKey: validator.blsPublicKey, signature }); + } + + const publicKeysList = activeValidatorsWithPrivateKey.map(v => v.blsPublicKey); + console.log('Total active sidechain validators:', validatorSignatures.length); + + const { aggregationBits, signature 
} = bls.createAggSig(publicKeysList, validatorSignatures); + + const verifyResult = bls.verifyWeightedAggSig( + // validatorsInfos.map(validatorInfo => validatorInfo.key), + // aggregationBits, + // signature, + // MESSAGE_TAG_POA, + // context.chainID, + // message, + // validatorsInfos.map(validatorInfo => validatorInfo.weight), + // snapshot0.threshold, + + keys, + aggregationBits, + signature, + MESSAGE_TAG_POA, + chainID, + message, + weights, + BigInt(68), + ); + console.log('==SIGNATURE VERIFICATION RESULT====', verifyResult); + + writeFileSync( + './updateAuthority_signed.json', + JSON.stringify({ + ...paramsJSON, + newValidators: paramsJSON.newValidators.map(validator => ({ + address: cryptography.address + .getAddressFromLisk32Address(validator.address) + .toString('hex'), + weight: validator.weight, + })), + signature: signature.toString('hex'), + aggregationBits: aggregationBits.toString('hex'), + }), + ); + + console.log('UpdateAuthority file is created at ./updateAuthority_signed successfully.'); + + process.exit(0); +})(); diff --git a/examples/poa-sidechain/scripts/updateKey.json b/examples/poa-sidechain/scripts/updateKey.json new file mode 100644 index 00000000000..b97cf124c29 --- /dev/null +++ b/examples/poa-sidechain/scripts/updateKey.json @@ -0,0 +1,3 @@ +{ + "generatorKey": "1d224ad4cf64a3db52b2509c5b63365db970f34c8e09babf4af8135d9234f91f" +} diff --git a/examples/poa-sidechain/src/app/app.ts b/examples/poa-sidechain/src/app/app.ts new file mode 100644 index 00000000000..42f6724245f --- /dev/null +++ b/examples/poa-sidechain/src/app/app.ts @@ -0,0 +1,10 @@ +import { Application, PartialApplicationConfig } from 'lisk-sdk'; +import { registerModules } from './modules'; +import { registerPlugins } from './plugins'; + +export const getApplication = (config: PartialApplicationConfig): Application => { + const app = registerModules(config); + registerPlugins(app); + + return app; +}; diff --git a/examples/poa-sidechain/src/app/index.ts 
b/examples/poa-sidechain/src/app/index.ts new file mode 100644 index 00000000000..ff8b4c56321 --- /dev/null +++ b/examples/poa-sidechain/src/app/index.ts @@ -0,0 +1 @@ +export default {}; diff --git a/examples/poa-sidechain/src/app/modules.ts b/examples/poa-sidechain/src/app/modules.ts new file mode 100644 index 00000000000..bc8306a7c45 --- /dev/null +++ b/examples/poa-sidechain/src/app/modules.ts @@ -0,0 +1,44 @@ +import { + Application, + AuthModule, + FeeModule, + PartialApplicationConfig, + PoAModule, + RandomModule, + RewardModule, + SidechainInteroperabilityModule, + TokenModule, + ValidatorsModule, +} from 'lisk-sdk'; + +export const registerModules = (config: PartialApplicationConfig): Application => { + const application = new Application(config); + // create module instances + const authModule = new AuthModule(); + const tokenModule = new TokenModule(); + const feeModule = new FeeModule(); + const rewardModule = new RewardModule(); + const randomModule = new RandomModule(); + const validatorModule = new ValidatorsModule(); + const poaModule = new PoAModule(); + const interoperabilityModule = new SidechainInteroperabilityModule(); + + interoperabilityModule.addDependencies(validatorModule.method, tokenModule.method); + rewardModule.addDependencies(tokenModule.method, randomModule.method); + feeModule.addDependencies(tokenModule.method, interoperabilityModule.method); + poaModule.addDependencies(validatorModule.method, feeModule.method, randomModule.method); + + interoperabilityModule.registerInteroperableModule(tokenModule); + interoperabilityModule.registerInteroperableModule(feeModule); + + // Register modules in the sequence defined in LIP0063 https://github.com/LiskHQ/lips/blob/main/proposals/lip-0063.md#modules + application.registerModule(authModule); + application.registerModule(validatorModule); + application.registerModule(tokenModule); + application.registerModule(feeModule); + application.registerModule(interoperabilityModule); + 
application.registerModule(poaModule); + application.registerModule(randomModule); + + return application; +}; diff --git a/examples/poa-sidechain/src/app/modules/.gitkeep b/examples/poa-sidechain/src/app/modules/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/poa-sidechain/src/app/plugins.ts b/examples/poa-sidechain/src/app/plugins.ts new file mode 100644 index 00000000000..51a493fe23b --- /dev/null +++ b/examples/poa-sidechain/src/app/plugins.ts @@ -0,0 +1,4 @@ +/* eslint-disable @typescript-eslint/no-empty-function */ +import { Application } from 'lisk-sdk'; + +export const registerPlugins = (_app: Application): void => {}; diff --git a/examples/poa-sidechain/src/app/plugins/.gitkeep b/examples/poa-sidechain/src/app/plugins/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/poa-sidechain/src/commands/block/get.ts b/examples/poa-sidechain/src/commands/block/get.ts new file mode 100644 index 00000000000..c1c1c385807 --- /dev/null +++ b/examples/poa-sidechain/src/commands/block/get.ts @@ -0,0 +1 @@ +export { BlockGetCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/blockchain/export.ts b/examples/poa-sidechain/src/commands/blockchain/export.ts new file mode 100644 index 00000000000..3af8131165b --- /dev/null +++ b/examples/poa-sidechain/src/commands/blockchain/export.ts @@ -0,0 +1 @@ +export { BlockchainExportCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/blockchain/hash.ts b/examples/poa-sidechain/src/commands/blockchain/hash.ts new file mode 100644 index 00000000000..d5161d903bf --- /dev/null +++ b/examples/poa-sidechain/src/commands/blockchain/hash.ts @@ -0,0 +1 @@ +export { BlockchainHashCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/blockchain/import.ts b/examples/poa-sidechain/src/commands/blockchain/import.ts new file mode 100644 index 00000000000..50faa4ad859 --- /dev/null +++ 
b/examples/poa-sidechain/src/commands/blockchain/import.ts @@ -0,0 +1 @@ +export { BlockchainImportCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/blockchain/reset.ts b/examples/poa-sidechain/src/commands/blockchain/reset.ts new file mode 100644 index 00000000000..3131c161f30 --- /dev/null +++ b/examples/poa-sidechain/src/commands/blockchain/reset.ts @@ -0,0 +1 @@ +export { BlockchainResetCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/config/create.ts b/examples/poa-sidechain/src/commands/config/create.ts new file mode 100644 index 00000000000..103acf9d4d3 --- /dev/null +++ b/examples/poa-sidechain/src/commands/config/create.ts @@ -0,0 +1 @@ +export { ConfigCreateCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/config/show.ts b/examples/poa-sidechain/src/commands/config/show.ts new file mode 100644 index 00000000000..3b4ad3084eb --- /dev/null +++ b/examples/poa-sidechain/src/commands/config/show.ts @@ -0,0 +1 @@ +export { ConfigShowCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/console.ts b/examples/poa-sidechain/src/commands/console.ts new file mode 100644 index 00000000000..03a4a4f200a --- /dev/null +++ b/examples/poa-sidechain/src/commands/console.ts @@ -0,0 +1 @@ +export { ConsoleCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/endpoint/invoke.ts b/examples/poa-sidechain/src/commands/endpoint/invoke.ts new file mode 100644 index 00000000000..99794488428 --- /dev/null +++ b/examples/poa-sidechain/src/commands/endpoint/invoke.ts @@ -0,0 +1 @@ +export { InvokeCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/endpoint/list.ts b/examples/poa-sidechain/src/commands/endpoint/list.ts new file mode 100755 index 00000000000..72823301bd3 --- /dev/null +++ b/examples/poa-sidechain/src/commands/endpoint/list.ts @@ -0,0 +1 @@ +export { ListCommand } from 'lisk-commander'; diff 
--git a/examples/poa-sidechain/src/commands/generator/disable.ts b/examples/poa-sidechain/src/commands/generator/disable.ts new file mode 100644 index 00000000000..5d9ed476298 --- /dev/null +++ b/examples/poa-sidechain/src/commands/generator/disable.ts @@ -0,0 +1 @@ +export { GeneratorDisableCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/generator/enable.ts b/examples/poa-sidechain/src/commands/generator/enable.ts new file mode 100644 index 00000000000..a10141e171b --- /dev/null +++ b/examples/poa-sidechain/src/commands/generator/enable.ts @@ -0,0 +1 @@ +export { GeneratorEnableCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/generator/export.ts b/examples/poa-sidechain/src/commands/generator/export.ts new file mode 100644 index 00000000000..0f2f768f74e --- /dev/null +++ b/examples/poa-sidechain/src/commands/generator/export.ts @@ -0,0 +1 @@ +export { GeneratorExportCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/generator/import.ts b/examples/poa-sidechain/src/commands/generator/import.ts new file mode 100644 index 00000000000..4028f7e37cd --- /dev/null +++ b/examples/poa-sidechain/src/commands/generator/import.ts @@ -0,0 +1 @@ +export { GeneratorImportCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/generator/status.ts b/examples/poa-sidechain/src/commands/generator/status.ts new file mode 100644 index 00000000000..31038a3dad8 --- /dev/null +++ b/examples/poa-sidechain/src/commands/generator/status.ts @@ -0,0 +1 @@ +export { GeneratorStatusCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/genesis-block/create.ts b/examples/poa-sidechain/src/commands/genesis-block/create.ts new file mode 100644 index 00000000000..ad2d13f7575 --- /dev/null +++ b/examples/poa-sidechain/src/commands/genesis-block/create.ts @@ -0,0 +1,15 @@ +import { BaseGenesisBlockCommand } from 'lisk-commander'; +import { Application, 
PartialApplicationConfig } from 'lisk-sdk'; +import { join } from 'path'; +import { getApplication } from '../../app/app'; + +export class GenesisBlockCommand extends BaseGenesisBlockCommand { + public getApplication(config: PartialApplicationConfig): Application { + const app = getApplication(config); + return app; + } + + public getApplicationConfigDir(): string { + return join(__dirname, '../../../config'); + } +} diff --git a/examples/poa-sidechain/src/commands/hash-onion.ts b/examples/poa-sidechain/src/commands/hash-onion.ts new file mode 100644 index 00000000000..3a96cf04cf8 --- /dev/null +++ b/examples/poa-sidechain/src/commands/hash-onion.ts @@ -0,0 +1 @@ +export { HashOnionCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/keys/create.ts b/examples/poa-sidechain/src/commands/keys/create.ts new file mode 100644 index 00000000000..7a4d6261c10 --- /dev/null +++ b/examples/poa-sidechain/src/commands/keys/create.ts @@ -0,0 +1 @@ +export { KeysCreateCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/keys/encrypt.ts b/examples/poa-sidechain/src/commands/keys/encrypt.ts new file mode 100644 index 00000000000..42ff9418f11 --- /dev/null +++ b/examples/poa-sidechain/src/commands/keys/encrypt.ts @@ -0,0 +1 @@ +export { KeysEncryptCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/keys/export.ts b/examples/poa-sidechain/src/commands/keys/export.ts new file mode 100644 index 00000000000..598306ad7f9 --- /dev/null +++ b/examples/poa-sidechain/src/commands/keys/export.ts @@ -0,0 +1 @@ +export { KeysExportCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/keys/import.ts b/examples/poa-sidechain/src/commands/keys/import.ts new file mode 100644 index 00000000000..56e53adeb30 --- /dev/null +++ b/examples/poa-sidechain/src/commands/keys/import.ts @@ -0,0 +1 @@ +export { KeysImportCommand } from 'lisk-commander'; diff --git 
a/examples/poa-sidechain/src/commands/passphrase/create.ts b/examples/poa-sidechain/src/commands/passphrase/create.ts new file mode 100644 index 00000000000..87c8db87659 --- /dev/null +++ b/examples/poa-sidechain/src/commands/passphrase/create.ts @@ -0,0 +1 @@ +export { PassphraseCreateCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/passphrase/decrypt.ts b/examples/poa-sidechain/src/commands/passphrase/decrypt.ts new file mode 100644 index 00000000000..1119f9fbfb1 --- /dev/null +++ b/examples/poa-sidechain/src/commands/passphrase/decrypt.ts @@ -0,0 +1 @@ +export { PassphraseDecryptCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/passphrase/encrypt.ts b/examples/poa-sidechain/src/commands/passphrase/encrypt.ts new file mode 100644 index 00000000000..3d614b09f95 --- /dev/null +++ b/examples/poa-sidechain/src/commands/passphrase/encrypt.ts @@ -0,0 +1 @@ +export { PassphraseEncryptCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/start.ts b/examples/poa-sidechain/src/commands/start.ts new file mode 100644 index 00000000000..0aefab8d5d3 --- /dev/null +++ b/examples/poa-sidechain/src/commands/start.ts @@ -0,0 +1,136 @@ +/* eslint-disable no-param-reassign */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/explicit-member-accessibility */ +import { Flags as flagParser } from '@oclif/core'; +import { BaseStartCommand } from 'lisk-commander'; +import { Application, ApplicationConfig, PartialApplicationConfig } from 'lisk-sdk'; +import { ForgerPlugin } from '@liskhq/lisk-framework-forger-plugin'; +import { MonitorPlugin } from '@liskhq/lisk-framework-monitor-plugin'; +import { ReportMisbehaviorPlugin } from '@liskhq/lisk-framework-report-misbehavior-plugin'; +import { DashboardPlugin } from '@liskhq/lisk-framework-dashboard-plugin'; +import { FaucetPlugin } from 
'@liskhq/lisk-framework-faucet-plugin'; +import { ChainConnectorPlugin } from '@liskhq/lisk-framework-chain-connector-plugin'; +import { join } from 'path'; +import { getApplication } from '../app/app'; + +interface Flags { + [key: string]: string | number | boolean | undefined; +} + +const setPluginConfig = (config: ApplicationConfig, flags: Flags): void => { + if (flags['monitor-plugin-port'] !== undefined) { + config.plugins[MonitorPlugin.name] = config.plugins[MonitorPlugin.name] ?? {}; + config.plugins[MonitorPlugin.name].port = flags['monitor-plugin-port']; + } + if ( + flags['monitor-plugin-whitelist'] !== undefined && + typeof flags['monitor-plugin-whitelist'] === 'string' + ) { + config.plugins[MonitorPlugin.name] = config.plugins[MonitorPlugin.name] ?? {}; + config.plugins[MonitorPlugin.name].whiteList = flags['monitor-plugin-whitelist'] + .split(',') + .filter(Boolean); + } + if (flags['faucet-plugin-port'] !== undefined) { + config.plugins[FaucetPlugin.name] = config.plugins[FaucetPlugin.name] ?? {}; + config.plugins[FaucetPlugin.name].port = flags['faucet-plugin-port']; + } + if (flags['dashboard-plugin-port'] !== undefined) { + config.plugins[DashboardPlugin.name] = config.plugins[DashboardPlugin.name] ?? {}; + config.plugins[DashboardPlugin.name].port = flags['dashboard-plugin-port']; + } +}; + +export class StartCommand extends BaseStartCommand { + static flags: any = { + ...BaseStartCommand.flags, + 'enable-forger-plugin': flagParser.boolean({ + description: + 'Enable Forger Plugin. Environment variable "LISK_ENABLE_FORGER_PLUGIN" can also be used.', + env: 'LISK_ENABLE_FORGER_PLUGIN', + default: false, + }), + 'enable-monitor-plugin': flagParser.boolean({ + description: + 'Enable Monitor Plugin. Environment variable "LISK_ENABLE_MONITOR_PLUGIN" can also be used.', + env: 'LISK_ENABLE_MONITOR_PLUGIN', + default: false, + }), + 'monitor-plugin-port': flagParser.integer({ + description: + 'Port to be used for Monitor Plugin. 
Environment variable "LISK_MONITOR_PLUGIN_PORT" can also be used.', + env: 'LISK_MONITOR_PLUGIN_PORT', + dependsOn: ['enable-monitor-plugin'], + }), + 'monitor-plugin-whitelist': flagParser.string({ + description: + 'List of IPs in comma separated value to allow the connection. Environment variable "LISK_MONITOR_PLUGIN_WHITELIST" can also be used.', + env: 'LISK_MONITOR_PLUGIN_WHITELIST', + dependsOn: ['enable-monitor-plugin'], + }), + 'enable-report-misbehavior-plugin': flagParser.boolean({ + description: + 'Enable ReportMisbehavior Plugin. Environment variable "LISK_ENABLE_REPORT_MISBEHAVIOR_PLUGIN" can also be used.', + env: 'LISK_ENABLE_MONITOR_PLUGIN', + default: false, + }), + 'enable-faucet-plugin': flagParser.boolean({ + description: + 'Enable Faucet Plugin. Environment variable "LISK_ENABLE_FAUCET_PLUGIN" can also be used.', + env: 'LISK_ENABLE_FAUCET_PLUGIN', + default: false, + }), + 'faucet-plugin-port': flagParser.integer({ + description: + 'Port to be used for Faucet Plugin. Environment variable "LISK_FAUCET_PLUGIN_PORT" can also be used.', + env: 'LISK_FAUCET_PLUGIN_PORT', + dependsOn: ['enable-faucet-plugin'], + }), + 'enable-dashboard-plugin': flagParser.boolean({ + description: + 'Enable Dashboard Plugin. Environment variable "LISK_ENABLE_DASHBOARD_PLUGIN" can also be used.', + env: 'LISK_ENABLE_DASHBOARD_PLUGIN', + default: false, + }), + 'dashboard-plugin-port': flagParser.integer({ + description: + 'Port to be used for Dashboard Plugin. 
Environment variable "LISK_DASHBOARD_PLUGIN_PORT" can also be used.', + env: 'LISK_DASHBOARD_PLUGIN_PORT', + dependsOn: ['enable-dashboard-plugin'], + }), + }; + + public async getApplication(config: PartialApplicationConfig): Promise { + /* eslint-disable @typescript-eslint/no-unsafe-call */ + const { flags } = await this.parse(StartCommand); + // Set Plugins Config + setPluginConfig(config as ApplicationConfig, flags); + const app = getApplication(config); + + if (flags['enable-forger-plugin']) { + app.registerPlugin(new ForgerPlugin(), { loadAsChildProcess: true }); + } + if (flags['enable-monitor-plugin']) { + app.registerPlugin(new MonitorPlugin(), { loadAsChildProcess: true }); + } + if (flags['enable-report-misbehavior-plugin']) { + app.registerPlugin(new ReportMisbehaviorPlugin(), { loadAsChildProcess: true }); + } + if (flags['enable-faucet-plugin']) { + app.registerPlugin(new FaucetPlugin(), { loadAsChildProcess: true }); + } + if (flags['enable-dashboard-plugin']) { + app.registerPlugin(new DashboardPlugin(), { loadAsChildProcess: true }); + } + if (flags['enable-chain-connector-plugin']) { + app.registerPlugin(new ChainConnectorPlugin(), { loadAsChildProcess: true }); + } + + return app; + } + + public getApplicationConfigDir(): string { + return join(__dirname, '../../config'); + } +} diff --git a/examples/poa-sidechain/src/commands/system/metadata.ts b/examples/poa-sidechain/src/commands/system/metadata.ts new file mode 100644 index 00000000000..e3f72a6982e --- /dev/null +++ b/examples/poa-sidechain/src/commands/system/metadata.ts @@ -0,0 +1 @@ +export { NodeMetadataCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/system/node-info.ts b/examples/poa-sidechain/src/commands/system/node-info.ts new file mode 100644 index 00000000000..5b44ac03ce9 --- /dev/null +++ b/examples/poa-sidechain/src/commands/system/node-info.ts @@ -0,0 +1 @@ +export { NodeInfoCommand } from 'lisk-commander'; diff --git 
a/examples/poa-sidechain/src/commands/transaction/create.ts b/examples/poa-sidechain/src/commands/transaction/create.ts new file mode 100644 index 00000000000..4c7cbb76768 --- /dev/null +++ b/examples/poa-sidechain/src/commands/transaction/create.ts @@ -0,0 +1,22 @@ +/* eslint-disable class-methods-use-this */ +/* eslint-disable @typescript-eslint/explicit-member-accessibility */ +import { TransactionCreateCommand } from 'lisk-commander'; +import { Application, PartialApplicationConfig } from 'lisk-sdk'; +import { getApplication } from '../../app/app'; + +type CreateFlags = typeof TransactionCreateCommand.flags & { + [key: string]: Record; +}; + +export class CreateCommand extends TransactionCreateCommand { + static flags: CreateFlags = { + ...TransactionCreateCommand.flags, + }; + + static args = [...TransactionCreateCommand.args]; + + public getApplication(config: PartialApplicationConfig): Application { + const app = getApplication(config); + return app; + } +} diff --git a/examples/poa-sidechain/src/commands/transaction/get.ts b/examples/poa-sidechain/src/commands/transaction/get.ts new file mode 100644 index 00000000000..a537b15ae8b --- /dev/null +++ b/examples/poa-sidechain/src/commands/transaction/get.ts @@ -0,0 +1 @@ +export { TransactionGetCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/transaction/send.ts b/examples/poa-sidechain/src/commands/transaction/send.ts new file mode 100644 index 00000000000..e6754332cd8 --- /dev/null +++ b/examples/poa-sidechain/src/commands/transaction/send.ts @@ -0,0 +1 @@ +export { TransactionSendCommand } from 'lisk-commander'; diff --git a/examples/poa-sidechain/src/commands/transaction/sign.ts b/examples/poa-sidechain/src/commands/transaction/sign.ts new file mode 100644 index 00000000000..b55093102a7 --- /dev/null +++ b/examples/poa-sidechain/src/commands/transaction/sign.ts @@ -0,0 +1,20 @@ +/* eslint-disable class-methods-use-this */ +/* eslint-disable 
@typescript-eslint/explicit-member-accessibility */ +import { TransactionSignCommand } from 'lisk-commander'; +import { Application, PartialApplicationConfig } from 'lisk-sdk'; +import { getApplication } from '../../app/app'; + +type SignFlags = typeof TransactionSignCommand.flags & { [key: string]: Record }; + +export class SignCommand extends TransactionSignCommand { + static flags: SignFlags = { + ...TransactionSignCommand.flags, + }; + + static args = [...TransactionSignCommand.args]; + + public getApplication(config: PartialApplicationConfig): Application { + const app = getApplication(config); + return app; + } +} diff --git a/examples/poa-sidechain/test/.eslintrc.js b/examples/poa-sidechain/test/.eslintrc.js new file mode 100644 index 00000000000..f93c4465d72 --- /dev/null +++ b/examples/poa-sidechain/test/.eslintrc.js @@ -0,0 +1,7 @@ +module.exports = { + extends: ['lisk-base/ts-jest'], + parserOptions: { + project: './tsconfig.json', + tsconfigRootDir: __dirname, + }, +}; diff --git a/examples/poa-sidechain/test/_setup.js b/examples/poa-sidechain/test/_setup.js new file mode 100644 index 00000000000..aab218d192e --- /dev/null +++ b/examples/poa-sidechain/test/_setup.js @@ -0,0 +1,4 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const matchers = require('jest-extended'); + +expect.extend(matchers); diff --git a/examples/poa-sidechain/test/integration/.gitkeep b/examples/poa-sidechain/test/integration/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/poa-sidechain/test/network/.gitkeep b/examples/poa-sidechain/test/network/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/poa-sidechain/test/tsconfig.json b/examples/poa-sidechain/test/tsconfig.json new file mode 100644 index 00000000000..c0c763f8f4d --- /dev/null +++ b/examples/poa-sidechain/test/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "rootDir": "../" + }, + "include": 
["../src/**/*", "./**/*", "../package.json", "../config/**/*.json"] +} diff --git a/examples/poa-sidechain/test/types.ts b/examples/poa-sidechain/test/types.ts new file mode 100644 index 00000000000..deff13c121c --- /dev/null +++ b/examples/poa-sidechain/test/types.ts @@ -0,0 +1,16 @@ +/* + * Copyright © 2022 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + * + */ + +export type Awaited = T extends Promise ? U : T; diff --git a/examples/poa-sidechain/test/unit/modules/.gitkeep b/examples/poa-sidechain/test/unit/modules/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/poa-sidechain/test/utils/config.ts b/examples/poa-sidechain/test/utils/config.ts new file mode 100644 index 00000000000..5b37b43e3d9 --- /dev/null +++ b/examples/poa-sidechain/test/utils/config.ts @@ -0,0 +1,10 @@ +import { Config } from '@oclif/core'; + +import pJSON = require('../../package.json'); + +export const getConfig = async () => { + const config = await Config.load(); + config.pjson.lisk = { addressPrefix: 'lsk' }; + config.pjson.version = pJSON.version; + return config; +}; diff --git a/examples/poa-sidechain/tsconfig.json b/examples/poa-sidechain/tsconfig.json new file mode 100644 index 00000000000..42faa2a792d --- /dev/null +++ b/examples/poa-sidechain/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "forceConsistentCasingInFileNames": true, + "target": "es2019", + "module": "commonjs", + "moduleResolution": "node", + "newLine": "lf", + "importHelpers": true, + "noFallthroughCasesInSwitch": true, + "noImplicitReturns": true, + 
"noUnusedLocals": true, + "noUnusedParameters": true, + "pretty": true, + "removeComments": true, + "resolveJsonModule": true, + "sourceMap": true, + "strict": true, + "composite": true, + "declaration": true, + "noImplicitAny": false, + "skipLibCheck": true, + "rootDir": "./src", + "outDir": "./dist" + }, + "include": ["./src/**/*.ts", "./src/**/*.json"] +} diff --git a/framework/src/index.ts b/framework/src/index.ts index 0d35d808c97..cb36fd145eb 100644 --- a/framework/src/index.ts +++ b/framework/src/index.ts @@ -142,6 +142,7 @@ export { export { RewardMethod, RewardModule } from './modules/reward'; export { FeeMethod, FeeModule } from './modules/fee'; export { RandomMethod, RandomModule } from './modules/random'; +export { PoAModule, PoAMethod } from './modules/poa'; export { GenesisBlockExecuteContext, InsertAssetContext, diff --git a/framework/src/modules/poa/commands/register_authority.ts b/framework/src/modules/poa/commands/register_authority.ts new file mode 100644 index 00000000000..cf5d8947e84 --- /dev/null +++ b/framework/src/modules/poa/commands/register_authority.ts @@ -0,0 +1,104 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { address } from '@liskhq/lisk-cryptography'; +import { validator } from '@liskhq/lisk-validator'; +import { BaseCommand } from '../../base_command'; +import { registerAuthoritySchema } from '../schemas'; +import { + CommandExecuteContext, + CommandVerifyContext, + VerificationResult, + VerifyStatus, +} from '../../../state_machine'; +import { RegisterAuthorityParams, ValidatorsMethod, FeeMethod } from '../types'; +import { COMMAND_REGISTER_AUTHORITY, POA_VALIDATOR_NAME_REGEX } from '../constants'; +import { ValidatorStore, NameStore } from '../stores'; + +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#register-authority-command +export class RegisterAuthorityCommand extends BaseCommand { + public schema = registerAuthoritySchema; + private _validatorsMethod!: ValidatorsMethod; + private _feeMethod!: FeeMethod; + private _authorityRegistrationFee!: bigint; + + public get name(): string { + return COMMAND_REGISTER_AUTHORITY; + } + + public init(args: { authorityRegistrationFee: bigint }) { + this._authorityRegistrationFee = args.authorityRegistrationFee; + } + + public addDependencies(validatorsMethod: ValidatorsMethod, feeMethod: FeeMethod) { + this._validatorsMethod = validatorsMethod; + this._feeMethod = feeMethod; + } + + public async verify( + context: CommandVerifyContext, + ): Promise { + const { name } = context.params; + try { + validator.validate(registerAuthoritySchema, context.params); + } catch (err) { + return { + status: VerifyStatus.FAIL, + error: err as Error, + }; + } + + if (!POA_VALIDATOR_NAME_REGEX.test(name)) { + throw new Error(`Name does not comply with format ${POA_VALIDATOR_NAME_REGEX.toString()}.`); + } + + const nameExists = await this.stores.get(NameStore).has(context, Buffer.from(name)); + if (nameExists) { + throw new Error('Name already exists.'); + } + + const senderAddress = address.getAddressFromPublicKey(context.transaction.senderPublicKey); + const validatorExists = await 
this.stores.get(ValidatorStore).has(context, senderAddress); + if (validatorExists) { + throw new Error('Validator already exists.'); + } + + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { params } = context; + + const senderAddress = address.getAddressFromPublicKey(context.transaction.senderPublicKey); + this._feeMethod.payFee(context, this._authorityRegistrationFee); + + await this.stores.get(ValidatorStore).set(context, senderAddress, { + name: params.name, + }); + + await this.stores.get(NameStore).set(context, Buffer.from(params.name), { + address: senderAddress, + }); + + await this._validatorsMethod.registerValidatorKeys( + context, + senderAddress, + params.blsKey, + params.generatorKey, + params.proofOfPossession, + ); + } +} diff --git a/framework/src/modules/poa/commands/update_authority.ts b/framework/src/modules/poa/commands/update_authority.ts new file mode 100644 index 00000000000..63fe5c30575 --- /dev/null +++ b/framework/src/modules/poa/commands/update_authority.ts @@ -0,0 +1,176 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { MAX_UINT64 } from '@liskhq/lisk-validator'; +import { bls } from '@liskhq/lisk-cryptography'; +import { codec } from '@liskhq/lisk-codec'; +import { objects as objectUtils } from '@liskhq/lisk-utils'; +import { BaseCommand } from '../../base_command'; +import { updateAuthoritySchema, validatorSignatureMessageSchema } from '../schemas'; +import { + COMMAND_UPDATE_AUTHORITY, + MAX_NUM_VALIDATORS, + MESSAGE_TAG_POA, + EMPTY_BYTES, + UpdateAuthorityResult, + KEY_SNAPSHOT_0, + KEY_SNAPSHOT_2, +} from '../constants'; +import { + CommandExecuteContext, + CommandVerifyContext, + VerificationResult, + VerifyStatus, +} from '../../../state_machine'; +import { UpdateAuthorityParams, ValidatorsMethod } from '../types'; +import { ChainPropertiesStore, SnapshotStore, ValidatorStore } from '../stores'; +import { AuthorityUpdateEvent } from '../events/authority_update'; + +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#update-authority-command +export class UpdateAuthorityCommand extends BaseCommand { + public schema = updateAuthoritySchema; + private _validatorsMethod!: ValidatorsMethod; + + public get name(): string { + return COMMAND_UPDATE_AUTHORITY; + } + + public addDependencies(validatorsMethod: ValidatorsMethod) { + this._validatorsMethod = validatorsMethod; + } + + public async verify( + context: CommandVerifyContext, + ): Promise { + const { newValidators, threshold, validatorsUpdateNonce } = context.params; + + if (newValidators.length < 1 || newValidators.length > MAX_NUM_VALIDATORS) { + throw new Error( + `newValidators length must be between 1 and ${MAX_NUM_VALIDATORS} (inclusive).`, + ); + } + + const newValidatorsAddresses = newValidators.map(newValidator => newValidator.address); + if (!objectUtils.isBufferArrayOrdered(newValidatorsAddresses)) { + throw new Error('Addresses in newValidators are not lexicographically ordered.'); + } + + if (!objectUtils.bufferArrayUniqueItems(newValidatorsAddresses)) { + throw new 
Error('Addresses in newValidators are not unique.'); + } + + const validatorStore = this.stores.get(ValidatorStore); + let totalWeight = BigInt(0); + for (const newValidator of newValidators) { + const validatorExists = await validatorStore.has(context, newValidator.address); + if (!validatorExists) { + throw new Error( + `No validator found for given address ${newValidator.address.toString('hex')}.`, + ); + } + totalWeight += newValidator.weight; + } + + if (totalWeight > MAX_UINT64) { + throw new Error(`Validators total weight exceeds ${MAX_UINT64}.`); + } + + const minThreshold = totalWeight / BigInt(3) + BigInt(1); + if (threshold < minThreshold || threshold > totalWeight) { + throw new Error(`Threshold must be between ${minThreshold} and ${totalWeight} (inclusive).`); + } + + const chainPropertiesStore = await this.stores + .get(ChainPropertiesStore) + .get(context, EMPTY_BYTES); + if (validatorsUpdateNonce !== chainPropertiesStore.validatorsUpdateNonce) { + throw new Error( + `validatorsUpdateNonce must be equal to ${chainPropertiesStore.validatorsUpdateNonce}.`, + ); + } + + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { newValidators, threshold, validatorsUpdateNonce, aggregationBits, signature } = + context.params; + + // Verify weighted aggregated signature. 
+ const message = codec.encode(validatorSignatureMessageSchema, { + newValidators, + threshold, + validatorsUpdateNonce, + }); + + const validatorsInfos = []; + const snapshotStore = this.stores.get(SnapshotStore); + const snapshot0 = await snapshotStore.get(context, KEY_SNAPSHOT_0); + for (const snapshotValidator of snapshot0.validators) { + const keys = await this._validatorsMethod.getValidatorKeys( + context, + snapshotValidator.address, + ); + validatorsInfos.push({ + key: keys.blsKey, + weight: snapshotValidator.weight, + }); + } + + validatorsInfos.sort((a, b) => a.key.compare(b.key)); + const verified = bls.verifyWeightedAggSig( + validatorsInfos.map(validatorInfo => validatorInfo.key), + aggregationBits, + signature, + MESSAGE_TAG_POA, + context.chainID, + message, + validatorsInfos.map(validatorInfo => validatorInfo.weight), + snapshot0.threshold, + ); + + const authorityUpdateEvent = this.events.get(AuthorityUpdateEvent); + if (!verified) { + authorityUpdateEvent.log( + context, + { + result: UpdateAuthorityResult.FAIL_INVALID_SIGNATURE, + }, + true, + ); + throw new Error('Invalid weighted aggregated signature.'); + } + await snapshotStore.set(context, KEY_SNAPSHOT_2, { + validators: newValidators, + threshold, + }); + + const chainPropertiesStore = this.stores.get(ChainPropertiesStore); + const chainProperties = await chainPropertiesStore.get(context, EMPTY_BYTES); + await chainPropertiesStore.set(context, EMPTY_BYTES, { + ...chainProperties, + validatorsUpdateNonce: chainProperties.validatorsUpdateNonce + 1, + }); + + authorityUpdateEvent.log( + context, + { + result: UpdateAuthorityResult.SUCCESS, + }, + false, + ); + } +} diff --git a/framework/src/modules/poa/commands/update_generator_key.ts b/framework/src/modules/poa/commands/update_generator_key.ts new file mode 100644 index 00000000000..d3f7f4f994d --- /dev/null +++ b/framework/src/modules/poa/commands/update_generator_key.ts @@ -0,0 +1,74 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See 
the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { validator } from '@liskhq/lisk-validator'; +import { BaseCommand } from '../../base_command'; +import { updateGeneratorKeySchema } from '../schemas'; +import { COMMAND_UPDATE_KEY } from '../constants'; +import { + CommandExecuteContext, + CommandVerifyContext, + VerificationResult, + VerifyStatus, +} from '../../../state_machine'; +import { UpdateGeneratorKeyParams, ValidatorsMethod } from '../types'; +import { ValidatorStore } from '../stores'; + +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#update-generator-key-command +export class UpdateGeneratorKeyCommand extends BaseCommand { + public schema = updateGeneratorKeySchema; + private _validatorsMethod!: ValidatorsMethod; + + public get name(): string { + return COMMAND_UPDATE_KEY; + } + + public addDependencies(validatorsMethod: ValidatorsMethod) { + this._validatorsMethod = validatorsMethod; + } + + public async verify( + context: CommandVerifyContext, + ): Promise { + try { + validator.validate(updateGeneratorKeySchema, context.params); + } catch (err) { + return { + status: VerifyStatus.FAIL, + error: err as Error, + }; + } + + const validatorExists = await this.stores + .get(ValidatorStore) + .has(context, context.transaction.senderAddress); + if (!validatorExists) { + throw new Error('Validator does not exist.'); + } + + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { generatorKey } = context.params; + + await this._validatorsMethod.setValidatorGeneratorKey( + context, + 
context.transaction.senderAddress, + generatorKey, + ); + } +} diff --git a/framework/src/modules/poa/constants.ts b/framework/src/modules/poa/constants.ts new file mode 100644 index 00000000000..2e5d0f3a6c8 --- /dev/null +++ b/framework/src/modules/poa/constants.ts @@ -0,0 +1,45 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ +import { utils } from '@liskhq/lisk-cryptography'; + +export enum UpdateAuthorityResult { + SUCCESS = 0, + FAIL_INVALID_SIGNATURE, +} + +export const MODULE_NAME_POA = 'poa'; +export const MAX_LENGTH_NAME = 20; +export const LENGTH_BLS_KEY = 48; +export const LENGTH_PROOF_OF_POSSESSION = 96; +export const LENGTH_GENERATOR_KEY = 32; +export const NUM_BYTES_ADDRESS = 20; +export const MAX_NUM_VALIDATORS = 199; +export const POA_VALIDATOR_NAME_REGEX = /^[a-z0-9!@$&_.]+$/; +export const MESSAGE_TAG_POA = 'LSK_POA_'; +export const AUTHORITY_REGISTRATION_FEE = BigInt(1000000000); // Determined by Operator +export const EMPTY_BYTES = Buffer.alloc(0); +export const COMMAND_REGISTER_AUTHORITY = 'registerAuthority'; +export const COMMAND_UPDATE_KEY = 'updateKey'; +export const COMMAND_UPDATE_AUTHORITY = 'updateAuthority'; +export const MAX_UINT64 = BigInt(2) ** BigInt(64) - BigInt(1); +export const LENGTH_PROOF_OF_POSESSION = 96; +export const defaultConfig = { + authorityRegistrationFee: AUTHORITY_REGISTRATION_FEE.toString(), +}; + +// Store key +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#uint32be-function +export const KEY_SNAPSHOT_0 = utils.intToBuffer(0, 4); +export const KEY_SNAPSHOT_1 = 
utils.intToBuffer(1, 4); +export const KEY_SNAPSHOT_2 = utils.intToBuffer(2, 4); diff --git a/framework/src/modules/poa/endpoint.ts b/framework/src/modules/poa/endpoint.ts new file mode 100644 index 00000000000..c59e74e6c09 --- /dev/null +++ b/framework/src/modules/poa/endpoint.ts @@ -0,0 +1,111 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { address as cryptoAddress } from '@liskhq/lisk-cryptography'; +import { NotFoundError } from '@liskhq/lisk-db'; +import { BaseEndpoint } from '../base_endpoint'; +import { ValidatorStore } from './stores/validator'; +import { ModuleEndpointContext } from '../../types'; +import { KEY_SNAPSHOT_0 } from './constants'; +import { SnapshotStore } from './stores'; +import { Validator } from './types'; + +export class PoAEndpoint extends BaseEndpoint { + private _authorityRegistrationFee!: bigint; + + public init(authorityRegistrationFee: bigint) { + this._authorityRegistrationFee = authorityRegistrationFee; + } + + public async getValidator(context: ModuleEndpointContext): Promise { + const validatorSubStore = this.stores.get(ValidatorStore); + const { address } = context.params; + if (typeof address !== 'string') { + throw new Error('Parameter address must be a string.'); + } + cryptoAddress.validateLisk32Address(address); + + let validatorName: { name: string }; + try { + validatorName = await validatorSubStore.get( + context, + cryptoAddress.getAddressFromLisk32Address(address), + ); + } catch (error) { + if (!(error instanceof NotFoundError)) { + throw error; + } + + throw new 
Error(`Validator not found in snapshot for address ${address}`); + } + + const snapshotStore = this.stores.get(SnapshotStore); + const currentRoundSnapshot = await snapshotStore.get(context, KEY_SNAPSHOT_0); + const validatorInfo = currentRoundSnapshot.validators.find( + v => cryptoAddress.getLisk32AddressFromAddress(v.address) === address, + ); + if (!validatorInfo) { + throw new Error(`Validator not found in snapshot for address ${address}`); + } + + return { + ...validatorName, + address, + weight: validatorInfo.weight.toString(), + }; + } + + public async getAllValidators( + context: ModuleEndpointContext, + ): Promise<{ validators: Validator[] }> { + const validatorStore = this.stores.get(ValidatorStore); + const startBuf = Buffer.alloc(20); + const endBuf = Buffer.alloc(20, 255); + const validatorStoreData = await validatorStore.iterate(context, { + gte: startBuf, + lte: endBuf, + }); + + const snapshotStore = this.stores.get(SnapshotStore); + const currentRoundSnapshot = await snapshotStore.get(context, KEY_SNAPSHOT_0); + + const validatorsData: Validator[] = []; + for (const data of validatorStoreData) { + const address = cryptoAddress.getLisk32AddressFromAddress(data.key); + // `name` comes from type `ValidatorName` + const { name } = await validatorStore.get(context, data.key); + const activeValidator = currentRoundSnapshot.validators.find( + v => cryptoAddress.getLisk32AddressFromAddress(v.address) === address, + ); + + const validator: Validator = { + name, + address, + weight: activeValidator ? 
activeValidator.weight.toString() : '', + }; + validatorsData.push(validator); + } + + // This is needed since response from this endpoint is returning data in unexpected sorting order on next execution + // which can result in potential test/build failure + validatorsData.sort((v1, v2) => v1.name.localeCompare(v2.name, 'en')); + return { validators: validatorsData }; + } + + public getRegistrationFee(): { fee: string } { + return { + fee: this._authorityRegistrationFee.toString(), + }; + } +} diff --git a/framework/src/modules/poa/events/authority_update.ts b/framework/src/modules/poa/events/authority_update.ts new file mode 100644 index 00000000000..31b2597ef20 --- /dev/null +++ b/framework/src/modules/poa/events/authority_update.ts @@ -0,0 +1,40 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseEvent, EventQueuer } from '../../base_event'; +import { UpdateAuthorityResult } from '../constants'; + +export interface AuthorityUpdateData { + result: UpdateAuthorityResult; +} + +export const authorityUpdateDataSchema = { + $id: '/poa/events/authorityUpdate', + type: 'object', + required: ['result'], + properties: { + result: { + dataType: 'uint32', + fieldNumber: 1, + }, + }, +}; + +export class AuthorityUpdateEvent extends BaseEvent { + public schema = authorityUpdateDataSchema; + + public log(ctx: EventQueuer, data: AuthorityUpdateData, noRevert: boolean): void { + this.add(ctx, data, [], noRevert); + } +} diff --git a/framework/src/modules/poa/events/index.ts b/framework/src/modules/poa/events/index.ts new file mode 100644 index 00000000000..206ba71de27 --- /dev/null +++ b/framework/src/modules/poa/events/index.ts @@ -0,0 +1,13 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ diff --git a/framework/src/modules/poa/index.ts b/framework/src/modules/poa/index.ts new file mode 100644 index 00000000000..2d9f117ab7c --- /dev/null +++ b/framework/src/modules/poa/index.ts @@ -0,0 +1,16 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +export { PoAModule } from './module'; +export { PoAMethod } from './method'; diff --git a/framework/src/modules/poa/internal_method.ts b/framework/src/modules/poa/internal_method.ts new file mode 100644 index 00000000000..9885f7e1f2e --- /dev/null +++ b/framework/src/modules/poa/internal_method.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseMethod } from '../base_method'; + +export class PoAInternalMethod extends BaseMethod {} diff --git a/framework/src/modules/poa/method.ts b/framework/src/modules/poa/method.ts new file mode 100644 index 00000000000..b720957dd68 --- /dev/null +++ b/framework/src/modules/poa/method.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { BaseMethod } from '../base_method'; + +export class PoAMethod extends BaseMethod {} diff --git a/framework/src/modules/poa/module.ts b/framework/src/modules/poa/module.ts new file mode 100644 index 00000000000..02049d574d4 --- /dev/null +++ b/framework/src/modules/poa/module.ts @@ -0,0 +1,338 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { codec } from '@liskhq/lisk-codec'; +import { objects } from '@liskhq/lisk-utils'; +import { validator } from '@liskhq/lisk-validator'; +import { BaseModule, ModuleInitArgs, ModuleMetadata } from '../base_module'; +import { PoAMethod } from './method'; +import { PoAEndpoint } from './endpoint'; +import { AuthorityUpdateEvent } from './events/authority_update'; +import { ChainPropertiesStore, ValidatorStore, NameStore, SnapshotStore } from './stores'; +import { BlockAfterExecuteContext, GenesisBlockExecuteContext } from '../../state_machine'; +import { + MODULE_NAME_POA, + EMPTY_BYTES, + KEY_SNAPSHOT_0, + KEY_SNAPSHOT_1, + KEY_SNAPSHOT_2, + MAX_UINT64, + defaultConfig, +} from './constants'; +import { shuffleValidatorList } from './utils'; +import { NextValidatorsSetter, MethodContext } from '../../state_machine/types'; +import { + configSchema, + genesisPoAStoreSchema, + getAllValidatorsResponseSchema, + getRegistrationFeeResponseSchema, + getValidatorRequestSchema, + getValidatorResponseSchema, +} from './schemas'; +import { + FeeMethod, + GenesisPoAStore, + ValidatorsMethod, + RandomMethod, + ModuleConfigJSON, + ModuleConfig, +} from './types'; +import { 
RegisterAuthorityCommand } from './commands/register_authority'; +import { UpdateAuthorityCommand } from './commands/update_authority'; +import { UpdateGeneratorKeyCommand } from './commands/update_generator_key'; + +export class PoAModule extends BaseModule { + public method = new PoAMethod(this.stores, this.events); + public endpoint = new PoAEndpoint(this.stores, this.offchainStores); + private _randomMethod!: RandomMethod; + private _validatorsMethod!: ValidatorsMethod; + private _feeMethod!: FeeMethod; + private readonly _registerAuthorityCommand = new RegisterAuthorityCommand( + this.stores, + this.events, + ); + private readonly _updateAuthorityCommand = new UpdateAuthorityCommand(this.stores, this.events); + private readonly _updateGeneratorKeyCommand = new UpdateGeneratorKeyCommand( + this.stores, + this.events, + ); + private _moduleConfig!: ModuleConfig; + + public commands = [ + this._registerAuthorityCommand, + this._updateAuthorityCommand, + this._updateGeneratorKeyCommand, + ]; + + public constructor() { + super(); + this.events.register(AuthorityUpdateEvent, new AuthorityUpdateEvent(this.name)); + this.stores.register(ValidatorStore, new ValidatorStore(this.name, 0)); + this.stores.register(ChainPropertiesStore, new ChainPropertiesStore(this.name, 1)); + this.stores.register(NameStore, new NameStore(this.name, 2)); + this.stores.register(SnapshotStore, new SnapshotStore(this.name, 3)); + } + + public get name() { + return 'poa'; + } + + public addDependencies( + validatorsMethod: ValidatorsMethod, + feeMethod: FeeMethod, + randomMethod: RandomMethod, + ) { + this._validatorsMethod = validatorsMethod; + this._feeMethod = feeMethod; + this._randomMethod = randomMethod; + + // Add dependencies to commands + this._registerAuthorityCommand.addDependencies(this._validatorsMethod, this._feeMethod); + this._updateAuthorityCommand.addDependencies(this._validatorsMethod); + this._updateGeneratorKeyCommand.addDependencies(this._validatorsMethod); + } + + 
public metadata(): ModuleMetadata { + return { + ...this.baseMetadata(), + endpoints: [ + { + name: this.endpoint.getValidator.name, + request: getValidatorRequestSchema, + response: getValidatorResponseSchema, + }, + { + name: this.endpoint.getAllValidators.name, + response: getAllValidatorsResponseSchema, + }, + { + name: this.endpoint.getRegistrationFee.name, + response: getRegistrationFeeResponseSchema, + }, + ], + assets: [], + }; + } + + // eslint-disable-next-line @typescript-eslint/require-await + public async init(args: ModuleInitArgs) { + const config = objects.mergeDeep({}, { ...defaultConfig }, args.moduleConfig); + validator.validate(configSchema, config); + + this._moduleConfig = { + ...config, + authorityRegistrationFee: BigInt(config.authorityRegistrationFee), + }; + this._registerAuthorityCommand.init(this._moduleConfig); + this.endpoint.init(this._moduleConfig.authorityRegistrationFee); + } + + // LIP: https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#after-transactions-execution + public async afterTransactionsExecute(context: BlockAfterExecuteContext): Promise { + const chainPropertiesStore = this.stores.get(ChainPropertiesStore); + const chainProperties = await chainPropertiesStore.get(context, EMPTY_BYTES); + + if (context.header.height === chainProperties.roundEndHeight) { + const snapshotStore = this.stores.get(SnapshotStore); + const snapshot0 = await snapshotStore.get(context, KEY_SNAPSHOT_0); + const previousLengthValidators = snapshot0.validators.length; + + const snapshot1 = await snapshotStore.get(context, KEY_SNAPSHOT_1); + // Update the chain information for the next round + + // snapshot0 = snapshot1 + await snapshotStore.set(context, KEY_SNAPSHOT_0, snapshot1); + const snapshot2 = await snapshotStore.get(context, KEY_SNAPSHOT_2); + // snapshot1 = snapshot2 + await snapshotStore.set(context, KEY_SNAPSHOT_1, snapshot2); + + // Reshuffle the list of validators and pass it to the Validators module + const roundStartHeight 
= chainProperties.roundEndHeight - previousLengthValidators + 1; + const randomSeed = await this._randomMethod.getRandomBytes( + context, + roundStartHeight, + previousLengthValidators, + ); + + const nextValidators = shuffleValidatorList(randomSeed, snapshot1.validators); + + await this._validatorsMethod.setValidatorsParams( + context as MethodContext, + context as NextValidatorsSetter, + snapshot1.threshold, + snapshot1.threshold, + nextValidators.map(v => ({ + address: v.address, + bftWeight: v.weight, + })), + ); + + chainProperties.roundEndHeight += snapshot1.validators.length; + + await chainPropertiesStore.set(context, EMPTY_BYTES, chainProperties); + } + } + + public async initGenesisState(context: GenesisBlockExecuteContext): Promise { + const genesisBlockAssetBytes = context.assets.getAsset(MODULE_NAME_POA); + if (!genesisBlockAssetBytes) { + return; + } + const asset = codec.decode(genesisPoAStoreSchema, genesisBlockAssetBytes); + validator.validate(genesisPoAStoreSchema, asset); + + const { validators, snapshotSubstore } = asset; + + // Check that the name property of all entries in the validators array are pairwise distinct. + const validatorNames = validators.map(v => v.name); + if (validatorNames.length !== new Set(validatorNames).size) { + throw new Error('`name` property of all entries in the validators must be distinct.'); + } + + // Check that the address properties of all entries in the validators array are pairwise distinct. + const validatorAddresses = validators.map(v => v.address); + if (!objects.bufferArrayUniqueItems(validatorAddresses)) { + throw new Error('`address` property of all entries in validators must be distinct.'); + } + + const sortedValidatorsByAddress = [...validatorAddresses].sort((a, b) => a.compare(b)); + for (let i = 0; i < validators.length; i += 1) { + // Check that entries in the validators array are ordered lexicographically according to address. 
+ if (!validatorAddresses[i].equals(sortedValidatorsByAddress[i])) { + throw new Error('`validators` must be ordered lexicographically by address.'); + } + + if (!/^[a-z0-9!@$&_.]+$/g.test(validators[i].name)) { + throw new Error('`name` property is invalid. Must contain only characters a-z0-9!@$&_.'); + } + } + + const { activeValidators, threshold } = snapshotSubstore; + const activeValidatorAddresses = activeValidators.map(v => v.address); + const sortedActiveValidatorsByAddress = [...activeValidatorAddresses].sort((a, b) => + a.compare(b), + ); + const validatorAddressesString = validatorAddresses.map(a => a.toString('hex')); + let totalWeight = BigInt(0); + + // Check that the address properties of entries in the snapshotSubstore.activeValidators are pairwise distinct. + if (!objects.bufferArrayUniqueItems(activeValidatorAddresses)) { + throw new Error('`address` properties in `activeValidators` must be distinct.'); + } + + for (let i = 0; i < activeValidators.length; i += 1) { + // Check that entries in the snapshotSubstore.activeValidators array are ordered lexicographically according to address. + if (!activeValidators[i].address.equals(sortedActiveValidatorsByAddress[i])) { + throw new Error( + '`activeValidators` must be ordered lexicographically by address property.', + ); + } + + // Check that for every element activeValidator in the snapshotSubstore.activeValidators array, there is an entry validator in the validators array with validator.address == activeValidator.address. + if (!validatorAddressesString.includes(activeValidators[i].address.toString('hex'))) { + throw new Error('`activeValidator` address is missing from validators array.'); + } + + // Check that the weight property of every entry in the snapshotSubstore.activeValidators array is a positive integer. 
+ if (activeValidators[i].weight <= BigInt(0)) { + throw new Error('`activeValidators` weight must be positive integer.'); + } + + totalWeight += activeValidators[i].weight; + } + + if (totalWeight > MAX_UINT64) { + throw new Error('Total weight `activeValidators` exceeds maximum value.'); + } + + // Check that the value of snapshotSubstore.threshold is within range + if (threshold < totalWeight / BigInt(3) + BigInt(1) || threshold > totalWeight) { + throw new Error('`threshold` in snapshot substore is not within range.'); + } + + // Create an entry in the validator substore for each entry validator in the validators + // Create an entry in the name substore for each entry validator in the validators + const validatorStore = this.stores.get(ValidatorStore); + const nameStore = this.stores.get(NameStore); + + for (const currentValidator of validators) { + await validatorStore.set(context, currentValidator.address, { name: currentValidator.name }); + await nameStore.set(context, Buffer.from(currentValidator.name, 'utf-8'), { + address: currentValidator.address, + }); + } + + // Create three entries in the snapshot substore indicating a snapshot of the next rounds of validators + const snapshotStore = this.stores.get(SnapshotStore); + await snapshotStore.set(context, KEY_SNAPSHOT_0, { + ...snapshotSubstore, + validators: activeValidators, + }); + await snapshotStore.set(context, KEY_SNAPSHOT_1, { + ...snapshotSubstore, + validators: activeValidators, + }); + await snapshotStore.set(context, KEY_SNAPSHOT_2, { + ...snapshotSubstore, + validators: activeValidators, + }); + + // Create an entry in the chain properties substore + const { header } = context; + const chainPropertiesStore = this.stores.get(ChainPropertiesStore); + await chainPropertiesStore.set(context, EMPTY_BYTES, { + roundEndHeight: header.height, + validatorsUpdateNonce: 0, + }); + } + + public async finalizeGenesisState(context: GenesisBlockExecuteContext): Promise { + const genesisBlockAssetBytes = 
context.assets.getAsset(MODULE_NAME_POA); + if (!genesisBlockAssetBytes) { + return; + } + const asset = codec.decode(genesisPoAStoreSchema, genesisBlockAssetBytes); + const snapshotStore = this.stores.get(SnapshotStore); + const currentRoundSnapshot = await snapshotStore.get(context, KEY_SNAPSHOT_0); + const chainPropertiesStore = this.stores.get(ChainPropertiesStore); + const chainProperties = await chainPropertiesStore.get(context, EMPTY_BYTES); + + await chainPropertiesStore.set(context, EMPTY_BYTES, { + ...chainProperties, + roundEndHeight: chainProperties.roundEndHeight + currentRoundSnapshot.validators.length, + }); + + // Pass the required information to the Validators module. + const methodContext = context.getMethodContext(); + + // Pass the BLS keys and generator keys to the Validators module. + for (const v of asset.validators) { + await this._validatorsMethod.registerValidatorKeys( + methodContext, + v.address, + v.blsKey, + v.generatorKey, + v.proofOfPossession, + ); + } + + await this._validatorsMethod.setValidatorsParams( + methodContext, + context, + currentRoundSnapshot.threshold, + currentRoundSnapshot.threshold, + currentRoundSnapshot.validators.map(v => ({ address: v.address, bftWeight: v.weight })), + ); + } +} diff --git a/framework/src/modules/poa/schemas.ts b/framework/src/modules/poa/schemas.ts new file mode 100644 index 00000000000..c06d1ca840e --- /dev/null +++ b/framework/src/modules/poa/schemas.ts @@ -0,0 +1,287 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { + LENGTH_BLS_KEY, + LENGTH_GENERATOR_KEY, + LENGTH_PROOF_OF_POSSESSION, + MAX_LENGTH_NAME, + NUM_BYTES_ADDRESS, + MAX_NUM_VALIDATORS, +} from './constants'; + +export const configSchema = { + $id: '/poa/config', + type: 'object', + properties: { + authorityRegistrationFee: { + type: 'string', + format: 'uint64', + }, + }, +}; + +const validator = { + type: 'object', + required: ['address', 'weight'], + properties: { + address: { + dataType: 'bytes', + minLength: NUM_BYTES_ADDRESS, + maxLength: NUM_BYTES_ADDRESS, + fieldNumber: 1, + }, + weight: { + dataType: 'uint64', + fieldNumber: 2, + }, + }, +}; + +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#register-authority-command +export const registerAuthoritySchema = { + $id: '/poa/command/registerAuthority', + type: 'object', + required: ['name', 'blsKey', 'proofOfPossession', 'generatorKey'], + properties: { + name: { + dataType: 'string', + minLength: 1, + maxLength: MAX_LENGTH_NAME, + fieldNumber: 1, + }, + blsKey: { + dataType: 'bytes', + minLength: LENGTH_BLS_KEY, + maxLength: LENGTH_BLS_KEY, + fieldNumber: 2, + }, + proofOfPossession: { + dataType: 'bytes', + minLength: LENGTH_PROOF_OF_POSSESSION, + maxLength: LENGTH_PROOF_OF_POSSESSION, + fieldNumber: 3, + }, + generatorKey: { + dataType: 'bytes', + minLength: LENGTH_GENERATOR_KEY, + maxLength: LENGTH_GENERATOR_KEY, + fieldNumber: 4, + }, + }, +}; + +export const updateGeneratorKeySchema = { + $id: '/poa/command/updateGeneratorKey', + type: 'object', + required: ['generatorKey'], + properties: { + generatorKey: { + dataType: 'bytes', + minLength: LENGTH_GENERATOR_KEY, + maxLength: LENGTH_GENERATOR_KEY, + fieldNumber: 1, + }, + }, +}; + +export const updateAuthoritySchema = { + $id: '/poa/command/updateAuthority', + type: 'object', + required: ['newValidators', 'threshold', 'validatorsUpdateNonce', 'signature', 'aggregationBits'], + properties: { + newValidators: { + type: 'array', + fieldNumber: 1, + items: validator, + }, + 
threshold: { + dataType: 'uint64', + fieldNumber: 2, + }, + validatorsUpdateNonce: { + dataType: 'uint32', + fieldNumber: 3, + }, + signature: { + dataType: 'bytes', + fieldNumber: 4, + }, + aggregationBits: { + dataType: 'bytes', + fieldNumber: 5, + }, + }, +}; + +export const validatorSignatureMessageSchema = { + $id: '/poa/command/validatorSignatureMessage', + type: 'object', + required: ['newValidators', 'threshold', 'validatorsUpdateNonce'], + properties: { + newValidators: { + type: 'array', + fieldNumber: 1, + items: validator, + }, + threshold: { + dataType: 'uint64', + fieldNumber: 2, + }, + validatorsUpdateNonce: { + dataType: 'uint32', + fieldNumber: 3, + }, + }, +}; + +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0047.md#genesis-poa-store-schema +export const genesisPoAStoreSchema = { + $id: '/poa/genesis/genesisPoAStoreSchema', + type: 'object', + required: ['validators', 'snapshotSubstore'], + properties: { + validators: { + type: 'array', + fieldNumber: 1, + items: { + type: 'object', + required: ['address', 'name', 'blsKey', 'proofOfPossession', 'generatorKey'], + properties: { + address: { + dataType: 'bytes', + minLength: NUM_BYTES_ADDRESS, + maxLength: NUM_BYTES_ADDRESS, + fieldNumber: 1, + }, + name: { + dataType: 'string', + minLength: 1, + maxLength: MAX_LENGTH_NAME, + fieldNumber: 2, + }, + blsKey: { + dataType: 'bytes', + minLength: LENGTH_BLS_KEY, + maxLength: LENGTH_BLS_KEY, + fieldNumber: 3, + }, + proofOfPossession: { + dataType: 'bytes', + minLength: LENGTH_PROOF_OF_POSSESSION, + maxLength: LENGTH_PROOF_OF_POSSESSION, + fieldNumber: 4, + }, + generatorKey: { + dataType: 'bytes', + minLength: LENGTH_GENERATOR_KEY, + maxLength: LENGTH_GENERATOR_KEY, + fieldNumber: 5, + }, + }, + }, + }, + snapshotSubstore: { + type: 'object', + fieldNumber: 2, + properties: { + activeValidators: { + type: 'array', + fieldNumber: 1, + items: { + type: 'object', + required: ['address', 'weight'], + properties: { + address: { + dataType: 
'bytes', + minLength: NUM_BYTES_ADDRESS, + maxLength: NUM_BYTES_ADDRESS, + fieldNumber: 1, + }, + weight: { + dataType: 'uint64', + fieldNumber: 2, + }, + }, + }, + minItems: 1, + maxItems: MAX_NUM_VALIDATORS, + }, + threshold: { + dataType: 'uint64', + fieldNumber: 2, + }, + }, + required: ['activeValidators', 'threshold'], + }, + }, +}; + +const validatorJSONSchema = { + type: 'object', + required: ['address', 'name', 'weight'], + properties: { + address: { + type: 'string', + format: 'lisk32', + }, + name: { + type: 'string', + }, + weight: { + type: 'string', + format: 'uint64', + }, + }, +}; + +export const getValidatorRequestSchema = { + $id: 'modules/poa/endpoint/getValidatorRequest', + type: 'object', + required: ['address'], + properties: { + address: { + dataType: 'string', + format: 'lisk32', + }, + }, +}; + +export const getValidatorResponseSchema = { + $id: 'modules/poa/endpoint/getValidatorResponse', + ...validatorJSONSchema, +}; + +export const getAllValidatorsResponseSchema = { + $id: 'modules/poa/endpoint/getAllValidatorsResponse', + type: 'object', + required: ['validators'], + properties: { + validators: { + type: 'array', + items: validatorJSONSchema, + }, + }, +}; + +export const getRegistrationFeeResponseSchema = { + $id: 'modules/poa/endpoint/getRegistrationFeeResponse', + type: 'object', + required: ['fee'], + properties: { + fee: { + type: 'string', + }, + }, +}; diff --git a/framework/src/modules/poa/stores/chain_properties.ts b/framework/src/modules/poa/stores/chain_properties.ts new file mode 100644 index 00000000000..f1c95327184 --- /dev/null +++ b/framework/src/modules/poa/stores/chain_properties.ts @@ -0,0 +1,39 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ +import { BaseStore } from '../../base_store'; + +export interface ChainProperties { + roundEndHeight: number; + validatorsUpdateNonce: number; +} + +export const chainPropertiesSchema = { + $id: '/poa/chainProperties', + type: 'object', + required: ['roundEndHeight', 'validatorsUpdateNonce'], + properties: { + roundEndHeight: { + dataType: 'uint32', + fieldNumber: 1, + }, + validatorsUpdateNonce: { + dataType: 'uint32', + fieldNumber: 2, + }, + }, +}; + +export class ChainPropertiesStore extends BaseStore { + public schema = chainPropertiesSchema; +} diff --git a/framework/src/modules/poa/stores/index.ts b/framework/src/modules/poa/stores/index.ts new file mode 100644 index 00000000000..5ea18ba0cc0 --- /dev/null +++ b/framework/src/modules/poa/stores/index.ts @@ -0,0 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export * from './chain_properties'; +export * from './name'; +export * from './snapshot'; +export * from './validator'; diff --git a/framework/src/modules/poa/stores/name.ts b/framework/src/modules/poa/stores/name.ts new file mode 100644 index 00000000000..0419c6cc490 --- /dev/null +++ b/framework/src/modules/poa/stores/name.ts @@ -0,0 +1,37 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ +import { BaseStore } from '../../base_store'; +import { NUM_BYTES_ADDRESS } from '../constants'; + +export interface ValidatorAddress { + address: Buffer; +} + +export const validatorAddressSchema = { + $id: '/poa/validatorAddress', + type: 'object', + required: ['address'], + properties: { + address: { + dataType: 'bytes', + fieldNumber: 1, + minLength: NUM_BYTES_ADDRESS, + maxLength: NUM_BYTES_ADDRESS, + }, + }, +}; + +export class NameStore extends BaseStore { + public schema = validatorAddressSchema; +} diff --git a/framework/src/modules/poa/stores/snapshot.ts b/framework/src/modules/poa/stores/snapshot.ts new file mode 100644 index 00000000000..d4fe04d58b3 --- /dev/null +++ b/framework/src/modules/poa/stores/snapshot.ts @@ -0,0 +1,61 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ +import { BaseStore } from '../../base_store'; +import { NUM_BYTES_ADDRESS } from '../constants'; +import { ActiveValidator } from '../types'; + +export interface Validator { + address: Buffer; + weight: bigint; +} +export interface SnapshotObject { + validators: ActiveValidator[]; + threshold: bigint; +} + +export const snapshotSchema = { + $id: '/poa/snapshot', + type: 'object', + required: ['validators', 'threshold'], + properties: { + validators: { + type: 'array', + fieldNumber: 1, + items: { + type: 'object', + required: ['address', 'weight'], + properties: { + address: { + dataType: 'bytes', + minLength: NUM_BYTES_ADDRESS, + maxLength: NUM_BYTES_ADDRESS, + fieldNumber: 1, + }, + weight: { + dataType: 'uint64', + fieldNumber: 2, + }, + }, + }, + }, + threshold: { + dataType: 'uint64', + fieldNumber: 2, + }, + }, +}; + +export class SnapshotStore extends BaseStore { + public schema = snapshotSchema; +} diff --git a/framework/src/modules/poa/stores/validator.ts b/framework/src/modules/poa/stores/validator.ts new file mode 100644 index 00000000000..d176dd9843d --- /dev/null +++ b/framework/src/modules/poa/stores/validator.ts @@ -0,0 +1,37 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { BaseStore } from '../../base_store'; +import { MAX_LENGTH_NAME } from '../constants'; + +export interface ValidatorName { + name: string; +} + +export const validatorNameSchema = { + $id: '/poa/validatorName', + type: 'object', + required: ['name'], + properties: { + name: { + dataType: 'string', + fieldNumber: 1, + minLength: 1, + maxLength: MAX_LENGTH_NAME, + }, + }, +}; + +export class ValidatorStore extends BaseStore { + public schema = validatorNameSchema; +} diff --git a/framework/src/modules/poa/types.ts b/framework/src/modules/poa/types.ts new file mode 100644 index 00000000000..134ca1c4567 --- /dev/null +++ b/framework/src/modules/poa/types.ts @@ -0,0 +1,132 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { + ImmutableMethodContext, + MethodContext, + NextValidatorsSetter, +} from '../../state_machine/types'; +import { JSONObject } from '../../types'; + +export interface ModuleConfig { + authorityRegistrationFee: bigint; +} + +export type ModuleConfigJSON = JSONObject; +export interface RegisterAuthorityParams { + name: string; + blsKey: Buffer; + generatorKey: Buffer; + proofOfPossession: Buffer; +} + +export interface UpdateAuthorityParams { + newValidators: { + address: Buffer; + weight: bigint; + }[]; + threshold: bigint; + validatorsUpdateNonce: number; + signature: Buffer; + aggregationBits: Buffer; +} + +export interface ValidatorWeightWithRoundHash { + readonly address: Buffer; + weight: bigint; + roundHash: Buffer; +} + +export interface ValidatorsMethod { + setValidatorGeneratorKey( + methodContext: MethodContext, + validatorAddress: Buffer, + generatorKey: Buffer, + ): Promise; + registerValidatorKeys( + methodContext: MethodContext, + validatorAddress: Buffer, + blsKey: Buffer, + generatorKey: Buffer, + proofOfPossession: Buffer, + ): Promise; + registerValidatorWithoutBLSKey( + methodContext: MethodContext, + validatorAddress: Buffer, + generatorKey: Buffer, + ): Promise; + getValidatorKeys(methodContext: ImmutableMethodContext, address: Buffer): Promise; + getGeneratorsBetweenTimestamps( + methodContext: ImmutableMethodContext, + startTimestamp: number, + endTimestamp: number, + ): Promise>; + setValidatorsParams( + methodContext: MethodContext, + validatorSetter: NextValidatorsSetter, + preCommitThreshold: bigint, + certificateThreshold: bigint, + validators: { address: Buffer; bftWeight: bigint }[], + ): Promise; +} + +export interface RandomMethod { + getRandomBytes( + methodContext: ImmutableMethodContext, + height: number, + numberOfSeeds: number, + ): Promise; +} + +export interface ValidatorKeys { + generatorKey: Buffer; + blsKey: Buffer; +} + +export interface FeeMethod { + payFee(methodContext: MethodContext, amount: bigint): 
void; +} + +interface PoAValidator { + address: Buffer; + name: string; + blsKey: Buffer; + proofOfPossession: Buffer; + generatorKey: Buffer; +} + +export interface ActiveValidator { + address: Buffer; + weight: bigint; +} + +export interface SnapshotSubstore { + activeValidators: ActiveValidator[]; + threshold: bigint; +} + +export interface GenesisPoAStore { + validators: PoAValidator[]; + snapshotSubstore: SnapshotSubstore; +} + +export interface UpdateGeneratorKeyParams { + generatorKey: Buffer; +} + +export interface Validator { + address: string; + name: string; + weight: string; +} diff --git a/framework/src/modules/poa/utils.ts b/framework/src/modules/poa/utils.ts new file mode 100644 index 00000000000..dc7fdfd8b01 --- /dev/null +++ b/framework/src/modules/poa/utils.ts @@ -0,0 +1,43 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; +import { ValidatorWeightWithRoundHash } from './types'; +import { Validator } from './stores'; + +// Same as pos/utils/shuffleValidatorList +export const shuffleValidatorList = ( + roundSeed: Buffer, + validators: Validator[], +): ValidatorWeightWithRoundHash[] => { + const validatorsWithRoundHash: ValidatorWeightWithRoundHash[] = []; + for (const validator of validators) { + const seedSource = Buffer.concat([roundSeed, validator.address]); + validatorsWithRoundHash.push({ + ...validator, + roundHash: utils.hash(seedSource), + }); + } + + validatorsWithRoundHash.sort((validator1, validator2) => { + const diff = validator1.roundHash.compare(validator2.roundHash); + if (diff !== 0) { + return diff; + } + + return validator1.address.compare(validator2.address); + }); + + return validatorsWithRoundHash; +}; diff --git a/framework/test/unit/modules/poa/commands/register_authority.spec.ts b/framework/test/unit/modules/poa/commands/register_authority.spec.ts new file mode 100644 index 00000000000..244c27961d8 --- /dev/null +++ b/framework/test/unit/modules/poa/commands/register_authority.spec.ts @@ -0,0 +1,210 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { address, utils } from '@liskhq/lisk-cryptography'; +import { TransactionAttrs } from '@liskhq/lisk-chain'; +import { codec } from '@liskhq/lisk-codec'; +import * as testing from '../../../../../src/testing'; +import { + CommandExecuteContext, + CommandVerifyContext, + Transaction, + VerifyStatus, + PoAModule, +} from '../../../../../src'; +import { RegisterAuthorityCommand } from '../../../../../src/modules/poa/commands/register_authority'; +import { + COMMAND_REGISTER_AUTHORITY, + AUTHORITY_REGISTRATION_FEE, + LENGTH_BLS_KEY, + LENGTH_PROOF_OF_POSSESSION, + LENGTH_GENERATOR_KEY, + MODULE_NAME_POA, + POA_VALIDATOR_NAME_REGEX, +} from '../../../../../src/modules/poa/constants'; + +import { registerAuthoritySchema } from '../../../../../src/modules/poa/schemas'; +import { RegisterAuthorityParams, ValidatorsMethod } from '../../../../../src/modules/poa/types'; + +import { createStoreGetter } from '../../../../../src/testing/utils'; +import { NameStore, ValidatorStore } from '../../../../../src/modules/poa/stores'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; +import { ED25519_PUBLIC_KEY_LENGTH } from '../../../../../src/modules/validators/constants'; + +describe('RegisterAuthority', () => { + const poaModule = new PoAModule(); + let registerAuthorityCommand: RegisterAuthorityCommand; + let mockValidatorsMethod: ValidatorsMethod; + let mockFeeMethod: any; + let stateStore: PrefixedStateReadWriter; + let validatorStore: ValidatorStore; + let nameStore: NameStore; + + const registerAuthorityTransactionParams = { + name: 'max', + blsKey: utils.getRandomBytes(LENGTH_BLS_KEY), + proofOfPossession: utils.getRandomBytes(LENGTH_PROOF_OF_POSSESSION), + generatorKey: utils.getRandomBytes(LENGTH_GENERATOR_KEY), + }; + + const publicKey = utils.getRandomBytes(ED25519_PUBLIC_KEY_LENGTH); + const chainID = Buffer.from([0, 0, 0, 1]); + + 
const buildTransaction = (transaction: Partial): Transaction => { + return new Transaction({ + module: transaction.module ?? MODULE_NAME_POA, + command: transaction.command ?? COMMAND_REGISTER_AUTHORITY, + senderPublicKey: transaction.senderPublicKey ?? publicKey, + nonce: transaction.nonce ?? BigInt(0), + fee: transaction.fee ?? AUTHORITY_REGISTRATION_FEE, + params: + transaction.params ?? + codec.encode(registerAuthoritySchema, registerAuthorityTransactionParams), + signatures: transaction.signatures ?? [publicKey], + }); + }; + + beforeEach(async () => { + registerAuthorityCommand = new RegisterAuthorityCommand(poaModule.stores, poaModule.events); + mockValidatorsMethod = { + setValidatorGeneratorKey: jest.fn(), + registerValidatorKeys: jest.fn(), + registerValidatorWithoutBLSKey: jest.fn(), + getValidatorKeys: jest.fn(), + getGeneratorsBetweenTimestamps: jest.fn(), + setValidatorsParams: jest.fn(), + }; + mockFeeMethod = { + payFee: jest.fn(), + }; + (poaModule as any)['_registerAuthorityCommand'] = registerAuthorityCommand; + await poaModule.init({ + genesisConfig: {} as any, + moduleConfig: { authorityRegistrationFee: AUTHORITY_REGISTRATION_FEE.toString() }, + }); + registerAuthorityCommand.addDependencies(mockValidatorsMethod, mockFeeMethod); + + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + validatorStore = poaModule.stores.get(ValidatorStore); + nameStore = poaModule.stores.get(NameStore); + }); + + describe('verify', () => { + let context: CommandVerifyContext; + beforeEach(() => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({}), + chainID, + }) + .createCommandVerifyContext(registerAuthoritySchema); + }); + + it('should return error when name does not comply regex', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: codec.encode(registerAuthoritySchema, { + ...registerAuthorityTransactionParams, + name: 
'###', + }), + }), + chainID, + }) + .createCommandVerifyContext(registerAuthoritySchema); + + await expect(registerAuthorityCommand.verify(context)).rejects.toThrow( + `Name does not comply with format ${POA_VALIDATOR_NAME_REGEX.toString()}.`, + ); + }); + + it('should return error when name already exist', async () => { + await nameStore.set( + createStoreGetter(stateStore), + Buffer.from(registerAuthorityTransactionParams.name), + { + address: address.getAddressFromPublicKey(context.transaction.senderPublicKey), + }, + ); + + await expect(registerAuthorityCommand.verify(context)).rejects.toThrow( + 'Name already exists.', + ); + }); + + it('should return error when senderAddress already exist', async () => { + await validatorStore.set( + createStoreGetter(stateStore), + address.getAddressFromPublicKey(publicKey), + { + name: registerAuthorityTransactionParams.name, + }, + ); + + await expect(registerAuthorityCommand.verify(context)).rejects.toThrow( + 'Validator already exists.', + ); + }); + + it('should return OK when transaction is valid', async () => { + const result = await registerAuthorityCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.OK); + }); + }); + + describe('execute', () => { + let context: CommandExecuteContext; + beforeEach(() => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({}), + chainID, + }) + .createCommandExecuteContext(registerAuthoritySchema); + }); + + it('should call registerValidatorKeys', async () => { + await registerAuthorityCommand.execute(context); + + expect(mockFeeMethod.payFee).toHaveBeenCalledWith( + expect.anything(), + AUTHORITY_REGISTRATION_FEE, + ); + await expect( + validatorStore.has( + createStoreGetter(stateStore), + address.getAddressFromPublicKey(publicKey), + ), + ).resolves.toBe(true); + await expect( + nameStore.has( + createStoreGetter(stateStore), + Buffer.from(registerAuthorityTransactionParams.name), + ), + ).resolves.toBe(true); + 
expect(mockValidatorsMethod.registerValidatorKeys).toHaveBeenCalledWith( + expect.anything(), + address.getAddressFromPublicKey(publicKey), + context.params.blsKey, + context.params.generatorKey, + context.params.proofOfPossession, + ); + }); + }); +}); diff --git a/framework/test/unit/modules/poa/commands/update_authority.spec.ts b/framework/test/unit/modules/poa/commands/update_authority.spec.ts new file mode 100644 index 00000000000..eff3fd528ff --- /dev/null +++ b/framework/test/unit/modules/poa/commands/update_authority.spec.ts @@ -0,0 +1,420 @@ +import { bls, utils } from '@liskhq/lisk-cryptography'; +import { codec } from '@liskhq/lisk-codec'; +import { TransactionAttrs } from '@liskhq/lisk-chain'; +import { MAX_UINT64 } from '@liskhq/lisk-validator'; +import { + CommandExecuteContext, + CommandVerifyContext, + PoAModule, + Transaction, + VerifyStatus, +} from '../../../../../src'; +import { UpdateAuthorityCommand } from '../../../../../src/modules/poa/commands/update_authority'; +import { UpdateAuthorityParams, ValidatorsMethod } from '../../../../../src/modules/poa/types'; +import { + AUTHORITY_REGISTRATION_FEE, + COMMAND_UPDATE_AUTHORITY, + EMPTY_BYTES, + KEY_SNAPSHOT_0, + KEY_SNAPSHOT_2, + MAX_NUM_VALIDATORS, + MODULE_NAME_POA, + UpdateAuthorityResult, +} from '../../../../../src/modules/poa/constants'; +import { updateAuthoritySchema } from '../../../../../src/modules/poa/schemas'; +import * as testing from '../../../../../src/testing'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { + ChainPropertiesStore, + SnapshotStore, + ValidatorStore, +} from '../../../../../src/modules/poa/stores'; +import { createStoreGetter } from '../../../../../src/testing/utils'; +import { AuthorityUpdateEvent } from '../../../../../src/modules/poa/events/authority_update'; +import { EventQueue } from '../../../../../src/state_machine'; 
+import { ED25519_PUBLIC_KEY_LENGTH } from '../../../../../src/modules/validators/constants'; + +describe('UpdateAuthority', () => { + const poaModule = new PoAModule(); + let updateAuthorityCommand: UpdateAuthorityCommand; + let mockValidatorsMethod: ValidatorsMethod; + let stateStore: PrefixedStateReadWriter; + let validatorStore: ValidatorStore; + let chainPropertiesStore: ChainPropertiesStore; + let snapshotStore: SnapshotStore; + + const address0 = Buffer.from('0000000000000000000000000000000000000000', 'hex'); + const address1 = Buffer.from('0000000000000000000000000000000000000001', 'hex'); + const address2 = Buffer.from('0000000000000000000000000000000000000002', 'hex'); + + const updateAuthorityValidatorParams: UpdateAuthorityParams = { + newValidators: [ + { + address: address0, + weight: BigInt(40), + }, + { + address: address1, + weight: BigInt(40), + }, + ], + threshold: BigInt(68), + validatorsUpdateNonce: 0, + signature: utils.getRandomBytes(64), + aggregationBits: Buffer.from([0]), + }; + + const buildUpdateAuthorityValidatorParams = (params: Partial): Buffer => + codec.encode(updateAuthoritySchema, { + newValidators: params.newValidators ?? updateAuthorityValidatorParams.newValidators, + threshold: params.threshold ?? updateAuthorityValidatorParams.threshold, + validatorsUpdateNonce: + params.validatorsUpdateNonce ?? updateAuthorityValidatorParams.validatorsUpdateNonce, + signature: params.signature ?? updateAuthorityValidatorParams.signature, + aggregationBits: params.aggregationBits ?? updateAuthorityValidatorParams.aggregationBits, + }); + + const publicKey = utils.getRandomBytes(ED25519_PUBLIC_KEY_LENGTH); + const chainID = Buffer.from([0, 0, 0, 1]); + + const buildTransaction = (transaction: Partial): Transaction => { + return new Transaction({ + module: transaction.module ?? MODULE_NAME_POA, + command: transaction.command ?? COMMAND_UPDATE_AUTHORITY, + senderPublicKey: transaction.senderPublicKey ?? publicKey, + nonce: transaction.nonce ?? 
BigInt(0), + fee: transaction.fee ?? AUTHORITY_REGISTRATION_FEE, + params: + transaction.params ?? codec.encode(updateAuthoritySchema, updateAuthorityValidatorParams), + signatures: transaction.signatures ?? [publicKey], + }); + }; + + beforeEach(async () => { + updateAuthorityCommand = new UpdateAuthorityCommand(poaModule.stores, poaModule.events); + mockValidatorsMethod = { + setValidatorGeneratorKey: jest.fn(), + registerValidatorKeys: jest.fn(), + registerValidatorWithoutBLSKey: jest.fn(), + getValidatorKeys: jest.fn(), + getGeneratorsBetweenTimestamps: jest.fn(), + setValidatorsParams: jest.fn(), + }; + updateAuthorityCommand.addDependencies(mockValidatorsMethod); + + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + validatorStore = poaModule.stores.get(ValidatorStore); + chainPropertiesStore = poaModule.stores.get(ChainPropertiesStore); + snapshotStore = poaModule.stores.get(SnapshotStore); + + await validatorStore.set(createStoreGetter(stateStore), address0, { + name: 'validator0', + }); + await validatorStore.set(createStoreGetter(stateStore), address1, { + name: 'validator1', + }); + await chainPropertiesStore.set(createStoreGetter(stateStore), EMPTY_BYTES, { + roundEndHeight: 0, + validatorsUpdateNonce: 0, + }); + }); + + describe('verify', () => { + let context: CommandVerifyContext; + beforeEach(() => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({}), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + }); + + it('should throw error when length of newValidators is less than 1', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + newValidators: [], + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `newValidators length must be between 1 and 
${MAX_NUM_VALIDATORS} (inclusive).`, + ); + }); + + it('should throw error when length of newValidators is greater than MAX_NUM_VALIDATORS', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + newValidators: Array.from(Array(MAX_NUM_VALIDATORS + 1).keys()).map(_ => ({ + address: utils.getRandomBytes(20), + weight: BigInt(1), + })), + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `newValidators length must be between 1 and ${MAX_NUM_VALIDATORS} (inclusive)`, + ); + }); + + it('should throw error when newValidators are not lexicographically ordered', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + newValidators: [ + { + address: address1, + weight: BigInt(1), + }, + { + address: address0, + weight: BigInt(1), + }, + ], + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + 'Addresses in newValidators are not lexicographically ordered.', + ); + }); + + it('should throw error when addresses are in newValidators are not unique', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + newValidators: [ + { + address: address0, + weight: BigInt(1), + }, + { + address: address1, + weight: BigInt(1), + }, + { + address: address1, + weight: BigInt(1), + }, + ], + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + 'Addresses in newValidators are not unique.', + ); + }); + + it('should throw error when validator is not in ValidatorStore', 
async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + newValidators: [ + ...updateAuthorityValidatorParams.newValidators, + { + address: address2, + weight: BigInt(2), + }, + ], + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `No validator found for given address ${address2.toString('hex')}.`, + ); + }); + + it('should throw error when totalWeight is greater than MAX_UINT64', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + newValidators: [ + { + address: address0, + weight: BigInt(MAX_UINT64), + }, + { + address: address1, + weight: BigInt(1), + }, + ], + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `Validators total weight exceeds ${MAX_UINT64}`, + ); + }); + + it('should throw error when trsParams.threshold is less than (totalWeight / 3) + 1 ', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + threshold: BigInt(20), + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + const totalWeight = updateAuthorityValidatorParams.newValidators.reduce( + (acc, validator) => acc + validator.weight, + BigInt(0), + ); + const minThreshold = totalWeight / BigInt(3) + BigInt(1); + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `Threshold must be between ${minThreshold} and ${totalWeight} (inclusive).`, + ); + }); + + it('should throw error when trsParams.threshold is greater than totalWeight', async () => { + context = testing + .createTransactionContext({ + stateStore, + 
transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + threshold: BigInt(81), + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + const totalWeight = updateAuthorityValidatorParams.newValidators.reduce( + (acc, validator) => acc + validator.weight, + BigInt(0), + ); + const minThreshold = totalWeight / BigInt(3) + BigInt(1); + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `Threshold must be between ${minThreshold} and ${totalWeight}`, + ); + }); + + it('should throw error when trsParams.validatorsUpdateNonce does not equal to chainProperties.validatorsUpdateNonce', async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({ + params: buildUpdateAuthorityValidatorParams({ + validatorsUpdateNonce: 1, + }), + }), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + + const chainProperties = await chainPropertiesStore.get(context, EMPTY_BYTES); + await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + `validatorsUpdateNonce must be equal to ${chainProperties.validatorsUpdateNonce}.`, + ); + }); + + it('should return OK when transaction is valid', async () => { + const result = await updateAuthorityCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.OK); + }); + }); + + describe('execute', () => { + let context: CommandExecuteContext; + + const checkEventResult = ( + eventQueue: EventQueue, + BaseEvent: any, + expectedResult: UpdateAuthorityResult, + length = 1, + index = 0, + ) => { + expect(eventQueue.getEvents()).toHaveLength(length); + expect(eventQueue.getEvents()[index].toObject().name).toEqual(new BaseEvent('token').name); + expect( + codec.decode>( + new BaseEvent('token').schema, + eventQueue.getEvents()[index].toObject().data, + ).result, + ).toEqual(expectedResult); + }; + beforeEach(async () => { + context = testing + .createTransactionContext({ + stateStore, + 
transaction: buildTransaction({}), + chainID, + }) + .createCommandExecuteContext(updateAuthoritySchema); + + await snapshotStore.set(createStoreGetter(stateStore), KEY_SNAPSHOT_0, { + validators: [], + threshold: BigInt(0), + }); + }); + + it('should emit event and throw error when verifyWeightedAggSig failed', async () => { + jest.spyOn(bls, 'verifyWeightedAggSig').mockReturnValue(false); + + await expect(updateAuthorityCommand.execute(context)).rejects.toThrow( + 'Invalid weighted aggregated signature.', + ); + + checkEventResult( + context.eventQueue, + AuthorityUpdateEvent, + UpdateAuthorityResult.FAIL_INVALID_SIGNATURE, + ); + }); + + it('should increase stores (snapshotStore2 & chainProperties) and emit event when verifyWeightedAggSig is true', async () => { + jest.spyOn(bls, 'verifyWeightedAggSig').mockReturnValue(true); + + await updateAuthorityCommand.execute(context); + + expect(await snapshotStore.get(context, KEY_SNAPSHOT_2)).toStrictEqual({ + validators: updateAuthorityValidatorParams.newValidators, + threshold: updateAuthorityValidatorParams.threshold, + }); + expect(await chainPropertiesStore.get(context, EMPTY_BYTES)).toStrictEqual({ + roundEndHeight: 0, + validatorsUpdateNonce: 1, + }); + + checkEventResult(context.eventQueue, AuthorityUpdateEvent, UpdateAuthorityResult.SUCCESS); + }); + }); +}); diff --git a/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts b/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts new file mode 100644 index 00000000000..d68c775eb2f --- /dev/null +++ b/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts @@ -0,0 +1,130 @@ +import { codec } from '@liskhq/lisk-codec'; +import { TransactionAttrs } from '@liskhq/lisk-chain'; +import { utils, address } from '@liskhq/lisk-cryptography'; + +import { + PoAModule, + Transaction, + CommandVerifyContext, + CommandExecuteContext, + VerifyStatus, +} from '../../../../../src'; +import { UpdateGeneratorKeyParams, 
ValidatorsMethod } from '../../../../../src/modules/poa/types'; +import { ValidatorStore } from '../../../../../src/modules/poa/stores'; +import { + AUTHORITY_REGISTRATION_FEE, + COMMAND_UPDATE_KEY, + LENGTH_GENERATOR_KEY, + MODULE_NAME_POA, +} from '../../../../../src/modules/poa/constants'; +import { updateGeneratorKeySchema } from '../../../../../src/modules/poa/schemas'; +import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; +import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; +import * as testing from '../../../../../src/testing'; +import { UpdateGeneratorKeyCommand } from '../../../../../src/modules/poa/commands/update_generator_key'; +import { createStoreGetter } from '../../../../../src/testing/utils'; + +describe('UpdateGeneratorKey', () => { + const poaModule = new PoAModule(); + let updateGeneratorKeyCommand: UpdateGeneratorKeyCommand; + let stateStore: PrefixedStateReadWriter; + let mockValidatorsMethod: ValidatorsMethod; + let validatorStore: ValidatorStore; + + const publicKey = utils.getRandomBytes(32); + const chainID = Buffer.from([0, 0, 0, 1]); + + const updateGeneratorKeyParams: UpdateGeneratorKeyParams = { + generatorKey: utils.getRandomBytes(LENGTH_GENERATOR_KEY), + }; + + const buildTransaction = (transaction: Partial): Transaction => { + return new Transaction({ + module: transaction.module ?? MODULE_NAME_POA, + command: transaction.command ?? COMMAND_UPDATE_KEY, + senderPublicKey: transaction.senderPublicKey ?? publicKey, + nonce: transaction.nonce ?? BigInt(0), + fee: transaction.fee ?? AUTHORITY_REGISTRATION_FEE, + params: + transaction.params ?? codec.encode(updateGeneratorKeySchema, updateGeneratorKeyParams), + signatures: transaction.signatures ?? 
[publicKey], + }); + }; + + beforeEach(async () => { + updateGeneratorKeyCommand = new UpdateGeneratorKeyCommand(poaModule.stores, poaModule.events); + mockValidatorsMethod = { + setValidatorGeneratorKey: jest.fn(), + registerValidatorKeys: jest.fn(), + registerValidatorWithoutBLSKey: jest.fn(), + getValidatorKeys: jest.fn(), + getGeneratorsBetweenTimestamps: jest.fn(), + setValidatorsParams: jest.fn(), + }; + updateGeneratorKeyCommand.addDependencies(mockValidatorsMethod); + + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + validatorStore = poaModule.stores.get(ValidatorStore); + + await validatorStore.set( + createStoreGetter(stateStore), + address.getAddressFromPublicKey(publicKey), + { + name: 'validator', + }, + ); + }); + + describe('verify', () => { + let context: CommandVerifyContext; + beforeEach(() => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({}), + chainID, + }) + .createCommandVerifyContext(updateGeneratorKeySchema); + }); + + it('should return error when validator not exist', async () => { + await validatorStore.del( + createStoreGetter(stateStore), + address.getAddressFromPublicKey(publicKey), + ); + + await expect(updateGeneratorKeyCommand.verify(context)).rejects.toThrow( + 'Validator does not exist.', + ); + }); + + it('should return OK when transaction is valid', async () => { + const result = await updateGeneratorKeyCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.OK); + }); + }); + + describe('execute', () => { + let context: CommandExecuteContext; + beforeEach(async () => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({}), + chainID, + }) + .createCommandExecuteContext(updateGeneratorKeySchema); + }); + + it('should call setValidatorGeneratorKey', async () => { + await updateGeneratorKeyCommand.execute(context); + + expect(mockValidatorsMethod.setValidatorGeneratorKey).toHaveBeenCalledWith( + 
expect.anything(), + address.getAddressFromPublicKey(publicKey), + context.params.generatorKey, + ); + }); + }); +}); diff --git a/framework/test/unit/modules/poa/endpoint.spec.ts b/framework/test/unit/modules/poa/endpoint.spec.ts new file mode 100644 index 00000000000..484e586bc02 --- /dev/null +++ b/framework/test/unit/modules/poa/endpoint.spec.ts @@ -0,0 +1,198 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { address as cryptoAddress, utils } from '@liskhq/lisk-cryptography'; +import { PoAModule } from '../../../../src'; +import { PoAEndpoint } from '../../../../src/modules/poa/endpoint'; +import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; +import { SnapshotStore, ValidatorStore } from '../../../../src/modules/poa/stores'; +import { + InMemoryPrefixedStateDB, + createTransientModuleEndpointContext, +} from '../../../../src/testing'; +import { createStoreGetter } from '../../../../src/testing/utils'; +import { AUTHORITY_REGISTRATION_FEE, KEY_SNAPSHOT_0 } from '../../../../src/modules/poa/constants'; + +describe('PoAModuleEndpoint', () => { + const poa = new PoAModule(); + + let poaEndpoint: PoAEndpoint; + let stateStore: PrefixedStateReadWriter; + let validatorStore: ValidatorStore; + let snapshotStore: SnapshotStore; + + const address1 = utils.getRandomBytes(20); + const address2 = utils.getRandomBytes(20); + const address3 = utils.getRandomBytes(20); + + const validatorData = { + name: 'validator1', + address: cryptoAddress.getLisk32AddressFromAddress(address1), + 
weight: BigInt(1), + }; + + const snapshot = { + threshold: BigInt(2), + validators: [ + { + address: address1, + weight: BigInt(1), + }, + { + address: address2, + weight: BigInt(2), + }, + ], + }; + + beforeEach(() => { + poaEndpoint = new PoAEndpoint(poa.stores, poa.offchainStores); + poaEndpoint.init(AUTHORITY_REGISTRATION_FEE); + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + validatorStore = poa.stores.get(ValidatorStore); + snapshotStore = poa.stores.get(SnapshotStore); + }); + + describe('getValidator', () => { + beforeEach(async () => { + await validatorStore.set(createStoreGetter(stateStore), address1, { + name: validatorData.name, + }); + await snapshotStore.set(createStoreGetter(stateStore), KEY_SNAPSHOT_0, snapshot); + }); + + it('should return correct validator data corresponding to the input address', async () => { + const validatorDataReturned = await poaEndpoint.getValidator( + createTransientModuleEndpointContext({ + stateStore, + params: { + address: cryptoAddress.getLisk32AddressFromAddress(address1), + }, + }), + ); + + const validatorDataJSON = { + ...validatorData, + weight: validatorData.weight.toString(), + }; + + expect(validatorDataReturned).toStrictEqual(validatorDataJSON); + }); + + it('should return valid JSON output', async () => { + const validatorDataReturned = await poaEndpoint.getValidator( + createTransientModuleEndpointContext({ + stateStore, + params: { + address: cryptoAddress.getLisk32AddressFromAddress(address1), + }, + }), + ); + + expect(validatorDataReturned.weight).toBeString(); + }); + + it('should throw error if input address for validator not found', async () => { + await expect( + poaEndpoint.getValidator( + createTransientModuleEndpointContext({ + stateStore, + params: { address: cryptoAddress.getLisk32AddressFromAddress(address3) }, + }), + ), + ).rejects.toThrow( + `Validator not found in snapshot for address ${cryptoAddress.getLisk32AddressFromAddress( + address3, + )}`, + ); + }); + 
}); + + describe('getAllValidators', () => { + const address1Str = cryptoAddress.getLisk32AddressFromAddress(address1); + const address2Str = cryptoAddress.getLisk32AddressFromAddress(address2); + + const addresses = [address1Str, address2Str]; + + it('should return correct data for all validators', async () => { + await validatorStore.set(createStoreGetter(stateStore), address1, { + name: validatorData.name, + }); + await validatorStore.set(createStoreGetter(stateStore), address2, { name: 'validator2' }); + await snapshotStore.set(createStoreGetter(stateStore), KEY_SNAPSHOT_0, snapshot); + + const { validators } = await poaEndpoint.getAllValidators( + createTransientModuleEndpointContext({ stateStore }), + ); + + expect(addresses).toContain(validators[0].address); + expect(addresses).toContain(validators[1].address); + }); + + it('should return valid JSON output', async () => { + await validatorStore.set(createStoreGetter(stateStore), address1, { + name: validatorData.name, + }); + await validatorStore.set(createStoreGetter(stateStore), address2, { name: 'validator2' }); + await snapshotStore.set(createStoreGetter(stateStore), KEY_SNAPSHOT_0, snapshot); + + const { validators } = await poaEndpoint.getAllValidators( + createTransientModuleEndpointContext({ stateStore }), + ); + + // Here we are checking against name sorted values from endpoint + expect(validators[0].weight).toBe(snapshot.validators[0].weight.toString()); + expect(validators[1].weight).toBe(snapshot.validators[1].weight.toString()); + }); + + it('should return json with empty weight for non active validator', async () => { + await validatorStore.set(createStoreGetter(stateStore), address1, { name: 'validator1' }); + await validatorStore.set(createStoreGetter(stateStore), address2, { name: 'validator2' }); + const currentSnapshot = { + threshold: BigInt(2), + validators: [ + { + address: address1, + weight: BigInt(1), + }, + ], + }; + await snapshotStore.set(createStoreGetter(stateStore), 
KEY_SNAPSHOT_0, currentSnapshot); + + const { validators } = await poaEndpoint.getAllValidators( + createTransientModuleEndpointContext({ stateStore }), + ); + + // Checking against name-sorted values + expect(validators[0].weight).toBe(currentSnapshot.validators[0].weight.toString()); + expect(validators[1].weight).toBe(''); + }); + }); + + describe('getRegistrationFee', () => { + it('should return the default registration fee', () => { + const response = poaEndpoint.getRegistrationFee(); + + expect(response).toEqual({ fee: AUTHORITY_REGISTRATION_FEE.toString() }); + }); + + it('should return the configured registration fee', () => { + const authorityRegistrationFee = BigInt(200000); + poaEndpoint.init(authorityRegistrationFee); + const response = poaEndpoint.getRegistrationFee(); + + expect(response).toEqual({ fee: authorityRegistrationFee.toString() }); + }); + }); +}); diff --git a/framework/test/unit/modules/poa/genesis_block_test_data.ts b/framework/test/unit/modules/poa/genesis_block_test_data.ts new file mode 100644 index 00000000000..0cbcff4bd7b --- /dev/null +++ b/framework/test/unit/modules/poa/genesis_block_test_data.ts @@ -0,0 +1,197 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { bls, address as cryptoAddress, legacy, utils } from '@liskhq/lisk-cryptography'; +import { Mnemonic } from '@liskhq/lisk-passphrase'; + +export const validators = new Array(103).fill(0).map((_, i) => { + const passphrase = Mnemonic.generateMnemonic(); + const keys = legacy.getPrivateAndPublicKeyFromPassphrase(passphrase); + const address = cryptoAddress.getAddressFromPublicKey(keys.publicKey); + const blsPrivateKey = bls.generatePrivateKey(Buffer.from(passphrase, 'utf-8')); + const blsPublicKey = bls.getPublicKeyFromPrivateKey(blsPrivateKey); + const blsPoP = bls.popProve(blsPrivateKey); + return { + address, + name: `genesis_${i}`, + blsKey: blsPublicKey, + proofOfPossession: blsPoP, + generatorKey: keys.publicKey, + }; +}); +validators.sort((a, b) => a.address.compare(b.address)); + +const activeValidators = validators + .slice(0, validators.length - 2) + .map(v => ({ address: v.address, weight: BigInt(1) })); +const threshold = BigInt(35); + +export const validAsset = { + validators, + snapshotSubstore: { + activeValidators, + threshold, + }, +}; + +export const invalidAssets: any[] = [ + [ + 'Invalid validator name length', + { + validators: [ + { + ...validators[0], + name: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', + }, + ...validators.slice(1, validators.length), + ], + snapshotSubstore: { + activeValidators, + threshold, + }, + }, + ], + [ + 'Invalid validator name character', + { + validators: [ + { + ...validators[0], + name: '@@@__++', + }, + ...validators.slice(1, validators.length), + ], + snapshotSubstore: { + activeValidators, + threshold, + }, + }, + '`name` property is invalid. 
Must contain only characters a-z0-9!@$&_.', + ], + [ + 'Not unique validator name', + { + validators: [ + { + ...validators[0], + name: validators[1].name, + }, + ...validators.slice(1, validators.length), + ], + snapshotSubstore: { + activeValidators, + threshold, + }, + }, + '`name` property of all entries in the validators must be distinct.', + ], + [ + 'Not unique validator address', + { + validators: [ + { + ...validators[0], + address: validators[1].address, + }, + ...validators.slice(1, validators.length), + ], + snapshotSubstore: { + activeValidators, + threshold, + }, + }, + '`address` property of all entries in validators must be distinct.', + ], + [ + 'validator address is not ordered', + { + validators: validators.slice(0).sort((a, b) => b.address.compare(a.address)), + snapshotSubstore: { + activeValidators, + threshold, + }, + }, + '`validators` must be ordered lexicographically by address.', + ], + [ + 'active validator address is not unique', + { + validators, + snapshotSubstore: { + activeValidators: [ + { + ...activeValidators[0], + address: activeValidators[1].address, + }, + ...activeValidators.slice(1, activeValidators.length), + ], + threshold, + }, + }, + '`address` properties in `activeValidators` must be distinct.', + ], + [ + 'active validator address is not ordered', + { + validators, + snapshotSubstore: { + activeValidators: activeValidators.slice(0).sort((a, b) => b.address.compare(a.address)), + threshold, + }, + }, + '`activeValidators` must be ordered lexicographically by address property.', + ], + [ + 'active validator address is missing from validators array', + { + validators, + snapshotSubstore: { + activeValidators: [ + { ...activeValidators[0], address: utils.getRandomBytes(20) }, + ...activeValidators.slice(1, activeValidators.length), + ].sort((a, b) => a.address.compare(b.address)), + threshold, + }, + }, + '`activeValidator` address is missing from validators array.', + ], + [ + 'active validator weight must be positive 
integer', + { + validators, + snapshotSubstore: { + activeValidators: [ + { ...activeValidators[0], weight: BigInt(0) }, + ...activeValidators.slice(1, activeValidators.length), + ], + threshold, + }, + }, + '`activeValidators` weight must be positive integer.', + ], + [ + 'active validators total weight must be within range', + { + validators, + snapshotSubstore: { + activeValidators: [ + { ...activeValidators[0], weight: BigInt(1000000000000000) }, + ...activeValidators.slice(1, activeValidators.length), + ], + threshold, + }, + }, + '`threshold` in snapshot substore is not within range.', + ], +]; diff --git a/framework/test/unit/modules/poa/module.spec.ts b/framework/test/unit/modules/poa/module.spec.ts new file mode 100644 index 00000000000..fd7ed1c6928 --- /dev/null +++ b/framework/test/unit/modules/poa/module.spec.ts @@ -0,0 +1,506 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { codec } from '@liskhq/lisk-codec'; +import { utils } from '@liskhq/lisk-cryptography'; +import { BlockAssets } from '@liskhq/lisk-chain'; +import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; +import { + InMemoryPrefixedStateDB, + createBlockContext, + createGenesisBlockContext, + createTransientMethodContext, +} from '../../../../src/testing'; +import { invalidAssets, validAsset } from './genesis_block_test_data'; +import { PoAModule } from '../../../../src/modules/poa/module'; +import { genesisPoAStoreSchema } from '../../../../src/modules/poa/schemas'; +import { + AUTHORITY_REGISTRATION_FEE, + EMPTY_BYTES, + KEY_SNAPSHOT_0, + KEY_SNAPSHOT_1, + KEY_SNAPSHOT_2, + LENGTH_BLS_KEY, + LENGTH_GENERATOR_KEY, +} from '../../../../src/modules/poa/constants'; +import { + FeeMethod, + ModuleConfigJSON, + RandomMethod, + ValidatorsMethod, +} from '../../../../src/modules/poa/types'; +import { createFakeBlockHeader } from '../../../fixtures'; +import { + BlockAfterExecuteContext, + GenesisBlockContext, + GenesisBlockExecuteContext, + MethodContext, +} from '../../../../src/state_machine'; +import { + ValidatorStore, + SnapshotStore, + NameStore, + ChainPropertiesStore, + SnapshotObject, + ChainProperties, +} from '../../../../src/modules/poa/stores'; +import { shuffleValidatorList } from '../../../../src/modules/poa/utils'; + +describe('PoA module', () => { + let poaModule: PoAModule; + let randomMethod: RandomMethod; + let validatorMethod: ValidatorsMethod; + let feeMethod: FeeMethod; + + beforeEach(() => { + poaModule = new PoAModule(); + randomMethod = { + getRandomBytes: jest.fn(), + }; + validatorMethod = { + setValidatorGeneratorKey: jest.fn(), + registerValidatorKeys: jest.fn().mockResolvedValue(true), + registerValidatorWithoutBLSKey: jest.fn().mockResolvedValue(true), + getValidatorKeys: jest.fn().mockResolvedValue({ + blsKey: utils.getRandomBytes(LENGTH_BLS_KEY), + generatorKey: 
utils.getRandomBytes(LENGTH_GENERATOR_KEY), + }), + getGeneratorsBetweenTimestamps: jest.fn(), + setValidatorsParams: jest.fn(), + }; + feeMethod = { + payFee: jest.fn(), + }; + }); + describe('constructor', () => {}); + + describe('init', () => { + let genesisConfig: any; + let moduleConfigJSON: ModuleConfigJSON; + + beforeEach(() => { + genesisConfig = {}; + moduleConfigJSON = { + authorityRegistrationFee: AUTHORITY_REGISTRATION_FEE.toString(), + }; + }); + it('should assign authorityRegistrationFee from config if given', async () => { + jest.spyOn(poaModule['_registerAuthorityCommand'], 'init'); + jest.spyOn(poaModule.endpoint, 'init'); + await poaModule.init({ + genesisConfig, + moduleConfig: { + ...moduleConfigJSON, + authorityRegistrationFee: '20000', + }, + }); + + expect(poaModule['_moduleConfig'].authorityRegistrationFee).toEqual(BigInt('20000')); + expect(poaModule['_registerAuthorityCommand'].init).toHaveBeenCalledWith( + poaModule['_moduleConfig'], + ); + expect(poaModule.endpoint.init).toHaveBeenCalledWith( + poaModule['_moduleConfig'].authorityRegistrationFee, + ); + }); + + it('should assign default value for authorityRegistrationFee when not given in config', async () => { + jest.spyOn(poaModule['_registerAuthorityCommand'], 'init'); + jest.spyOn(poaModule.endpoint, 'init'); + await poaModule.init({ + genesisConfig, + moduleConfig: { ...moduleConfigJSON }, + }); + + expect(poaModule['_moduleConfig'].authorityRegistrationFee).toEqual( + AUTHORITY_REGISTRATION_FEE, + ); + expect(poaModule['_registerAuthorityCommand'].init).toHaveBeenCalledWith( + poaModule['_moduleConfig'], + ); + expect(poaModule.endpoint.init).toHaveBeenCalledWith( + poaModule['_moduleConfig'].authorityRegistrationFee, + ); + }); + }); + + describe('addDependencies', () => { + it('should add all the dependencies', () => { + jest.spyOn(poaModule['_registerAuthorityCommand'], 'addDependencies'); + jest.spyOn(poaModule['_updateAuthorityCommand'], 'addDependencies'); + 
jest.spyOn(poaModule['_updateGeneratorKeyCommand'], 'addDependencies'); + poaModule.addDependencies(validatorMethod, feeMethod, randomMethod); + + expect(poaModule['_validatorsMethod']).toBeDefined(); + expect(poaModule['_feeMethod']).toBeDefined(); + expect(poaModule['_randomMethod']).toBeDefined(); + + // Check command dependencies + expect(poaModule['_registerAuthorityCommand'].addDependencies).toHaveBeenCalledWith( + poaModule['_validatorsMethod'], + poaModule['_feeMethod'], + ); + expect(poaModule['_updateAuthorityCommand'].addDependencies).toHaveBeenCalledWith( + poaModule['_validatorsMethod'], + ); + expect(poaModule['_updateGeneratorKeyCommand'].addDependencies).toHaveBeenCalledWith( + poaModule['_validatorsMethod'], + ); + }); + }); + + describe('afterTransactionsExecute', () => { + const genesisData = { + height: 0, + initRounds: 3, + initValidators: [], + }; + const bootstrapRounds = genesisData.initRounds; + let stateStore: PrefixedStateReadWriter; + let context: BlockAfterExecuteContext; + let currentTimestamp: number; + let height: number; + let snapshot0: SnapshotObject; + let snapshot1: SnapshotObject; + let snapshot2: SnapshotObject; + let chainPropertiesStore: ChainPropertiesStore; + let snapshotStore: SnapshotStore; + let methodContext: MethodContext; + let randomSeed: Buffer; + let chainProperties: ChainProperties; + + beforeEach(async () => { + poaModule = new PoAModule(); + poaModule.addDependencies(validatorMethod, feeMethod, randomMethod); + height = 103 * (bootstrapRounds + 1); + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + currentTimestamp = Math.floor(Date.now() / 1000); + + context = createBlockContext({ + stateStore, + header: createFakeBlockHeader({ + height, + timestamp: currentTimestamp, + }), + }).getBlockAfterExecuteContext(); + methodContext = createTransientMethodContext({ stateStore }); + chainProperties = { + roundEndHeight: height - 1, + validatorsUpdateNonce: 4, + }; + chainPropertiesStore = 
poaModule.stores.get(ChainPropertiesStore); + await chainPropertiesStore.set(methodContext, EMPTY_BYTES, chainProperties); + snapshot0 = { + threshold: BigInt(4), + validators: [ + { + address: Buffer.from('4162070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4262070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4362070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4462070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4562070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + ], + }; + + snapshot1 = { + threshold: BigInt(4), + validators: [ + { + address: Buffer.from('4162070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4862070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4362070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4762070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4562070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + ], + }; + + snapshot2 = { + threshold: BigInt(4), + validators: [ + { + address: Buffer.from('4262070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4862070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4362070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4762070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + { + address: Buffer.from('4562070a641cf689f765d43ad792e1970e6bb863', 'binary'), + weight: BigInt(1), + }, + ], + }; + + snapshotStore = 
poaModule.stores.get(SnapshotStore); + await snapshotStore.set(methodContext, KEY_SNAPSHOT_0, snapshot0); + await snapshotStore.set(methodContext, KEY_SNAPSHOT_1, snapshot1); + await snapshotStore.set(methodContext, KEY_SNAPSHOT_2, snapshot2); + randomSeed = utils.getRandomBytes(20); + jest.spyOn(snapshotStore, 'set'); + jest.spyOn(randomMethod, 'getRandomBytes').mockResolvedValue(randomSeed); + jest.spyOn(validatorMethod, 'setValidatorsParams').mockResolvedValue(); + }); + it('should not do anything when context.header.height !== chainProperties.roundEndHeight', async () => { + await poaModule.afterTransactionsExecute(context); + expect(poaModule.stores.get(SnapshotStore).set).not.toHaveBeenCalled(); + expect(randomMethod.getRandomBytes).not.toHaveBeenCalled(); + expect(validatorMethod.setValidatorsParams).not.toHaveBeenCalled(); + }); + + it('should set snapshots and call validatorsMethod.setValidatorsParams when context.header.height === chainProperties.roundEndHeight', async () => { + chainProperties = { + ...chainProperties, + roundEndHeight: height, + }; + await chainPropertiesStore.set(methodContext, EMPTY_BYTES, chainProperties); + const roundStartHeight = height - snapshot0.validators.length + 1; + const validators = []; + for (const validator of snapshot1.validators) { + validators.push(validator); + } + const nextValidators = shuffleValidatorList(randomSeed, validators); + await poaModule.afterTransactionsExecute(context); + expect(poaModule.stores.get(SnapshotStore).set).toHaveBeenCalledWith( + context, + KEY_SNAPSHOT_0, + snapshot1, + ); + expect(poaModule.stores.get(SnapshotStore).set).toHaveBeenCalledWith( + context, + KEY_SNAPSHOT_1, + snapshot2, + ); + expect(randomMethod.getRandomBytes).toHaveBeenCalledWith( + context, + roundStartHeight, + snapshot0.validators.length, + ); + expect(validatorMethod.setValidatorsParams).toHaveBeenCalledWith( + context, + context, + snapshot1.threshold, + snapshot1.threshold, + nextValidators.map(v => ({ + address: 
v.address, + bftWeight: v.weight, + })), + ); + await expect(chainPropertiesStore.get(context, EMPTY_BYTES)).resolves.toEqual({ + ...chainProperties, + roundEndHeight: chainProperties.roundEndHeight + snapshot1.validators.length, + }); + }); + }); + + describe('initGenesisState', () => { + let stateStore: PrefixedStateReadWriter; + let poa: PoAModule; + + beforeEach(() => { + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + poa = new PoAModule(); + poa.addDependencies(validatorMethod, feeMethod, randomMethod); + }); + + it('should not throw error if asset does not exist', async () => { + const context = createGenesisBlockContext({ + stateStore, + }).createInitGenesisStateContext(); + jest.spyOn(context, 'getStore'); + + await expect(poa.initGenesisState(context)).toResolve(); + expect(context.getStore).not.toHaveBeenCalled(); + }); + + describe.each(invalidAssets)('%p', (_, data, errString) => { + it('should throw error when asset is invalid', async () => { + const assetBytes = codec.encode(genesisPoAStoreSchema, data as object); + const context = createGenesisBlockContext({ + stateStore, + header: createFakeBlockHeader({ height: 12345 }), + assets: new BlockAssets([{ module: poa.name, data: assetBytes }]), + }).createInitGenesisStateContext(); + + await expect(poa.initGenesisState(context)).rejects.toThrow(errString as string); + }); + }); + + describe('when the genesis asset is valid', () => { + let genesisContext: GenesisBlockContext; + let context: GenesisBlockExecuteContext; + + beforeEach(() => { + const assetBytes = codec.encode(genesisPoAStoreSchema, validAsset); + genesisContext = createGenesisBlockContext({ + stateStore, + assets: new BlockAssets([{ module: poa.name, data: assetBytes }]), + }); + context = genesisContext.createInitGenesisStateContext(); + }); + + it('should store all the validators', async () => { + await expect(poa.initGenesisState(context)).resolves.toBeUndefined(); + const nameStore = 
poa.stores.get(NameStore); + const allNames = await nameStore.iterate(context, { + gte: Buffer.from([0]), + lte: Buffer.from([255]), + }); + expect(allNames).toHaveLength(validAsset.validators.length); + const validatorStore = poa.stores.get(ValidatorStore); + const allValidators = await validatorStore.iterate(context, { + gte: Buffer.alloc(20, 0), + lte: Buffer.alloc(20, 255), + }); + expect(allValidators).toHaveLength(validAsset.validators.length); + }); + + it('should store snapshot current round', async () => { + await expect(poa.initGenesisState(context)).toResolve(); + const snapshotStore = poa.stores.get(SnapshotStore); + await expect(snapshotStore.get(context, KEY_SNAPSHOT_0)).resolves.toEqual({ + validators: validAsset.snapshotSubstore.activeValidators, + threshold: validAsset.snapshotSubstore.threshold, + }); + }); + + it('should store snapshot current round + 1', async () => { + await expect(poa.initGenesisState(context)).toResolve(); + const snapshotStore = poa.stores.get(SnapshotStore); + await expect(snapshotStore.get(context, KEY_SNAPSHOT_1)).resolves.toEqual({ + validators: validAsset.snapshotSubstore.activeValidators, + threshold: validAsset.snapshotSubstore.threshold, + }); + }); + + it('should store snapshot current round + 2', async () => { + await expect(poa.initGenesisState(context)).toResolve(); + const snapshotStore = poa.stores.get(SnapshotStore); + await expect(snapshotStore.get(context, KEY_SNAPSHOT_2)).resolves.toEqual({ + validators: validAsset.snapshotSubstore.activeValidators, + threshold: validAsset.snapshotSubstore.threshold, + }); + }); + + it('should store chain properties', async () => { + await expect(poa.initGenesisState(context)).toResolve(); + const chainPropertiesStore = poa.stores.get(ChainPropertiesStore); + await expect(chainPropertiesStore.get(context, EMPTY_BYTES)).resolves.toEqual({ + roundEndHeight: context.header.height, + validatorsUpdateNonce: 0, + }); + }); + }); + }); + + describe('finalizeGenesisState', () => { 
+ let genesisContext: GenesisBlockContext; + let context: GenesisBlockExecuteContext; + let snapshotStore: SnapshotStore; + let chainPropertiesStore: ChainPropertiesStore; + let stateStore: PrefixedStateReadWriter; + let poa: PoAModule; + + beforeEach(async () => { + poa = new PoAModule(); + poa.addDependencies(validatorMethod, feeMethod, randomMethod); + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + const assetBytes = codec.encode(genesisPoAStoreSchema, validAsset); + genesisContext = createGenesisBlockContext({ + stateStore, + assets: new BlockAssets([{ module: poa.name, data: assetBytes }]), + }); + context = genesisContext.createInitGenesisStateContext(); + snapshotStore = poa.stores.get(SnapshotStore); + await snapshotStore.set(context, KEY_SNAPSHOT_0, { + ...validAsset.snapshotSubstore, + validators: validAsset.snapshotSubstore.activeValidators, + }); + chainPropertiesStore = poa.stores.get(ChainPropertiesStore); + await chainPropertiesStore.set(context, EMPTY_BYTES, { + roundEndHeight: context.header.height, + validatorsUpdateNonce: 0, + }); + }); + + it('should store updated chain properties', async () => { + await expect(poa.finalizeGenesisState(context)).toResolve(); + poa.stores.get(ChainPropertiesStore); + await expect(chainPropertiesStore.get(context, EMPTY_BYTES)).resolves.toEqual({ + roundEndHeight: context.header.height + validAsset.snapshotSubstore.activeValidators.length, + validatorsUpdateNonce: 0, + }); + }); + + it('should register all active validators as BFT validators', async () => { + await expect(poa.finalizeGenesisState(context)).toResolve(); + expect(poa['_validatorsMethod'].setValidatorsParams).toHaveBeenCalledWith( + expect.any(Object), + expect.any(Object), + BigInt(validAsset.snapshotSubstore.threshold), + BigInt(validAsset.snapshotSubstore.threshold), + validAsset.snapshotSubstore.activeValidators.map(d => ({ + address: d.address, + bftWeight: d.weight, + })), + ); + }); + + it('should fail if 
registerValidatorKeys return false', async () => { + (poa['_validatorsMethod'].registerValidatorKeys as jest.Mock).mockRejectedValue( + new Error('Invalid validator key found in poa genesis asset validators.'), + ); + + await expect(poa.finalizeGenesisState(context)).rejects.toThrow( + 'Invalid validator key found in poa genesis asset validators.', + ); + }); + }); +}); diff --git a/framework/test/unit/modules/poa/utils.spec.ts b/framework/test/unit/modules/poa/utils.spec.ts new file mode 100644 index 00000000000..64ba1cd35ad --- /dev/null +++ b/framework/test/unit/modules/poa/utils.spec.ts @@ -0,0 +1,50 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { address as cryptoAddress } from '@liskhq/lisk-cryptography'; +import { shuffleValidatorList } from '../../../../src/modules/poa/utils'; +import * as validatorShufflingScenario from '../../../fixtures/pos_validator_shuffling/uniformly_shuffled_validator_list.json'; + +// Same as pos/utils/shuffleValidatorList +describe('utils', () => { + describe('shuffleValidatorList', () => { + const { previousRoundSeed1 } = validatorShufflingScenario.testCases.input; + const validatorsList = [...validatorShufflingScenario.testCases.input.validatorList].map( + address => ({ + address: Buffer.from(address, 'hex'), + weight: BigInt(1), + }), + ); + it('should return a list of uniformly shuffled list of validators', () => { + const shuffledValidatorList = shuffleValidatorList( + Buffer.from(previousRoundSeed1, 'hex'), + validatorsList, + ); + const lisk32Addresses = validatorsList.map(a => + cryptoAddress.getLisk32AddressFromAddress(a.address), + ); + + expect(shuffledValidatorList).toHaveLength(validatorsList.length); + shuffledValidatorList.forEach(validator => + expect(lisk32Addresses).toContain( + cryptoAddress.getLisk32AddressFromAddress(validator.address), + ), + ); + + expect(shuffledValidatorList.map(b => b.address.toString('hex'))).toEqual( + validatorShufflingScenario.testCases.output.validatorList, + ); + }); + }); +}); From 1aa07a6c25e2acd1460c12b5425c4f93e2a3e02d Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Tue, 1 Aug 2023 13:26:07 +0200 Subject: [PATCH 101/170] Add missing param --- .../src/modules/nft/commands/transfer_cross_chain.ts | 1 + framework/src/modules/nft/internal_method.ts | 2 ++ framework/test/unit/modules/nft/internal_method.spec.ts | 9 +++++++++ 3 files changed, 12 insertions(+) diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index 2dcc3ea7575..533dd83948e 100644 --- 
a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -134,6 +134,7 @@ export class TransferCrossChainCommand extends BaseCommand { params.messageFee, params.data, params.includeAttributes, + context.header.timestamp, ); } } diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index 95cd702c0de..bb27b9eac6a 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -116,6 +116,7 @@ export class InternalMethod extends BaseMethod { messageFee: bigint, data: string, includeAttributes: boolean, + timestamp?: number, ): Promise { const chainID = this._method.getChainID(nftID); const nftStore = this.stores.get(NFTStore); @@ -172,6 +173,7 @@ export class InternalMethod extends BaseMethod { attributesArray, data, }), + timestamp, ); } diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 72e158fa376..b9a843e008e 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -205,6 +205,7 @@ describe('InternalMethod', () => { let receivingChainID: Buffer; const messageFee = BigInt(1000); const data = ''; + const timestamp = Math.floor(Date.now() / 1000); beforeEach(() => { receivingChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); @@ -254,6 +255,7 @@ describe('InternalMethod', () => { messageFee, data, includeAttributes, + timestamp, ), ).resolves.toBeUndefined(); @@ -294,6 +296,7 @@ describe('InternalMethod', () => { receivingChainID, messageFee, ccmParameters, + timestamp, ); }); @@ -330,6 +333,7 @@ describe('InternalMethod', () => { messageFee, data, includeAttributes, + timestamp, ), ).resolves.toBeUndefined(); @@ -362,6 +366,7 @@ describe('InternalMethod', () => { receivingChainID, messageFee, ccmParameters, + timestamp, ); }); }); @@ -407,6 
+412,7 @@ describe('InternalMethod', () => { messageFee, data, includeAttributes, + timestamp, ), ).resolves.toBeUndefined(); @@ -447,6 +453,7 @@ describe('InternalMethod', () => { receivingChainID, messageFee, ccmParameters, + timestamp, ); }); @@ -490,6 +497,7 @@ describe('InternalMethod', () => { messageFee, data, includeAttributes, + timestamp, ), ).resolves.toBeUndefined(); @@ -522,6 +530,7 @@ describe('InternalMethod', () => { receivingChainID, messageFee, ccmParameters, + timestamp, ); }); }); From 505e3826d9617b47ce2dc1aaf4f6301694ee12d1 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 2 Aug 2023 10:13:07 +0200 Subject: [PATCH 102/170] Add backup.height to getNodeInfo (#8796) :seedling: Add backup.height to getNodeInfo --- framework/src/node/node.ts | 3 +++ framework/test/functional/actions/application.spec.ts | 3 +++ 2 files changed, 6 insertions(+) diff --git a/framework/src/node/node.ts b/framework/src/node/node.ts index cd9e4d7ed14..b63c2cb2082 100644 --- a/framework/src/node/node.ts +++ b/framework/src/node/node.ts @@ -513,6 +513,9 @@ export class Node { ...this._options.genesisConfig, }, registeredModules: this.getRegisteredModules(), + backup: { + height: this._options.backup.height, + }, network: { port: this._options.network.port, hostIp: this._options.network.hostIp, diff --git a/framework/test/functional/actions/application.spec.ts b/framework/test/functional/actions/application.spec.ts index a412759e4eb..f2632100538 100644 --- a/framework/test/functional/actions/application.spec.ts +++ b/framework/test/functional/actions/application.spec.ts @@ -43,6 +43,9 @@ describe('Application related actions', () => { networkIdentifier: expect.any(String), networkVersion: expect.any(String), lastBlockID: expect.any(String), + backup: { + height: expect.any(Number), + }, finalizedHeight: expect.any(Number), unconfirmedTransactions: expect.any(Number), }), From 35abcde60a17d40540a3ea074d204fa6ef0fc014 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 2 Aug 2023 
11:58:49 +0200 Subject: [PATCH 103/170] Update node version (#8792) * :seedling: Update node version * :bug: Fix api-client integration test --- .github/actions/prepare-install/action.yml | 4 +- .nvmrc | 2 +- commander/package.json | 4 +- .../templates/init/package-template.json | 4 +- .../templates/init_plugin/package.json | 4 +- elements/lisk-api-client/package.json | 4 +- .../test/integration/ws_channel.spec.ts | 15 ++++--- elements/lisk-bft/package.json | 4 +- elements/lisk-chain/package.json | 4 +- elements/lisk-client/package.json | 4 +- elements/lisk-codec/package.json | 4 +- elements/lisk-cryptography/package.json | 4 +- elements/lisk-elements/package.json | 4 +- elements/lisk-genesis/package.json | 4 +- elements/lisk-p2p/package.json | 4 +- elements/lisk-passphrase/package.json | 4 +- elements/lisk-transaction-pool/package.json | 4 +- elements/lisk-transactions/package.json | 4 +- elements/lisk-tree/package.json | 4 +- elements/lisk-utils/package.json | 4 +- elements/lisk-validator/package.json | 4 +- .../package.json | 6 +-- .../lisk-framework-faucet-plugin/package.json | 6 +-- .../lisk-framework-forger-plugin/package.json | 4 +- .../package.json | 4 +- .../package.json | 4 +- .../package.json | 4 +- framework/package.json | 4 +- package.json | 4 +- protocol-specs/package.json | 2 +- sdk/package.json | 2 +- templates/package.json.tmpl | 4 +- yarn.lock | 43 +++++++++++-------- 33 files changed, 93 insertions(+), 87 deletions(-) diff --git a/.github/actions/prepare-install/action.yml b/.github/actions/prepare-install/action.yml index 6011880895d..7e4fe8a1c10 100644 --- a/.github/actions/prepare-install/action.yml +++ b/.github/actions/prepare-install/action.yml @@ -4,10 +4,10 @@ description: 'Prepares the repo for a job by checking out and installing depende runs: using: 'composite' steps: - - name: Use Node.js 16 + - name: Use Node.js 18 uses: actions/setup-node@v3 with: - node-version: 16 + node-version: 18 cache: 'yarn' - name: Install dependencies 
shell: bash diff --git a/.nvmrc b/.nvmrc index d5d3b29a47a..b492b086355 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -16.14.1 +18.16 diff --git a/commander/package.json b/commander/package.json index 0011b2fd392..46b1d78ee0b 100644 --- a/commander/package.json +++ b/commander/package.json @@ -20,7 +20,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist/index.js", @@ -136,7 +136,7 @@ "@types/jest-when": "2.7.2", "@types/jquery": "3.3.33", "@types/listr": "0.14.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/progress": "2.0.3", "@types/semver": "7.1.0", "@types/strip-ansi": "5.2.1", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 6f35596bd8d..464968ecdc8 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -14,7 +14,7 @@ "homepage": "", "repository": {}, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist/index.js", @@ -115,7 +115,7 @@ "@oclif/dev-cli": "1.26.10", "@oclif/config": "1.18.3", "@types/fs-extra": "8.1.0", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/tar": "4.0.3", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 8edca7f9f97..d65b57692b5 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -11,7 +11,7 @@ 
"url": "" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -34,7 +34,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index d5831a0f7d4..e67842c0427 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -48,7 +48,7 @@ "@liskhq/lisk-chain": "^0.3.5-alpha.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-api-client/test/integration/ws_channel.spec.ts b/elements/lisk-api-client/test/integration/ws_channel.spec.ts index 6b26fe94fe5..e65b7d3651a 100644 --- a/elements/lisk-api-client/test/integration/ws_channel.spec.ts +++ b/elements/lisk-api-client/test/integration/ws_channel.spec.ts @@ -35,7 +35,7 @@ describe('WSChannel', () => { describe('connect', () => { it('should be connect to ws server', async () => { const server = new WebSocket.Server({ path: '/my-path', port: 65535 }); - const channel = new WSChannel('ws://localhost:65535/my-path'); + const channel = new WSChannel('ws://127.0.0.1:65535/my-path'); try { await expect(channel.connect()).resolves.toBeUndefined(); @@ -62,7 +62,7 @@ describe('WSChannel', () => { http.listen(65535); - const channel = new WSChannel('ws://localhost:65535/my-path'); + const channel = new WSChannel('ws://127.0.0.1:65535/my-path'); try { await 
expect(channel.connect()).rejects.toThrow('Could not connect in 2000ms'); @@ -75,7 +75,7 @@ describe('WSChannel', () => { }, 5000); it('should throw error if server is not running', async () => { - const channel = new WSChannel('ws://localhost:65534/my-path'); + const channel = new WSChannel('ws://127.0.0.1:65534/my-path'); await expect(channel.connect()).rejects.toThrow('connect ECONNREFUSED 127.0.0.1:65534'); }); @@ -84,19 +84,20 @@ describe('WSChannel', () => { describe('disconnect', () => { it('should close ws connection', async () => { const server = new WebSocket.Server({ path: '/my-path', port: 65535 }); - const channel = new WSChannel('ws://localhost:65535/my-path'); + const channel = new WSChannel('ws://127.0.0.1:65535/my-path'); await channel.connect(); try { await expect(channel.disconnect()).resolves.toBeUndefined(); // WebSocket.Server.channels are not cleaned immediately - expect(server.clients.size).toEqual(1); - expect([...server.clients][0].readyState).toEqual(WebSocket.CLOSING); + expect(server.clients.size).toBeLessThanOrEqual(1); + if (server.clients.size > 0) { + expect([...server.clients][0].readyState).toEqual(WebSocket.CLOSING); + } } finally { await closeServer(server); } - expect.assertions(3); }); }); }); diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index a0088415da8..340af7b6d18 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -41,7 +41,7 @@ "@liskhq/lisk-cryptography": "^3.2.1", "@liskhq/lisk-utils": "^0.2.1", "@liskhq/lisk-validator": "^0.6.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 227effae491..50bd87a0f94 100644 --- 
a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -50,7 +50,7 @@ "@types/faker": "4.1.10", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/randomstring": "1.1.6", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 9f3905bc0ef..8f1c889eab2 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -68,7 +68,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "browserify": "16.5.0", diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 75786cbc55e..95aca50256a 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -42,7 +42,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 96e1c1a6fd1..27628e36c5e 100644 --- 
a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -48,7 +48,7 @@ "@types/ed2curve": "0.2.2", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 87b9716a8fd..2927a4d8d34 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -53,7 +53,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 4bdbbffeb0d..16b7f0d6cd7 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -46,7 +46,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 276598b4e34..3e5d8d7fa97 100644 --- 
a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -18,7 +18,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -56,7 +56,7 @@ "@types/jest-when": "2.7.2", "@types/jsonwebtoken": "8.3.8", "@types/lodash.shuffle": "4.2.6", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/semver": "7.1.0", "@types/socketcluster-client": "13.0.0", "@types/socketcluster-server": "14.2.2", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 37ce36609c5..e8040cab414 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -42,7 +42,7 @@ "@types/bip39": "3.0.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index c73bfbef64f..444e112f5d9 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -45,7 +45,7 @@ "@types/debug": "4.1.7", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 
736611d38ba..1d1915fe248 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -43,7 +43,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 53ab8c4c6bb..91390b0c95e 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -42,7 +42,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "benchmark": "2.1.4", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index a1069f2915f..b3ef4d938b0 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -42,7 +42,7 @@ "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/lodash.clonedeep": "4.5.6", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 99bbb0467b8..9db404892b4 100644 
--- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -18,7 +18,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -47,7 +47,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/semver": "7.3.4", "@types/validator": "13.1.3", "@typescript-eslint/eslint-plugin": "4.19.0", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 3dff8831d1e..7ef0a096cd0 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -18,7 +18,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -34,7 +34,7 @@ "prebuild": "rm -r dist-node/* || mkdir dist-node || true", "build": "npm run build:node && npm run build:web", "build:node": "tsc", - "build:web": "node scripts/build.js --jsx react", + "build:web": "NODE_OPTIONS='--openssl-legacy-provider' node scripts/build.js --jsx react", "start:web": "NODE_ENV=development node scripts/start.js", "build:check": "node -e \"require('./dist-node')\"", "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" @@ -62,7 +62,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/react": "^17.0.0", "@types/react-dom": "^17.0.0", "@types/react-router-dom": "5.1.7", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 07af2442043..b78da81c25f 100644 --- 
a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -18,7 +18,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -35,7 +35,7 @@ "prebuild": "rm -r dist-node/* || mkdir dist-node || true", "build": "npm run build:node && npm run build:web", "build:node": "tsc", - "build:web": "node scripts/build.js --jsx react", + "build:web": "NODE_OPTIONS='--openssl-legacy-provider' node scripts/build.js --jsx react", "start:web": "NODE_ENV=development node scripts/start.js", "build:check": "node -e \"require('./dist-node')\"", "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" @@ -65,7 +65,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@types/react": "^17.0.0", "@types/react-dom": "^17.0.0", "@types/react-router-dom": "5.1.7", diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index bf80faf9bab..8029f51114d 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -18,7 +18,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -64,7 +64,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "axios": "1.3.2", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 47ca6cc03ae..626bf8e24ee 100644 --- 
a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -55,7 +55,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "axios": "1.3.2", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 9b538a37b49..991e2cd0ac5 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -55,7 +55,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index e46d79d6a48..5a16c01a6dc 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -57,7 +57,7 @@ "@types/ip": "1.1.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": 
"16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "axios": "1.3.2", diff --git a/framework/package.json b/framework/package.json index c270f137272..d2c72b84a37 100644 --- a/framework/package.json +++ b/framework/package.json @@ -20,7 +20,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -68,7 +68,7 @@ "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "16.11.26", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "copyfiles": "2.2.0", diff --git a/package.json b/package.json index 64ab8835298..d64b7790cd0 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "workspaces": { @@ -58,7 +58,7 @@ } }, "devDependencies": { - "@types/node": "12.20.6", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index f650e47ca95..f80c04899dc 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -14,7 +14,7 @@ "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "dependencies": { diff --git a/sdk/package.json b/sdk/package.json index 5a261fbdf82..3fcb496cdea 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", diff --git a/templates/package.json.tmpl 
b/templates/package.json.tmpl index c5ee8fe837e..24aea10a93a 100644 --- a/templates/package.json.tmpl +++ b/templates/package.json.tmpl @@ -17,7 +17,7 @@ "url": "https://github.com/LiskHQ/lisk-sdk/issues" }, "engines": { - "node": ">=16.14.1 <=16", + "node": ">=18.12.0 <=18", "npm": ">=8.1.0" }, "main": "dist-node/index.js", @@ -40,7 +40,7 @@ "devDependencies": { "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", - "@types/node": "12.20.6", + "@types/node": "18.15.3", "@typescript-eslint/eslint-plugin": "4.19.0", "@typescript-eslint/parser": "4.19.0", "eslint": "7.22.0", diff --git a/yarn.lock b/yarn.lock index 7bd678628de..1ccd44c92de 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4038,15 +4038,10 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-11.11.6.tgz#df929d1bb2eee5afdda598a41930fe50b43eaa6a" integrity sha512-Exw4yUWMBXM3X+8oqzJNRqZSwUAaS4+7NdvHqQuFi/d+synz++xmX3QIf+BFqneW8N31R8Ky+sikfZUXq07ggQ== -"@types/node@12.20.6": - version "12.20.6" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.6.tgz#7b73cce37352936e628c5ba40326193443cfba25" - integrity sha512-sRVq8d+ApGslmkE9e3i+D3gFGk7aZHAT+G4cIpIEdLJYPsWiSPwcAnJEjddLQQDqV3Ra2jOclX/Sv6YrvGYiWA== - -"@types/node@16.11.26": - version "16.11.26" - resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.26.tgz#63d204d136c9916fb4dcd1b50f9740fe86884e47" - integrity sha512-GZ7bu5A6+4DtG7q9GsoHXy3ALcgeIHP4NnL0Vv2wu0uUB/yQex26v0tf6/na1mm0+bS9Uw+0DFex7aaKr2qawQ== +"@types/node@18.15.3": + version "18.15.3" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.3.tgz#f0b991c32cfc6a4e7f3399d6cb4b8cf9a0315014" + integrity sha512-p6ua9zBxz5otCmbpb5D3U4B5Nanw6Pk3PPyX05xnxbB/fRv71N7CPmORg7uAD5P70T0xmx1pzAx/FUfa5X+3cw== "@types/node@^13.7.0": version "13.13.9" @@ -13375,10 +13370,10 @@ nan@^2.14.0: resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" integrity 
sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg== -nanoid@^3.1.20: - version "3.1.20" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.20.tgz#badc263c6b1dcf14b71efaa85f6ab4c1d6cfc788" - integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw== +nanoid@^3.3.6: + version "3.3.6" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" + integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== nanomatch@^1.2.9: version "1.2.13" @@ -14596,6 +14591,11 @@ performance-now@^2.1.0: resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + picomatch@^2.0.4, picomatch@^2.0.5: version "2.2.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.1.tgz#21bac888b6ed8601f831ce7816e335bc779f0a4a" @@ -15391,13 +15391,13 @@ postcss@^7, postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.14, postcss@^7.0.17, po supports-color "^6.1.0" postcss@^8.1.0: - version "8.2.6" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.2.6.tgz#5d69a974543b45f87e464bc4c3e392a97d6be9fe" - integrity sha512-xpB8qYxgPuly166AGlpRjUdEYtmOWx2iCwGmrv4vqZL9YPVviDVPZPRXxnXr6xPZOdxQ9lp3ZBFCRgWJ7LE3Sg== + version "8.4.27" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.27.tgz#234d7e4b72e34ba5a92c29636734349e0d9c3057" + integrity sha512-gY/ACJtJPSmUFPDCHtX78+01fHa64FaU4zaaWfuh1MhGJISufJAH4cun6k/8fwsHYeK4UQmENQK+tRLCFJE8JQ== dependencies: - colorette "^1.2.1" - nanoid "^3.1.20" - source-map "^0.6.1" + nanoid 
"^3.3.6" + picocolors "^1.0.0" + source-map-js "^1.0.2" preferred-pm@^3.0.3: version "3.0.3" @@ -17293,6 +17293,11 @@ source-list-map@^2.0.0: resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + source-map-resolve@^0.5.0, source-map-resolve@^0.5.2: version "0.5.3" resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" From fcc7b7b29389931630e860673c64822a3e615311 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 10 Aug 2023 17:10:09 +0100 Subject: [PATCH 104/170] Receiving chain incorrectly checks for the owner of nft when sending chain is not own chain (#8834) * Update verify hook * Add new test case * Remove redundant check --- .../modules/nft/cc_commands/cc_transfer.ts | 11 +++---- .../nft/cc_comands/cc_transfer.spec.ts | 32 ++++++++++++++++--- 2 files changed, 33 insertions(+), 10 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index ea61435c300..f20c72359b6 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -70,13 +70,12 @@ export class CrossChainTransferCommand extends BaseCCCommand { const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(getMethodContext(), nftID); - if (nftChainID.equals(ownChainID) && !nftExists) { - throw new Error('Non-existent entry in the NFT substore'); - } - const owner = await 
this._method.getNFTOwner(getMethodContext(), nftID); - if (nftChainID.equals(ownChainID) && !owner.equals(sendingChainID)) { - throw new Error('NFT has not been properly escrowed'); + if (nftChainID.equals(ownChainID)) { + const owner = await this._method.getNFTOwner(getMethodContext(), nftID); + if (!owner.equals(sendingChainID)) { + throw new Error('NFT has not been properly escrowed'); + } } if (!nftChainID.equals(ownChainID) && nftExists) { diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 354f587b1b2..200df9e5523 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -280,7 +280,7 @@ describe('CrossChain Transfer Command', () => { eventQueue, getStore, logger: fakeLogger, - chainID, + chainID: newConfig.ownChainID, }; await expect(command.verify(context)).rejects.toThrow( @@ -291,9 +291,7 @@ describe('CrossChain Transfer Command', () => { it('should throw if nft chain id equals own chain id but no entry exists in nft substore for the nft id', async () => { await nftStore.del(methodContext, nftID); - await expect(command.verify(context)).rejects.toThrow( - 'Non-existent entry in the NFT substore', - ); + await expect(command.verify(context)).rejects.toThrow('NFT substore entry does not exist'); }); it('should throw if nft chain id equals own chain id but the owner of nft is different from the sending chain', async () => { @@ -306,6 +304,32 @@ describe('CrossChain Transfer Command', () => { await expect(command.verify(context)).rejects.toThrow('NFT has not been properly escrowed'); }); + it('should not throw if nft chain id is not equal to own chain id and no entry exists in nft substore for the nft id', async () => { + const newConfig = { + ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: 
BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID: newConfig.ownChainID, + }; + await nftStore.del(methodContext, nftID); + + await expect(command.verify(context)).resolves.toBeUndefined(); + }); + it('throw if nft chain id is not equal to own chain id and entry already exists in nft substore for the nft id', async () => { const newConfig = { ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), From 1fd3fe1fc55536b4f260f4e78b82f9a81404b0d5 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Fri, 11 Aug 2023 08:19:15 +0100 Subject: [PATCH 105/170] Method isNFTSupported incorrectly checks if the nft already exists (#8835) * Update verify hook * Remove check --------- Co-authored-by: shuse2 --- .../src/modules/nft/cc_commands/cc_transfer.ts | 4 ++++ framework/src/modules/nft/method.ts | 6 ------ .../modules/nft/cc_comands/cc_transfer.spec.ts | 4 +++- framework/test/unit/modules/nft/method.spec.ts | 14 -------------- 4 files changed, 7 insertions(+), 21 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index f20c72359b6..c8f9000446a 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -72,6 +72,10 @@ export class CrossChainTransferCommand extends BaseCCCommand { const nftExists = await nftStore.has(getMethodContext(), nftID); if (nftChainID.equals(ownChainID)) { + if (!nftExists) { + throw new Error('Non-existent entry in the NFT substore'); + } + const owner = await this._method.getNFTOwner(getMethodContext(), nftID); if (!owner.equals(sendingChainID)) { throw new Error('NFT has not been 
properly escrowed'); diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 7571db37d66..b1be7aa54b6 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -200,12 +200,6 @@ export class NFTMethod extends BaseMethod { methodContext: ImmutableMethodContext, nftID: Buffer, ): Promise { - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(methodContext, nftID); - if (!nftExists) { - throw new Error('NFT substore entry does not exist'); - } - const nftChainID = this.getChainID(nftID); if (nftChainID.equals(this._config.ownChainID)) { return true; diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 200df9e5523..c2fadce28a4 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -291,7 +291,9 @@ describe('CrossChain Transfer Command', () => { it('should throw if nft chain id equals own chain id but no entry exists in nft substore for the nft id', async () => { await nftStore.del(methodContext, nftID); - await expect(command.verify(context)).rejects.toThrow('NFT substore entry does not exist'); + await expect(command.verify(context)).rejects.toThrow( + 'Non-existent entry in the NFT substore', + ); }); it('should throw if nft chain id equals own chain id but the owner of nft is different from the sending chain', async () => { diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 628ca3902ae..d1c933dfbb2 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -380,20 +380,6 @@ describe('NFTMethod', () => { }); describe('isNFTSupported', () => { - beforeEach(async () => { - await nftStore.save(methodContext, nftID, { - owner: utils.getRandomBytes(LENGTH_CHAIN_ID), 
- attributesArray: [], - }); - }); - - it('should throw if entry does not exist in the nft substore for the nft id', async () => { - await nftStore.del(methodContext, nftID); - await expect(method.isNFTSupported(methodContext, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', - ); - }); - it('should return true if nft chain id equals own chain id', async () => { const isSupported = await method.isNFTSupported(methodContext, existingNativeNFT.nftID); expect(isSupported).toBe(true); From 78ee063d614a282cb2625d52cf6eec487efbf49f Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Fri, 11 Aug 2023 08:20:22 +0100 Subject: [PATCH 106/170] Update example app (#8836) * Update verify hook * Remove check * Update example app --- .../config/default/genesis_assets.json | 89 ++++++++++++++++++ .../config/default/genesis_block.blob | Bin 5920 -> 5933 bytes .../config/default/genesis_assets.json | 89 ++++++++++++++++++ .../config/default/genesis_block.blob | Bin 5924 -> 5941 bytes .../pos-sidechain-example-one/src/app/app.ts | 2 +- 5 files changed, 179 insertions(+), 1 deletion(-) diff --git a/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json b/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json index e091a26a9ad..f4fd8965034 100644 --- a/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json +++ b/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json @@ -1043,6 +1043,95 @@ } } }, + { + "module": "nft", + "data": { + "nftSubstore": [], + "supportedNFTsSubstore": [ + { + "chainID": "", + "supportedCollectionIDArray": [] + } + ] + }, + "schema": { + "$id": "/nft/module/genesis", + "type": "object", + "required": ["nftSubstore", "supportedNFTsSubstore"], + "properties": { + "nftSubstore": { + "type": "array", + "fieldNumber": 1, + "items": { + "type": "object", + "required": ["nftID", "owner", "attributesArray"], + "properties": { + "nftID": { + "dataType": 
"bytes", + "fieldNumber": 1, + "minLength": 16, + "maxLength": 16 + }, + "owner": { + "dataType": "bytes", + "fieldNumber": 2 + }, + "attributesArray": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["module", "attributes"], + "properties": { + "module": { + "dataType": "string", + "minLength": 1, + "maxLength": 32, + "fieldNumber": 1 + }, + "attributes": { + "dataType": "bytes", + "fieldNumber": 2 + } + } + } + } + } + } + }, + "supportedNFTsSubstore": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["chainID", "supportedCollectionIDArray"], + "properties": { + "chainID": { + "dataType": "bytes", + "fieldNumber": 1 + }, + "supportedCollectionIDArray": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["collectionID"], + "properties": { + "collectionID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + } + } + } + } + } + } + } + } + } + }, { "module": "pos", "data": { diff --git a/examples/interop/pos-mainchain-fast/config/default/genesis_block.blob b/examples/interop/pos-mainchain-fast/config/default/genesis_block.blob index e44d19a3fd2c2a7dae2f4d7b813e790a927c0da7..df3834bf72f497aa81b194ea17f447b44e3f942c 100644 GIT binary patch delta 115 zcmV-(0F3{jF0C#P3j6^G01%nv&!z?#03slf5gw5(JRsdiEO~xRJMlh30)L$jGgkxl zya1l%ZW6ofF2cYVy>5|-Bp|W(X!lQBGmhYiK)9Vy*%Oz87iJ3^luC?qbI^NG{tc1M VTMG*c18!z?5(E+g3bQ5w`V|03EJ*+W delta 103 zcmV-t0GR)+E}$+C3j6^G01(#il%fV003slf5gw5(JRtL+-{@FOJ%6bLMGv#gE*CDX z8w^ez654C)H_6X8e5jF$Bp|zls{t07?%f^jUfx!e6`9$?kd>CM7+yl{L`~S2yFHQ3 JTeCF*_!T(tELQ*k diff --git a/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json b/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json index 57352439b5d..34048e76cc8 100644 --- a/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json +++ 
b/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json @@ -1048,6 +1048,95 @@ } } }, + { + "module": "nft", + "data": { + "nftSubstore": [], + "supportedNFTsSubstore": [ + { + "chainID": "04000000", + "supportedCollectionIDArray": [] + } + ] + }, + "schema": { + "$id": "/nft/module/genesis", + "type": "object", + "required": ["nftSubstore", "supportedNFTsSubstore"], + "properties": { + "nftSubstore": { + "type": "array", + "fieldNumber": 1, + "items": { + "type": "object", + "required": ["nftID", "owner", "attributesArray"], + "properties": { + "nftID": { + "dataType": "bytes", + "fieldNumber": 1, + "minLength": 16, + "maxLength": 16 + }, + "owner": { + "dataType": "bytes", + "fieldNumber": 2 + }, + "attributesArray": { + "type": "array", + "fieldNumber": 3, + "items": { + "type": "object", + "required": ["module", "attributes"], + "properties": { + "module": { + "dataType": "string", + "minLength": 1, + "maxLength": 32, + "fieldNumber": 1 + }, + "attributes": { + "dataType": "bytes", + "fieldNumber": 2 + } + } + } + } + } + } + }, + "supportedNFTsSubstore": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["chainID", "supportedCollectionIDArray"], + "properties": { + "chainID": { + "dataType": "bytes", + "fieldNumber": 1 + }, + "supportedCollectionIDArray": { + "type": "array", + "fieldNumber": 2, + "items": { + "type": "object", + "required": ["collectionID"], + "properties": { + "collectionID": { + "dataType": "bytes", + "minLength": 4, + "maxLength": 4, + "fieldNumber": 1 + } + } + } + } + } + } + } + } + } + }, { "module": "pos", "data": { diff --git a/examples/interop/pos-sidechain-example-one/config/default/genesis_block.blob b/examples/interop/pos-sidechain-example-one/config/default/genesis_block.blob index 17af1bc9ec41bd4167f8857a7d03ad47976709f7..3e59180b2d3277e3ed8abdff6367ac006b4b6b9a 100644 GIT binary patch delta 119 zcmV--0EqvjF10QX3j6^G01&I>&!z?#03slf5gw5(JRo?>eKotWi{M;&${E={{P!bJ 
zgU(y%*Olj9fE#TZMV66?Bp}6L*J1aD3jg6d8o;LVHdshcz9W>T(v8!U{5O=oGqjP- ZO%D$W18!z?5(p9o3IqfI0J9hY3>FKyF!lfd delta 103 zcmV-t0GR)^E~G9G3j6^G01%Y$l%fV003slf5gw5(JRp7uS0a)-`cV81xf$>i)qkH_ z#!O9ErXQVgQO}H+Rp^n4Bp`06SApz^H4k01SvkdH$;RU3Zp*2xtyo56FpD4a!(5Tg JO|vTj3Kl`rEF1s; diff --git a/examples/interop/pos-sidechain-example-one/src/app/app.ts b/examples/interop/pos-sidechain-example-one/src/app/app.ts index 3250d66e460..61d1e557edf 100644 --- a/examples/interop/pos-sidechain-example-one/src/app/app.ts +++ b/examples/interop/pos-sidechain-example-one/src/app/app.ts @@ -4,7 +4,7 @@ import { registerModules } from './modules'; import { registerPlugins } from './plugins'; export const getApplication = (config: PartialApplicationConfig): Application => { - const { app, method } = Application.defaultApplication(config, true); + const { app, method } = Application.defaultApplication(config, false); const nftModule = new NFTModule(); const testNftModule = new TestNftModule(); From 88a1de590f899e2ce3422c2de0af21e6931f8c66 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 11 Aug 2023 10:28:41 +0200 Subject: [PATCH 107/170] :arrow_up: Update version --- commander/package.json | 24 ++++++------ elements/lisk-api-client/package.json | 10 ++--- elements/lisk-bft/package.json | 12 +++--- elements/lisk-chain/package.json | 14 +++---- elements/lisk-client/package.json | 18 ++++----- elements/lisk-codec/package.json | 6 +-- elements/lisk-cryptography/package.json | 2 +- elements/lisk-elements/package.json | 28 +++++++------- elements/lisk-genesis/package.json | 12 +++--- elements/lisk-p2p/package.json | 8 ++-- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +-- elements/lisk-transactions/package.json | 8 ++-- elements/lisk-tree/package.json | 6 +-- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../package.json | 10 ++--- .../lisk-framework-faucet-plugin/package.json | 16 ++++---- 
.../lisk-framework-forger-plugin/package.json | 20 +++++----- .../package.json | 14 +++---- .../package.json | 14 +++---- .../package.json | 18 ++++----- framework/package.json | 28 +++++++------- protocol-specs/package.json | 8 ++-- sdk/package.json | 38 +++++++++---------- 25 files changed, 164 insertions(+), 164 deletions(-) diff --git a/commander/package.json b/commander/package.json index 46b1d78ee0b..e6b9e0d872c 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.1.11-alpha.0", + "version": "5.1.11-alpha.2", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,17 +91,17 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-client": "^5.2.3-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-client": "^5.2.3-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-genesis": "^0.2.5-alpha.0", - "@liskhq/lisk-passphrase": "^3.1.1", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-genesis": "^0.2.5-alpha.2", + "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "@oclif/command": "1.8.16", "@oclif/config": "1.18.3", "@oclif/errors": "1.3.5", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.9.3-alpha.0", + "lisk-framework": "^0.9.3-alpha.2", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git 
a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index e67842c0427..45f5a88b196 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.1.7-alpha.0", + "version": "5.1.7-alpha.2", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.0", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "18.15.3", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index 340af7b6d18..e5bd1c63790 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.3.5-alpha.0", + "version": "0.3.5-alpha.2", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": 
"^3.2.2-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "@types/node": "18.15.3", "debug": "4.3.4" }, diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 50bd87a0f94..32d8e4d1efe 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.3.5-alpha.0", + "version": "0.3.5-alpha.2", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-tree": "^0.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-tree": "^0.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.1", + "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", "@types/debug": "4.1.7", "@types/faker": "4.1.10", "@types/jest": "26.0.21", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 8f1c889eab2..6d48abdea86 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.2.3-alpha.0", + "version": "5.2.3-alpha.2", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -55,14 +55,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, 
"dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-passphrase": "^3.1.1", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-tree": "^0.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-tree": "^0.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 95aca50256a..072a74136a4 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.2.2", + "version": "0.2.3-alpha.1", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2" + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 27628e36c5e..983fb31f41f 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "3.2.1", + "version": "3.2.2-alpha.1", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve 
GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 2927a4d8d34..8e2fcdafe1a 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.2.3-alpha.0", + "version": "5.2.3-alpha.2", "description": "Libraries to support building blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,19 +36,19 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-bft": "^0.3.5-alpha.0", - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-genesis": "^0.2.5-alpha.0", - "@liskhq/lisk-p2p": "^0.7.4-alpha.0", - "@liskhq/lisk-passphrase": "^3.1.1", - "@liskhq/lisk-transaction-pool": "^0.5.3", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-tree": "^0.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2" + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-bft": "^0.3.5-alpha.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-genesis": "^0.2.5-alpha.2", + "@liskhq/lisk-p2p": "^0.7.4-alpha.2", + "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", + "@liskhq/lisk-transaction-pool": "^0.5.4-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-tree": "^0.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 16b7f0d6cd7..34ef25a3a5a 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json 
@@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.2.5-alpha.0", + "version": "0.2.5-alpha.2", "description": "Library containing genesis block creation functions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "lodash.clonedeep": "4.5.0" }, "devDependencies": { diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 3e5d8d7fa97..3f318700d78 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.7.4-alpha.0", + "version": "0.7.4-alpha.2", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,9 +42,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "lodash.shuffle": "4.2.0", "semver": "7.3.5", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index e8040cab414..66c6e59c18f 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": 
"@liskhq/lisk-passphrase", - "version": "3.1.1", + "version": "3.1.2-alpha.1", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 444e112f5d9..af6fcfb4792 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.5.3", + "version": "0.5.4-alpha.1", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,8 +37,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 1d1915fe248..24404fe6021 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "5.2.2", + "version": "5.2.3-alpha.1", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,9 +36,9 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-validator": "^0.6.2" + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git 
a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 91390b0c95e..3b6bae4ccb8 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.2.2", + "version": "0.2.3-alpha.1", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-utils": "^0.2.1" + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index b3ef4d938b0..546ce79d423 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.2.1", + "version": "0.2.2-alpha.1", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 9db404892b4..7f5a9f30760 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.6.2", + "version": "0.6.3-alpha.1", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "ajv": "8.1.0", "ajv-formats": "2.0.2", "debug": "4.3.4", diff 
--git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 7ef0a096cd0..b26b44e5c99 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.1.8-alpha.0", + "version": "0.1.8-alpha.2", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.2.3-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-utils": "^0.2.1", + "@liskhq/lisk-client": "^5.2.3-alpha.2", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.9.3-alpha.0", + "lisk-framework": "^0.9.3-alpha.2", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index b78da81c25f..b77166680b4 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.1.8-alpha.0", + "version": "0.1.8-alpha.2", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-client": 
"^5.2.3-alpha.0", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-client": "^5.2.3-alpha.2", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "axios": "1.3.2", "express": "4.17.3", - "lisk-framework": "^0.9.3-alpha.0", + "lisk-framework": "^0.9.3-alpha.2", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 8029f51114d..f4b1d641f20 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.2.8-alpha.0", + "version": "0.2.8-alpha.2", "description": "A plugin for lisk-framework that monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,13 +38,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "axios": "1.3.2", "cors": "2.8.5", "debug": "4.3.4", @@ -52,11 +52,11 @@ 
"express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.9.3-alpha.0" + "lisk-framework": "^0.9.3-alpha.2" }, "devDependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-genesis": "^0.2.5-alpha.0", + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-genesis": "^0.2.5-alpha.2", "@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 626bf8e24ee..941f79df314 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.2.8-alpha.0", + "version": "0.2.8-alpha.2", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,18 +37,18 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.3-alpha.0" + "lisk-framework": "^0.9.3-alpha.2" }, "devDependencies": { - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-transactions": "^5.2.2", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", "@types/cors": "2.8.6", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json 
b/framework-plugins/lisk-framework-monitor-plugin/package.json index 991e2cd0ac5..80526ee6478 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.2.8-alpha.0", + "version": "0.2.8-alpha.2", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,16 +37,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.3-alpha.0" + "lisk-framework": "^0.9.3-alpha.2" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 5a16c01a6dc..bbb89bbbf3d 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.2.8-alpha.0", + "version": "0.2.8-alpha.2", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": 
"Apache-2.0", @@ -38,17 +38,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.3.5-alpha.0", - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-bft": "^0.3.5-alpha.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.9.3-alpha.0" + "lisk-framework": "^0.9.3-alpha.2" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework/package.json b/framework/package.json index d2c72b84a37..82ffa17ef2e 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.9.3-alpha.0", + "version": "0.9.3-alpha.2", "description": "Framework to build blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,19 +40,19 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-bft": "^0.3.5-alpha.0", - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": "^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-bft": "^0.3.5-alpha.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-genesis": "^0.2.5-alpha.0", - "@liskhq/lisk-p2p": "^0.7.4-alpha.0", - 
"@liskhq/lisk-transaction-pool": "^0.5.3", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-tree": "^0.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", + "@liskhq/lisk-genesis": "^0.2.5-alpha.2", + "@liskhq/lisk-p2p": "^0.7.4-alpha.2", + "@liskhq/lisk-transaction-pool": "^0.5.4-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-tree": "^0.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.5", @@ -64,7 +64,7 @@ "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.1", + "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index f80c04899dc..4e1a917f9af 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.2.2", - "@liskhq/lisk-cryptography": "3.2.1", - "@liskhq/lisk-passphrase": "3.1.1", - "@liskhq/lisk-validator": "0.6.2", + "@liskhq/lisk-codec": "0.2.3-alpha.0", + "@liskhq/lisk-cryptography": "3.2.2-alpha.0", + "@liskhq/lisk-passphrase": "3.1.2-alpha.0", + "@liskhq/lisk-validator": "0.6.3-alpha.0", "protobufjs": "6.9.0" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 3fcb496cdea..1d00cfdb4fd 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.2.3-alpha.0", + "version": "5.2.3-alpha.2", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,25 +29,25 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.0", - "@liskhq/lisk-bft": "^0.3.5-alpha.0", - "@liskhq/lisk-chain": "^0.3.5-alpha.0", - "@liskhq/lisk-codec": 
"^0.2.2", - "@liskhq/lisk-cryptography": "^3.2.1", + "@liskhq/lisk-api-client": "^5.1.7-alpha.2", + "@liskhq/lisk-bft": "^0.3.5-alpha.2", + "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-codec": "^0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-framework-forger-plugin": "^0.2.8-alpha.0", - "@liskhq/lisk-framework-http-api-plugin": "^0.2.8-alpha.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.2.8-alpha.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.8-alpha.0", - "@liskhq/lisk-genesis": "^0.2.5-alpha.0", - "@liskhq/lisk-p2p": "^0.7.4-alpha.0", - "@liskhq/lisk-passphrase": "^3.1.1", - "@liskhq/lisk-transaction-pool": "^0.5.3", - "@liskhq/lisk-transactions": "^5.2.2", - "@liskhq/lisk-tree": "^0.2.2", - "@liskhq/lisk-utils": "^0.2.1", - "@liskhq/lisk-validator": "^0.6.2", - "lisk-framework": "^0.9.3-alpha.0" + "@liskhq/lisk-framework-forger-plugin": "^0.2.8-alpha.2", + "@liskhq/lisk-framework-http-api-plugin": "^0.2.8-alpha.2", + "@liskhq/lisk-framework-monitor-plugin": "^0.2.8-alpha.2", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.8-alpha.2", + "@liskhq/lisk-genesis": "^0.2.5-alpha.2", + "@liskhq/lisk-p2p": "^0.7.4-alpha.2", + "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", + "@liskhq/lisk-transaction-pool": "^0.5.4-alpha.1", + "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-tree": "^0.2.3-alpha.1", + "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "lisk-framework": "^0.9.3-alpha.2" }, "devDependencies": { "eslint": "7.22.0", From ffc5406cbdd0e82986e62af837fd29de500fd513 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Fri, 11 Aug 2023 11:02:35 +0200 Subject: [PATCH 108/170] fix --- protocol-specs/package.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/protocol-specs/package.json b/protocol-specs/package.json index 4e1a917f9af..f1aa47055fe 100644 --- a/protocol-specs/package.json +++ 
b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.2.3-alpha.0", - "@liskhq/lisk-cryptography": "3.2.2-alpha.0", - "@liskhq/lisk-passphrase": "3.1.2-alpha.0", - "@liskhq/lisk-validator": "0.6.3-alpha.0", + "@liskhq/lisk-codec": "0.2.3-alpha.1", + "@liskhq/lisk-cryptography": "3.2.2-alpha.1", + "@liskhq/lisk-passphrase": "3.1.2-alpha.1", + "@liskhq/lisk-validator": "0.6.3-alpha.1", "protobufjs": "6.9.0" }, "devDependencies": { From 1f1477174fa238aaf85db394b63d530787f364c1 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 24 Aug 2023 09:22:22 +0200 Subject: [PATCH 109/170] :nail_care: Fix merge conflict --- elements/lisk-chain/test/unit/transactions.spec.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/elements/lisk-chain/test/unit/transactions.spec.ts b/elements/lisk-chain/test/unit/transactions.spec.ts index 4c1bc833a4d..005dfb78974 100644 --- a/elements/lisk-chain/test/unit/transactions.spec.ts +++ b/elements/lisk-chain/test/unit/transactions.spec.ts @@ -13,7 +13,7 @@ */ import { utils } from '@liskhq/lisk-cryptography'; import { Transaction } from '../../src/transaction'; -import { TRANSACTION_MAX_PARAMS_SIZE } from '../../src/constants'; +import { MAX_PARAMS_SIZE } from '../../src/constants'; describe('blocks/transactions', () => { describe('#validateTransaction', () => { @@ -25,7 +25,7 @@ describe('blocks/transactions', () => { command: 'transfer', fee: BigInt(613000), // 126 is the size of other properties - params: utils.getRandomBytes(TRANSACTION_MAX_PARAMS_SIZE), + params: utils.getRandomBytes(MAX_PARAMS_SIZE), nonce: BigInt(2), senderPublicKey: utils.getRandomBytes(32), signatures: [utils.getRandomBytes(64)], @@ -65,7 +65,7 @@ describe('blocks/transactions', () => { command: 'transfer', fee: BigInt(613000), // 126 is the size of other properties - params: utils.getRandomBytes(TRANSACTION_MAX_PARAMS_SIZE + 1), + params: utils.getRandomBytes(MAX_PARAMS_SIZE + 
1), nonce: BigInt(2), senderPublicKey: utils.getRandomBytes(32), signatures: [utils.getRandomBytes(64)], From ed5649eb954c7c47e11eb2d2ea2b84b9336c4c4b Mon Sep 17 00:00:00 2001 From: Ishan Date: Fri, 25 Aug 2023 14:53:46 +0200 Subject: [PATCH 110/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20version=206?= =?UTF-8?q?.1.0-beta.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- commander/package.json | 22 +-- .../templates/init/package-template.json | 16 +- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 12 +- elements/lisk-chain/package.json | 14 +- elements/lisk-client/package.json | 18 +- elements/lisk-codec/package.json | 8 +- elements/lisk-cryptography/package.json | 4 +- elements/lisk-elements/package.json | 24 +-- elements/lisk-p2p/package.json | 8 +- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +- elements/lisk-transactions/package.json | 8 +- elements/lisk-tree/package.json | 6 +- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../config/default/genesis_block.blob | Bin 5937 -> 7306 bytes .../interop/pos-mainchain-fast/package.json | 15 +- .../config/default/genesis_block.blob | Bin 5941 -> 7081 bytes .../pos-sidechain-example-one/package.json | 15 +- .../config/default/genesis_block.blob | Bin 7064 -> 7064 bytes .../pos-sidechain-example-two/package.json | 15 +- examples/poa-sidechain/package.json | 15 +- examples/pos-mainchain/package.json | 16 +- .../package.json | 4 +- .../package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 16 +- .../lisk-framework-forger-plugin/package.json | 6 +- .../package.json | 4 +- .../package.json | 6 +- framework/package.json | 24 +-- protocol-specs/package.json | 8 +- sdk/package.json | 26 +-- yarn.lock | 156 ++++++++++++++++++ 34 files changed, 320 insertions(+), 168 deletions(-) diff --git a/commander/package.json b/commander/package.json index 
a1f5a7c8d3a..847a2a67b44 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "6.0.0-beta.7", + "version": "6.1.0-beta.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -101,16 +101,16 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-beta.6", - "@liskhq/lisk-chain": "^0.4.0-beta.6", - "@liskhq/lisk-client": "^6.0.0-beta.6", - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-chain": "^0.5.0-beta.0", + "@liskhq/lisk-client": "^6.1.0-beta.0", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-passphrase": "^4.0.0-beta.2", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,7 +121,7 @@ "cli-table3": "0.6.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-framework": "^0.10.0-beta.7", + "lisk-framework": "^0.11.0-beta.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.5.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index da9cfefc0ee..3c78a0b2fbf 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -85,12 +85,12 @@ } }, "dependencies": { - 
"@liskhq/lisk-framework-dashboard-plugin": "0.2.0-beta.7", - "@liskhq/lisk-framework-faucet-plugin": "0.2.0-beta.7", - "@liskhq/lisk-framework-monitor-plugin": "0.3.0-beta.7", - "@liskhq/lisk-framework-forger-plugin": "0.3.0-beta.7", - "@liskhq/lisk-framework-report-misbehavior-plugin": "0.3.0-beta.7", - "@liskhq/lisk-framework-chain-connector-plugin": "0.1.0-beta.7", + "@liskhq/lisk-framework-dashboard-plugin": "0.3.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "0.3.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "0.4.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "0.4.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "0.4.0-beta.0", + "@liskhq/lisk-framework-chain-connector-plugin": "0.2.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -98,8 +98,8 @@ "axios": "0.21.2", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "6.0.0-beta.7", - "lisk-sdk": "6.0.0-beta.7", + "lisk-commander": "6.1.0-beta.0", + "lisk-sdk": "6.1.0-beta.0", "tar": "6.1.11", "tslib": "2.4.1" }, diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 9d30e79eaa1..b7e73396a5f 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -28,7 +28,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "lisk-sdk": "^6.0.0-beta.7" + "lisk-sdk": "^6.1.0-beta.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index ae7b3b68575..5fb8171afef 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": 
"6.0.0-beta.6", + "version": "6.1.0-beta.0", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "isomorphic-ws": "4.0.1", "ws": "8.11.0", "zeromq": "6.0.0-beta.6" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.4.0-beta.6", + "@liskhq/lisk-chain": "^0.5.0-beta.0", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", "@types/node": "18.15.3", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 1bfd6fccb11..52c22658e19 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.4.0-beta.6", + "version": "0.5.0-beta.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-tree": "^0.3.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "debug": "4.3.4" }, "devDependencies": { - 
"@liskhq/lisk-passphrase": "^4.0.0-beta.2", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", "@types/debug": "4.1.5", "@types/faker": "4.1.10", "@types/jest": "29.2.3", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 7503f017180..ef2bc09f153 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "6.0.0-beta.6", + "version": "6.1.0-beta.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -56,14 +56,14 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-beta.6", - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-passphrase": "^4.0.0-beta.2", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-tree": "^0.3.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 41095805907..6bc5bacdca1 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.3.0-beta.5", + "version": "0.4.0-beta.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node 
-e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5" + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 00e6edf793e..94451df2436 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "4.0.0-beta.4", + "version": "4.1.0-beta.0", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,7 +35,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-passphrase": "^4.0.0-beta.2", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", "buffer-reverse": "1.0.1", "hash-wasm": "4.9.0", "tweetnacl": "1.0.3" diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index e89dc106e70..fd29ab1e7db 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "6.0.0-beta.6", + "version": "6.1.0-beta.0", "description": "Elements for building blockchain applications in the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,18 +35,18 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-beta.6", - "@liskhq/lisk-chain": "^0.4.0-beta.6", - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-chain": "^0.5.0-beta.0", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + 
"@liskhq/lisk-cryptography": "^4.1.0-beta.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.8.0-beta.6", - "@liskhq/lisk-passphrase": "^4.0.0-beta.2", - "@liskhq/lisk-transaction-pool": "^0.6.0-beta.6", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-tree": "^0.3.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5" + "@liskhq/lisk-p2p": "^0.9.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-transaction-pool": "^0.7.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index ac50eee07a8..a3fc16fafe7 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.8.0-beta.6", + "version": "0.9.0-beta.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,9 +41,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "lodash.shuffle": "4.2.0", "semver": "7.5.2", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index f1ab97a2fd0..6bce1a2bac5 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "4.0.0-beta.2", + "version": "4.1.0-beta.0", "description": "Mnemonic passphrase 
helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index ff6bf059a5d..3f0f183dc68 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.6.0-beta.6", + "version": "0.7.0-beta.0", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-utils": "^0.3.0-beta.3", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index ea9d7e13a6c..34d85a23857 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "6.0.0-beta.5", + "version": "6.1.0-beta.0", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-validator": "^0.7.0-beta.5" + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 325297e82f7..97023c9d307 100644 --- a/elements/lisk-tree/package.json 
+++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.3.0-beta.5", + "version": "0.4.0-beta.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,8 +35,8 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-utils": "^0.3.0-beta.3" + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index e64a8e2bcfd..d34e3e7ab82 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.3.0-beta.3", + "version": "0.4.0-beta.0", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 0883ecbe101..4527de4102f 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.7.0-beta.5", + "version": "0.8.0-beta.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", "ajv": "8.1.0", "ajv-formats": "2.1.1", "debug": "4.3.4", diff --git a/examples/interop/pos-mainchain-fast/config/default/genesis_block.blob b/examples/interop/pos-mainchain-fast/config/default/genesis_block.blob index 
ded37d914ede9cfd7e8069bd377a2340fa7c69c9..9a35413a212582d8a23bb8b4046f9ed1db52c01e 100644 GIT binary patch delta 1502 zcmZA1i9Ztx0KoAHVJb&+z8AAd&9Vs_tz5~lnVg$*&N)iPNGL;*kSpfOHOFj)@RFO* zbLA;SBpJDK=kEFN-sgQj@AW?Y3E$7}^Z8XO$7%`z_qq51!bNNb^Qa^M0!|i@Nj`tc zt=(q22WPLM93)PH2(oe-I#1%@VM*T|9FS_$3GKD&df=kh3PkQO`h=}3>dm~+@cS>> znSGrD;6234hxu9z~b7M<4kqQvOrK2KmL z2*^$GrHZzIs?f6~%hO-&LN#}e7g~ z+OP*kk&2^$LZ-?UYb0CUg+4k7LzvVCp>eZEDK4aIzVztR7C1j!%*H6YZ4Tm;kS|F| zG%bJ^Z+BsBWC!e#I;ggxF2aPxtx(hW)0qI{VxbHWn@QV1gMLksGzx)=m|q}%Ni)LC zf-K7dWR=t?4D}{7s^? z|CC5+#LGXBVbWp}!FS?y2N9;hcv~=HGAfm50qt#==TRg{Mfn|W4(qi6a2HDoI67Oq z5U2bQ-sH9A#HMk7)4)f~6vEi-;?eExNx#(l#R4%A8j2M=u&X3NLifX6tE|)^-{+FO zM{HE>sMbyQ&z6v`ueMmyD$~A59{=jgU&%vCN*-!gaZSPp`5lZKLX%~d4i-zHV5uCl z=-xNDg{Y6mnu0o<3OQ+l}BaF;{N3`eQ1M0l?y(5I-B})|j&R9B>rKhcarpB2 zPMfS>h8*XqM-d$C%*pGy<=lB&0dd`-o;bI$W_L5?Ztu-adUStSH9t)i|K4R53f_p{ zUDfGl9t}7_UI`9GLk~w7!H(Hgfg*EG2MYT?P)Z{%G>Pd!Jm}+qx&%@cS5uAhf5UXe9(0AGH>?} zrKU83skJbvD1YRoK2?VUdpZt!V*qD$XQae^!;A?puD#Ux5Jh5#8%KO@oe`96^e!5O zIHkmP^`wTL!~f|luz_+GF{U{`NIi@!zp+=)ceb8|f#$>7Hc!~7`MEg^mRiO?1$psx z$>39L(>lDhPW24xvu@#6t4l=~R)pLkF3#Yqk@g%iTHkB@Oax=(3aiy%DH{qdc{hdD z^#aae9P=#7dOyILB1Aj6#!`T}30Q2N(vH*rkW*Zb@jwY0@ZZy@AfyrOX7OF^Q!3}5 z%^z});%C$=GIUE6bUm>rRpE`RV%O{lwc0!_C?YrsJs54|J%ajsH9Lt|linG7ttWeL zBahdt!DyO1_xhQ40UUr7xOlZ*VqOX-rU3Nm@z{91Yi@KEu#9<4cO6#PH$CST*>9h( zr@j9?Yl-twW;aZ3NSNH)QXrSi`P4Bai{L_6s>3m2P;h(eW4oMqVX^8#+t>Ho-QWyo>x!5SaAu}dNd452^ByX9{tq8sH);vLzs;4HhN+5A2 z#2P=h0}MmyU5!%(bWzOOMwCiU#1|6h?DX@hHoB3`XbSEL3IlL&a}wqWvp)fJ1hWha Fei4emKLP*% diff --git a/examples/interop/pos-mainchain-fast/package.json b/examples/interop/pos-mainchain-fast/package.json index fdc62dc3eab..c1f7f9603ed 100644 --- a/examples/interop/pos-mainchain-fast/package.json +++ b/examples/interop/pos-mainchain-fast/package.json @@ -108,12 +108,11 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-beta.0", - 
"@liskhq/lisk-framework-monitor-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +120,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-beta.0", - "lisk-sdk": "^6.0.0-beta.0", + "lisk-commander": "^6.1.0-beta.0", + "lisk-sdk": "^6.1.0-beta.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/interop/pos-sidechain-example-one/config/default/genesis_block.blob b/examples/interop/pos-sidechain-example-one/config/default/genesis_block.blob index 0f733a67f999f1f70834e07606160b333d49dd47..61822d8bb3bdf96b513d6a09550d43defbfd2f94 100644 GIT binary patch delta 1280 zcmZ9}Z8#GM0KoCtTG~!#%Z5qKdYM-1mc^_@co;4eby$s6+z8|HaxEQVW7(}}UeXF< zcyKc>5f!3y$xBBG=~yd~(4AV$W^+&X;d;7{|Bt`t-?+Py41=!dtpgans4A&gYYM<( zvf-Pu3HBj=er@k@?rOQdIogo0cd_9Xn$-4PI+*T%0%rp3@;~5=87Yrq6fOmJR{z2e zsB;kGnac8wtZXSIaiqE0*=V576Eim+uY=B=37$I5KdmKqt!4xxBE30Gx2M3zk)L>w zq3r%3!&e9>C?+}})+y1qWyFw9`h+|T&d$?lB7QIOhG87IGOAMtJ zI>VL5eCu=EeYz$)$xHbO(|pD0^XPFwp?IJTUnH^=819+X}M_CzO6r z@Tjb9l3VZLOyGoD{NPR_bMRWpAjaLS&@4Hk(04<6zIg#htQIBYp1LQdAt64R(te&* zV$c4Bb$ATh!b^G7OU$YwJ?uf2-?LG-nEv}`$Kp9Xg18a z_#6G~tp6xjRWrdHKiJF@j5yu+W0f|T15yJ!hFiM%9C~e25&98fEbUM?Xt&y1vX^+) z^Ugs$#yw0(<1Q*a1ShAa-|~(a_Qyo|85~T_mT^bNeL5l8Unzw(Fq1Fi?9jw=J5KIe z+SVvXQV65YBQ?a-v!fb0nJaH)`zx^D^BvCZv`5)wQ7U~!Q>g^zN>^#0;O3a`^(ZI z=La`Na-74bxann*&(t@iSN7aZvLneS@-XS^y&RXZ7rJL$@R%|6)KSq+a9?PIB{<-F=mnbzR8F5tGDi3)&DY%f+liCw+gxQ 
zBlF6~Sj>v&iR#VwjD54L}~wU_lM^F;af`H^fKO7V^cQAyoQ0Wj4N%Buw5&E zEB7`0tTCtC+zqa+FJs#rYea1!sN}``8b9ssRM7HB%-hMAs7OH<3W(TuBC)kVlA~$d zKq63-slZH;9rM>>H&>cThzWnj*$jeQ&_)-H9eaTD1+X*D<1x3Nq@}h;v4Zj~0%q?o chGpd})NK8Bb7yfe^ywjO#cWwa5jYS25BftSX3M;YA%Nn{+8Cd`T delta 63 zcmV-F0Kos4H<&jM3j6^G01({4f~N);03slf5gw5(fFMyn<#lr%I1vPJ{~o0j=$DI- VmJuX;miD?=RIg{Xy|=N=%No4H8Ik}1 diff --git a/examples/interop/pos-sidechain-example-two/package.json b/examples/interop/pos-sidechain-example-two/package.json index f7d33b123b6..f7e1332e9d1 100644 --- a/examples/interop/pos-sidechain-example-two/package.json +++ b/examples/interop/pos-sidechain-example-two/package.json @@ -108,12 +108,11 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +120,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-beta.0", - "lisk-sdk": "^6.0.0-beta.0", + "lisk-commander": "^6.1.0-beta.0", + "lisk-sdk": "^6.1.0-beta.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/poa-sidechain/package.json b/examples/poa-sidechain/package.json index edb14eee5c8..6c696a74504 100755 --- a/examples/poa-sidechain/package.json +++ b/examples/poa-sidechain/package.json @@ -113,12 +113,11 @@ } }, "dependencies": { - 
"@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-alpha.7", - "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-alpha.7", - "@liskhq/lisk-framework-forger-plugin": "^0.3.0-alpha.7", - "@liskhq/lisk-framework-monitor-plugin": "^0.3.0-alpha.7", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-alpha.7", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -126,8 +125,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-alpha.7", - "lisk-sdk": "^6.0.0-alpha.7", + "lisk-commander": "^6.1.0-beta.0", + "lisk-sdk": "^6.1.0-beta.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/pos-mainchain/package.json b/examples/pos-mainchain/package.json index 9e597a8449e..935b76d40c9 100755 --- a/examples/pos-mainchain/package.json +++ b/examples/pos-mainchain/package.json @@ -114,12 +114,12 @@ } }, "dependencies": { - "@liskhq/lisk-validator": "^0.7.0-beta.0", - "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-beta.1", - "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-beta.1", - "@liskhq/lisk-framework-forger-plugin": "^0.3.0-beta.1", - "@liskhq/lisk-framework-monitor-plugin": "^0.3.0-beta.1", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-beta.1", + "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": 
"^0.4.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -127,8 +127,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-beta.1", - "lisk-sdk": "^6.0.0-beta.1", + "lisk-commander": "^6.1.0-beta.0", + "lisk-sdk": "^6.1.0-beta.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/package.json b/framework-plugins/lisk-framework-chain-connector-plugin/package.json index e072da5af5d..975e9d276d7 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/package.json +++ b/framework-plugins/lisk-framework-chain-connector-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-chain-connector-plugin", - "version": "0.1.0-beta.7", + "version": "0.2.0-beta.0", "description": "A plugin used by a relayer node to automatically create and submit Cross Chain Transaction by aggregating off-chain information of a chain", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -34,7 +34,7 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.0.0-beta.7" + "lisk-sdk": "^6.1.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 2703b14989a..381b2b01966 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.2.0-beta.7", + "version": "0.3.0-beta.0", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -46,10 +46,10 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-client": "^6.0.0-beta.6", + 
"@liskhq/lisk-client": "^6.1.0-beta.0", "express": "4.18.2", "json-format-highlight": "1.0.4", - "lisk-sdk": "^6.0.0-beta.7", + "lisk-sdk": "^6.1.0-beta.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index a8e96aaaffe..1387e799d65 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.2.0-beta.7", + "version": "0.3.0-beta.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -47,15 +47,15 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-api-client": "^6.0.0-beta.6", - "@liskhq/lisk-client": "^6.0.0-beta.6", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-client": "^6.1.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "axios": "1.2.0", "express": "4.18.2", - "lisk-sdk": "^6.0.0-beta.7", + "lisk-sdk": "^6.1.0-beta.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 068b724dd18..31b2c86da06 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": 
"@liskhq/lisk-framework-forger-plugin", - "version": "0.3.0-beta.7", + "version": "0.4.0-beta.0", "description": "A plugin for lisk-framework that monitors configured validators forging activity and stakers information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,10 +40,10 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.0.0-beta.7" + "lisk-sdk": "^6.1.0-beta.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^6.0.0-beta.6", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", "@types/debug": "4.1.5", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 1e42d73388f..eb2cb2352d8 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.3.0-beta.7", + "version": "0.4.0-beta.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,7 +40,7 @@ "express": "4.18.2", "express-rate-limit": "6.7.0", "ip": "1.1.5", - "lisk-sdk": "^6.0.0-beta.7" + "lisk-sdk": "^6.1.0-beta.0" }, "devDependencies": { "@types/cors": "2.8.12", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 233cd6d80cf..967043510d0 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.3.0-beta.7", + "version": "0.4.0-beta.0", "description": "A plugin for lisk-framework that provides automatic detection of 
validator misbehavior and sends a reportValidatorMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,9 +38,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", "fs-extra": "11.1.0", - "lisk-sdk": "^6.0.0-beta.7" + "lisk-sdk": "^6.1.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework/package.json b/framework/package.json index b7e796fabca..37316208680 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.10.0-beta.7", + "version": "0.11.0-beta.0", "description": "Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,17 +42,17 @@ }, "dependencies": { "@chainsafe/blst": "0.2.9", - "@liskhq/lisk-api-client": "^6.0.0-beta.6", - "@liskhq/lisk-chain": "^0.4.0-beta.6", - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-chain": "^0.5.0-beta.0", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.8.0-beta.6", - "@liskhq/lisk-transaction-pool": "^0.6.0-beta.6", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-tree": "^0.3.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5", + "@liskhq/lisk-p2p": "^0.9.0-beta.0", + "@liskhq/lisk-transaction-pool": "^0.7.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.9", @@ -64,7 +64,7 @@ "zeromq": "6.0.0-beta.6" }, "devDependencies": { - 
"@liskhq/lisk-passphrase": "^4.0.0-beta.2", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", "@types/bunyan": "1.8.6", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index 10b09549562..5707fccdc63 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.3.0-beta.5", - "@liskhq/lisk-cryptography": "4.0.0-beta.4", - "@liskhq/lisk-passphrase": "4.0.0-beta.2", - "@liskhq/lisk-validator": "0.7.0-beta.5", + "@liskhq/lisk-codec": "0.4.0-beta.0", + "@liskhq/lisk-cryptography": "4.1.0-beta.0", + "@liskhq/lisk-passphrase": "4.1.0-beta.0", + "@liskhq/lisk-validator": "0.8.0-beta.0", "protobufjs": "7.2.4" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 3b33495f319..00d61cdfb5b 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "6.0.0-beta.7", + "version": "6.1.0-beta.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,19 +29,19 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-beta.6", - "@liskhq/lisk-chain": "^0.4.0-beta.6", - "@liskhq/lisk-codec": "^0.3.0-beta.5", - "@liskhq/lisk-cryptography": "^4.0.0-beta.4", + "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-chain": "^0.5.0-beta.0", + "@liskhq/lisk-codec": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.8.0-beta.6", - "@liskhq/lisk-passphrase": "^4.0.0-beta.2", - "@liskhq/lisk-transaction-pool": "^0.6.0-beta.6", - "@liskhq/lisk-transactions": "^6.0.0-beta.5", - "@liskhq/lisk-tree": "^0.3.0-beta.5", - "@liskhq/lisk-utils": "^0.3.0-beta.3", - "@liskhq/lisk-validator": "^0.7.0-beta.5", - "lisk-framework": "^0.10.0-beta.7" + "@liskhq/lisk-p2p": 
"^0.9.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-transaction-pool": "^0.7.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-validator": "^0.8.0-beta.0", + "lisk-framework": "^0.11.0-beta.0" }, "devDependencies": { "eslint": "8.28.0", diff --git a/yarn.lock b/yarn.lock index b81a0b4fd47..75e691fdb4f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1803,6 +1803,51 @@ dependencies: "@types/node" "11.11.2" +"@liskhq/lisk-api-client@^6.0.0-beta.6": + version "6.0.0-beta.6" + resolved "https://npm.lisk.com/@liskhq/lisk-api-client/-/lisk-api-client-6.0.0-beta.6.tgz#9377c1f0af898c4f1ca49babd606faa4ba5f6e0e" + integrity sha512-Dhd7yWw2N7DlSrhTiQyitZ/Y8OSLCl1vlyWJx2GQXm+hbDf4nezcSdoAO3Z1W/hYFbvOckl+JKgZtALydOZogA== + dependencies: + "@liskhq/lisk-codec" "^0.3.0-beta.5" + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-transactions" "^6.0.0-beta.5" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + isomorphic-ws "4.0.1" + ws "8.11.0" + zeromq "6.0.0-beta.6" + +"@liskhq/lisk-chain@^0.4.0-beta.6": + version "0.4.0-beta.6" + resolved "https://npm.lisk.com/@liskhq/lisk-chain/-/lisk-chain-0.4.0-beta.6.tgz#6ea5fbe9769975e7f73665a676643c8553d90e04" + integrity sha512-3ttMQkWQn4ZJe/STzF8ZD4tjPV0Mt/4eNBHBjkrilEcN/J0YQUGN9ORh3pxhupBXfGt88Ht+jqdq+ZAnm5dEOA== + dependencies: + "@liskhq/lisk-codec" "^0.3.0-beta.5" + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-db" "0.3.7" + "@liskhq/lisk-tree" "^0.3.0-beta.5" + "@liskhq/lisk-utils" "^0.3.0-beta.3" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + debug "4.3.4" + +"@liskhq/lisk-codec@^0.3.0-beta.5": + version "0.3.0-beta.5" + resolved "https://npm.lisk.com/@liskhq/lisk-codec/-/lisk-codec-0.3.0-beta.5.tgz#39599debb707574a851e1697c992747d1e3318d9" + integrity sha512-VAtJH5C2RNNkQ4751NRIl7HfpoojSmfuGEI3jHSq9SbtXKYigkXSzkh6FhiwnSLIuEZNtCEoO1kyla6WQox3aQ== + dependencies: + "@liskhq/lisk-cryptography" 
"^4.0.0-beta.4" + "@liskhq/lisk-utils" "^0.3.0-beta.3" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + +"@liskhq/lisk-cryptography@^4.0.0-beta.4": + version "4.0.0-beta.4" + resolved "https://npm.lisk.com/@liskhq/lisk-cryptography/-/lisk-cryptography-4.0.0-beta.4.tgz#36f3c18ee806a8e56e7ec6e1b6775ffa89215159" + integrity sha512-m+rwGPqVT4WmdHJ8HB6fCUyY4iCPweWf/DZBqmVmO3Mqnrxq0E0FjSXutofWiBrzLxtyNmolpGSENrMKvIdaSw== + dependencies: + "@liskhq/lisk-passphrase" "^4.0.0-beta.2" + buffer-reverse "1.0.1" + hash-wasm "4.9.0" + tweetnacl "1.0.3" + "@liskhq/lisk-db@0.3.7": version "0.3.7" resolved "https://registry.yarnpkg.com/@liskhq/lisk-db/-/lisk-db-0.3.7.tgz#9dce3d0c37f248f9221b26f0d57c3306d7d072ef" @@ -1813,6 +1858,71 @@ cargo-cp-artifact "^0.1" shelljs "^0.8.5" +"@liskhq/lisk-p2p@^0.8.0-beta.6": + version "0.8.0-beta.6" + resolved "https://npm.lisk.com/@liskhq/lisk-p2p/-/lisk-p2p-0.8.0-beta.6.tgz#3a30c3ef6774042b213f2ca94ee620ba74178e1d" + integrity sha512-aN50jZV8iwhT13GK5ZwguW5oj/5IKb/HV5LnEcDUhV7FeQig3DAnnBJJhxzcHZtUrGgSmRkiPKrDmT+w0XwAmw== + dependencies: + "@liskhq/lisk-codec" "^0.3.0-beta.5" + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + lodash.shuffle "4.2.0" + semver "7.5.2" + socketcluster-client "14.3.1" + socketcluster-server "14.6.0" + +"@liskhq/lisk-passphrase@^4.0.0-beta.2": + version "4.0.0-canary.33179" + resolved "https://npm.lisk.com/@liskhq/lisk-passphrase/-/lisk-passphrase-4.0.0-canary.33179.tgz#8e04848238e72eafa2f2bb80ea91cb0e69bbedc3" + integrity sha512-EPXgHb4YBme/FoS7CLEbvPU0pshKH9FRDL9qUn8Po2tB72IdIfDh+wP9nAD/92v4JcsVVGtPgX5MWiiYHb1GCw== + dependencies: + bip39 "3.0.3" + +"@liskhq/lisk-transaction-pool@^0.6.0-beta.6": + version "0.6.0-beta.6" + resolved "https://npm.lisk.com/@liskhq/lisk-transaction-pool/-/lisk-transaction-pool-0.6.0-beta.6.tgz#85c36789c45cffbbd73f90a961710763cdd857aa" + integrity sha512-OG/RFVj/uEEdNgKdE1KEipkNqPhwW6QlwRlzTmwIQQ9m+O9dHW3f0xoPufFJVqR8BtFwRDlctRv6HTfJWUUdcA== + 
dependencies: + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-utils" "^0.3.0-beta.3" + debug "4.3.4" + +"@liskhq/lisk-transactions@^6.0.0-beta.5": + version "6.0.0-beta.5" + resolved "https://npm.lisk.com/@liskhq/lisk-transactions/-/lisk-transactions-6.0.0-beta.5.tgz#cb00e54dc417c138e53da2aec89a65768860e8ca" + integrity sha512-vgdGjw4D1wrZVa0YAOc5YlvrfjYarHbKo9gx8CUrEJv72M0B/UImMyVQQbVODLT9YeIvJ1ajUWLYl372b9ZFow== + dependencies: + "@liskhq/lisk-codec" "^0.3.0-beta.5" + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + +"@liskhq/lisk-tree@^0.3.0-beta.5": + version "0.3.0-beta.5" + resolved "https://npm.lisk.com/@liskhq/lisk-tree/-/lisk-tree-0.3.0-beta.5.tgz#eaca57ba3942c97b23c13a92723f2cff3303848f" + integrity sha512-XUrTVmRpx/bn96TJOdrqbJG5sglfbeKk7+39o6BWH2KVFacyPAbEJi0bRmCCsEVjK9dzazxjWKjRp+qlqbjQcw== + dependencies: + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-utils" "^0.3.0-beta.3" + +"@liskhq/lisk-utils@^0.3.0-beta.3": + version "0.3.0-canary.33253" + resolved "https://npm.lisk.com/@liskhq/lisk-utils/-/lisk-utils-0.3.0-canary.33253.tgz#07f3c1274a4660bc2f13280ca11f783a4434ade6" + integrity sha512-hIaYerKi0gC/c7nj9hn9CQfKxih8kSNkinS3N/2au2qs4LWNDQOKk0aUynMpQej1Xv26WcF51tY//Je/+k3wbg== + dependencies: + lodash.clonedeep "4.5.0" + +"@liskhq/lisk-validator@^0.7.0-beta.5": + version "0.7.0-beta.5" + resolved "https://npm.lisk.com/@liskhq/lisk-validator/-/lisk-validator-0.7.0-beta.5.tgz#d8a7fdcb5eaa45c2cd01b23d1fde0c365e5cff1a" + integrity sha512-oySl7xe84Fnh7gUtZwrHPCtpLruPfFo//Owbf0spW56QAkYZWu+1eeGOICgKW8RzxPyFX9WAjWwK3P8Md8tNTg== + dependencies: + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + ajv "8.1.0" + ajv-formats "2.1.1" + debug "4.3.4" + semver "7.5.2" + validator "13.7.0" + "@lmdb/lmdb-darwin-arm64@2.5.2": version "2.5.2" resolved "https://registry.yarnpkg.com/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-2.5.2.tgz#bc66fa43286b5c082e8fee0eacc17995806b6fbe" @@ -10934,6 +11044,52 @@ 
lint-staged@13.0.3: string-argv "^0.3.1" yaml "^2.1.1" +lisk-framework@^0.10.0-beta.7: + version "0.10.0-beta.7" + resolved "https://npm.lisk.com/lisk-framework/-/lisk-framework-0.10.0-beta.7.tgz#ec405c771d2de2e228b286d7ba04aa9330b0d4b7" + integrity sha512-BytJ2/l3CvN6nBOjlAZ6oP5db+ksbFYejjT9oICw4facKuKMO+dEDl2OMAH8gAn6wsPD9VgFT1UUy+FuY0k+Rw== + dependencies: + "@chainsafe/blst" "0.2.9" + "@liskhq/lisk-api-client" "^6.0.0-beta.6" + "@liskhq/lisk-chain" "^0.4.0-beta.6" + "@liskhq/lisk-codec" "^0.3.0-beta.5" + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-db" "0.3.7" + "@liskhq/lisk-p2p" "^0.8.0-beta.6" + "@liskhq/lisk-transaction-pool" "^0.6.0-beta.6" + "@liskhq/lisk-transactions" "^6.0.0-beta.5" + "@liskhq/lisk-tree" "^0.3.0-beta.5" + "@liskhq/lisk-utils" "^0.3.0-beta.3" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + bunyan "1.8.15" + debug "4.3.4" + eventemitter2 "6.4.9" + fs-extra "11.1.0" + prom-client "14.2.0" + ps-list "7.2.0" + sodium-native "3.2.1" + ws "8.11.0" + zeromq "6.0.0-beta.6" + +lisk-sdk@^6.0.0-alpha.0: + version "6.0.0-beta.7" + resolved "https://npm.lisk.com/lisk-sdk/-/lisk-sdk-6.0.0-beta.7.tgz#35157d05d80cb82f65a72240b266b32cb4d6285d" + integrity sha512-GuIEgZ4HmRKt6GXSAkkDJed9JOJIJdKjdSJxqfbIO2B8wYrMQv10bpmbVEXli1QP8B8jsGq7K3XCr+yFXeNvVg== + dependencies: + "@liskhq/lisk-api-client" "^6.0.0-beta.6" + "@liskhq/lisk-chain" "^0.4.0-beta.6" + "@liskhq/lisk-codec" "^0.3.0-beta.5" + "@liskhq/lisk-cryptography" "^4.0.0-beta.4" + "@liskhq/lisk-db" "0.3.7" + "@liskhq/lisk-p2p" "^0.8.0-beta.6" + "@liskhq/lisk-passphrase" "^4.0.0-beta.2" + "@liskhq/lisk-transaction-pool" "^0.6.0-beta.6" + "@liskhq/lisk-transactions" "^6.0.0-beta.5" + "@liskhq/lisk-tree" "^0.3.0-beta.5" + "@liskhq/lisk-utils" "^0.3.0-beta.3" + "@liskhq/lisk-validator" "^0.7.0-beta.5" + lisk-framework "^0.10.0-beta.7" + listr-silent-renderer@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" From 943beb2529577e0f4503a88b7f38cd4af97b51d5 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Tue, 29 Aug 2023 13:11:20 +0100 Subject: [PATCH 111/170] Implement nft_getSupportedNFTs to fetch list of supported NFTs (#8906) Implement endpoint --- framework/src/modules/nft/endpoint.ts | 32 ++++++++++- .../test/unit/modules/nft/endpoint.spec.ts | 56 +++++++++++++++++++ 2 files changed, 87 insertions(+), 1 deletion(-) diff --git a/framework/src/modules/nft/endpoint.ts b/framework/src/modules/nft/endpoint.ts index b41b4afd07d..6c71df19002 100644 --- a/framework/src/modules/nft/endpoint.ts +++ b/framework/src/modules/nft/endpoint.ts @@ -26,7 +26,7 @@ import { isNFTSupportedRequestSchema, } from './schemas'; import { NFTStore } from './stores/nft'; -import { LENGTH_ADDRESS, LENGTH_NFT_ID } from './constants'; +import { ALL_SUPPORTED_NFTS_KEY, LENGTH_ADDRESS, LENGTH_NFT_ID } from './constants'; import { UserStore } from './stores/user'; import { NFT } from './types'; import { SupportedNFTsStore } from './stores/supported_nfts'; @@ -243,4 +243,34 @@ export class NFTEndpoint extends BaseEndpoint { return { isNFTSupported }; } + + public async getSupportedNFTs( + context: ModuleEndpointContext, + ): Promise<{ supportedNFTs: string[] }> { + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + + const areAllNFTsSupported = await supportedNFTsStore.has(context, ALL_SUPPORTED_NFTS_KEY); + if (areAllNFTsSupported) { + return { + supportedNFTs: ['*'], + }; + } + + const supportedNFTs: string[] = []; + + const storeData = await supportedNFTsStore.getAll(context); + for (const { key, value } of storeData) { + if (!value.supportedCollectionIDArray.length) { + supportedNFTs.push(`${key.toString('hex')}********`); + } else { + for (const supportedCollectionID of value.supportedCollectionIDArray) { + 
supportedNFTs.push( + key.toString('hex') + supportedCollectionID.collectionID.toString('hex'), + ); + } + } + } + + return { supportedNFTs }; + } } diff --git a/framework/test/unit/modules/nft/endpoint.spec.ts b/framework/test/unit/modules/nft/endpoint.spec.ts index 64fbabfe0d3..9515ac1abf5 100644 --- a/framework/test/unit/modules/nft/endpoint.spec.ts +++ b/framework/test/unit/modules/nft/endpoint.spec.ts @@ -783,4 +783,60 @@ describe('NFTEndpoint', () => { await expect(endpoint.isNFTSupported(context)).resolves.toEqual({ isNFTSupported: false }); }); }); + + describe('getSupportedNFTs', () => { + it('should return * when all nft`s are supported globally', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + const moduleEndpointContext = createTransientModuleEndpointContext({ stateStore }); + + await expect(endpoint.getSupportedNFTs(moduleEndpointContext)).resolves.toEqual({ + supportedNFTs: ['*'], + }); + }); + + it('should return the list of supported nft`s when all the nft`s from a chain are supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: [], + }); + + const moduleEndpointContext = createTransientModuleEndpointContext({ + stateStore, + chainID, + }); + + await expect(endpoint.getSupportedNFTs(moduleEndpointContext)).resolves.toEqual({ + supportedNFTs: [`${chainID.toString('hex')}********`], + }); + }); + + it('should return the list of supported nft`s when not all the nft`s from a chain are supported', async () => { + const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const supportedCollections = [ + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + { + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), + }, + ]; + await supportedNFTsStore.save(methodContext, chainID, { + supportedCollectionIDArray: supportedCollections, + }); + + 
const moduleEndpointContext = createTransientModuleEndpointContext({ + stateStore, + chainID, + }); + + await expect(endpoint.getSupportedNFTs(moduleEndpointContext)).resolves.toEqual({ + supportedNFTs: [ + chainID.toString('hex') + supportedCollections[0].collectionID.toString('hex'), + chainID.toString('hex') + supportedCollections[1].collectionID.toString('hex'), + ], + }); + }); + }); }); From a98241496a2edb48a75f062100f80c3ecf0fd602 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 4 Sep 2023 17:00:46 +0200 Subject: [PATCH 112/170] Fix backup feature to use correct fs method (#8922) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :bug: Fix backup feature to use correct fs method * :nail_care: Fix format * Update framework/src/node/node.ts Co-authored-by: Boban Milošević --------- Co-authored-by: Boban Milošević --- framework/src/node/node.ts | 20 +++---- framework/src/node/utils/backup.ts | 25 ++++++++ framework/test/unit/node/utils/backup.spec.ts | 60 +++++++++++++++++++ 3 files changed, 92 insertions(+), 13 deletions(-) create mode 100644 framework/src/node/utils/backup.ts create mode 100644 framework/test/unit/node/utils/backup.spec.ts diff --git a/framework/src/node/node.ts b/framework/src/node/node.ts index b63c2cb2082..4358b9fe8b1 100644 --- a/framework/src/node/node.ts +++ b/framework/src/node/node.ts @@ -71,6 +71,7 @@ import { EVENT_SYNCHRONIZER_SYNC_REQUIRED } from './synchronizer/base_synchroniz import { Network } from './network'; import { BaseAsset, BaseModule } from '../modules'; import { Bus } from '../controller/bus'; +import { backupDatabase } from './utils/backup'; const forgeInterval = 1000; const { EVENT_NEW_BLOCK, EVENT_DELETE_BLOCK, EVENT_VALIDATORS_CHANGED } = chainEvents; @@ -733,19 +734,12 @@ export class Node { this._options.backup.height > 0 && this._options.backup.height === block.header.height ) { - const backupPath = path.resolve(this._dataPath, 'backup'); - // if backup already exist, it 
should remove the directory and create a new checkpoint - if (fs.existsSync(backupPath)) { - fs.removeSync(backupPath); - } - this._blockchainDB - .checkpoint(backupPath) - .catch(err => - this._logger.fatal( - { err: err as Error, height: this._options.backup.height, path: backupPath }, - 'Fail to create backup', - ), - ); + backupDatabase(this._dataPath, this._blockchainDB).catch(err => + this._logger.fatal( + { err: err as Error, heght: this._options.backup.height }, + 'Failed to create backup', + ), + ); } // Remove any transactions from the pool on new block diff --git a/framework/src/node/utils/backup.ts b/framework/src/node/utils/backup.ts new file mode 100644 index 00000000000..96a764750e6 --- /dev/null +++ b/framework/src/node/utils/backup.ts @@ -0,0 +1,25 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import * as path from 'path'; +import * as fs from 'fs'; +import { Database } from '@liskhq/lisk-db'; + +export const backupDatabase = async (dataPath: string, db: Database) => { + const backupPath = path.resolve(dataPath, 'backup'); + // if backup already exist, it should remove the directory and create a new checkpoint + if (fs.existsSync(backupPath)) { + fs.rmSync(backupPath, { recursive: true, force: true }); + } + await db.checkpoint(backupPath); +}; diff --git a/framework/test/unit/node/utils/backup.spec.ts b/framework/test/unit/node/utils/backup.spec.ts new file mode 100644 index 00000000000..aad1462521d --- /dev/null +++ b/framework/test/unit/node/utils/backup.spec.ts @@ -0,0 +1,60 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import * as os from 'os'; +import * as path from 'path'; +import * as fs from 'fs'; +import { Database } from '@liskhq/lisk-db'; +import { getRandomBytes } from '@liskhq/lisk-cryptography'; +import { backupDatabase } from '../../../../src/node/utils/backup'; + +describe('backup', () => { + const getDataPath = (name: string) => path.join(os.tmpdir(), Date.now().toString(), name); + + it('should create backup', async () => { + const dbName = 'db-1'; + const dataPath = getDataPath(dbName); + const db = new Database(path.join(dataPath, dbName)); + const key = getRandomBytes(10); + await db.set(key, getRandomBytes(20)); + + await backupDatabase(dataPath, db); + + expect(fs.existsSync(path.join(dataPath, 'backup'))).toBeTrue(); + db.close(); + }); + + it('should remove old backup and create new one if exist', async () => { + const dbName = 'db-2'; + const dataPath = getDataPath(dbName); + const db = new Database(path.join(dataPath, dbName)); + const key = getRandomBytes(10); + await db.set(key, getRandomBytes(20)); + + await backupDatabase(dataPath, db); + const key2 = getRandomBytes(10); + await db.set(key2, getRandomBytes(20)); + + expect(fs.existsSync(path.join(dataPath, 'backup'))).toBeTrue(); + + await backupDatabase(dataPath, db); + + expect(fs.existsSync(path.join(dataPath, 'backup'))).toBeTrue(); + db.close(); + const backupDB = new Database(path.join(dataPath, 'backup')); + + await expect(backupDB.has(key2)).resolves.toBeTrue(); + backupDB.close(); + }); +}); From 49168961bd89f3e774cc2349fbfb0c9dca3fa898 Mon Sep 17 00:00:00 2001 From: Ishan Date: Tue, 5 Sep 2023 13:17:20 +0200 Subject: [PATCH 113/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20version=205?= =?UTF-8?q?.3.0-rc.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- commander/package.json | 24 ++++++------ .../templates/init/package-template.json | 8 ++-- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 10 ++--- 
elements/lisk-bft/package.json | 12 +++--- elements/lisk-chain/package.json | 14 +++---- elements/lisk-client/package.json | 18 ++++----- elements/lisk-codec/package.json | 6 +-- elements/lisk-cryptography/package.json | 2 +- elements/lisk-elements/package.json | 28 +++++++------- elements/lisk-genesis/package.json | 12 +++--- elements/lisk-p2p/package.json | 8 ++-- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +-- elements/lisk-transactions/package.json | 8 ++-- elements/lisk-tree/package.json | 6 +-- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../package.json | 10 ++--- .../lisk-framework-faucet-plugin/package.json | 16 ++++---- .../lisk-framework-forger-plugin/package.json | 20 +++++----- .../package.json | 14 +++---- .../package.json | 14 +++---- .../package.json | 18 ++++----- framework/package.json | 28 +++++++------- protocol-specs/package.json | 8 ++-- sdk/package.json | 38 +++++++++---------- 27 files changed, 169 insertions(+), 169 deletions(-) diff --git a/commander/package.json b/commander/package.json index e6b9e0d872c..b744a736d05 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.1.11-alpha.2", + "version": "5.2.0-rc.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,17 +91,17 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-client": "^5.2.3-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-client": "^5.3.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-genesis": "^0.2.5-alpha.2", - 
"@liskhq/lisk-passphrase": "^3.1.2-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-genesis": "^0.3.0-rc.0", + "@liskhq/lisk-passphrase": "^3.2.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "@oclif/command": "1.8.16", "@oclif/config": "1.18.3", "@oclif/errors": "1.3.5", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.9.3-alpha.2", + "lisk-framework": "^0.10.0-rc.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 464968ecdc8..65151d58e15 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -98,15 +98,15 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.1.7", - "@liskhq/lisk-framework-faucet-plugin": "^0.1.7", + "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-rc.0", "@oclif/command": "1.8.16", "@oclif/plugin-autocomplete": "1.2.0", "@oclif/plugin-help": "5.1.12", "fs-extra": "9.1.0", "inquirer": "7.3.2", - "lisk-commander": "^5.1.10", - "lisk-sdk": "^5.2.2", + "lisk-commander": "^5.2.0-rc.0", + "lisk-sdk": "^5.3.0-rc.0", "tar": "6.0.2", "tslib": "1.13.0", "axios": "0.21.1" diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index d65b57692b5..b2ace81c253 100644 --- 
a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -29,7 +29,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "lisk-sdk": "^5.2.2" + "lisk-sdk": "^5.3.0-rc.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 45f5a88b196..1a348113f9d 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.1.7-alpha.2", + "version": "5.2.0-rc.0", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.2", + "@liskhq/lisk-chain": "^0.4.0-rc.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "18.15.3", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index e5bd1c63790..c448bbe9db2 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.3.5-alpha.2", + "version": "0.4.0-rc.0", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 
+36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "@types/node": "18.15.3", "debug": "4.3.4" }, diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 32d8e4d1efe..6245a257d1c 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.3.5-alpha.2", + "version": "0.4.0-rc.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-tree": "^0.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-tree": "^0.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", + "@liskhq/lisk-passphrase": "^3.2.0-rc.0", "@types/debug": "4.1.7", "@types/faker": "4.1.10", "@types/jest": "26.0.21", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 6d48abdea86..3831b539188 
100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.2.3-alpha.2", + "version": "5.3.0-rc.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -55,14 +55,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-tree": "^0.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-passphrase": "^3.2.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-tree": "^0.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 072a74136a4..c1e5b9eb7c2 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.2.3-alpha.1", + "version": "0.3.0-rc.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1" + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0" }, 
"devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 983fb31f41f..3b9f0572302 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "3.2.2-alpha.1", + "version": "3.3.0-rc.0", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 8e2fcdafe1a..0f4f314cb97 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.2.3-alpha.2", + "version": "5.3.0-rc.0", "description": "Libraries to support building blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,19 +36,19 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-bft": "^0.3.5-alpha.2", - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-genesis": "^0.2.5-alpha.2", - "@liskhq/lisk-p2p": "^0.7.4-alpha.2", - "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", - "@liskhq/lisk-transaction-pool": "^0.5.4-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-tree": "^0.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1" + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-bft": "^0.4.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-genesis": "^0.3.0-rc.0", + 
"@liskhq/lisk-p2p": "^0.8.0-rc.0", + "@liskhq/lisk-passphrase": "^3.2.0-rc.0", + "@liskhq/lisk-transaction-pool": "^0.6.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-tree": "^0.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 34ef25a3a5a..6519e637ad5 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.2.5-alpha.2", + "version": "0.3.0-rc.0", "description": "Library containing genesis block creation functions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "lodash.clonedeep": "4.5.0" }, "devDependencies": { diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 3f318700d78..37a0bf2c69c 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.7.4-alpha.2", + "version": "0.8.0-rc.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,9 +42,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": 
"^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "lodash.shuffle": "4.2.0", "semver": "7.3.5", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 66c6e59c18f..0fa41a7752d 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "3.1.2-alpha.1", + "version": "3.2.0-rc.0", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index af6fcfb4792..2f267434245 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.5.4-alpha.1", + "version": "0.6.0-rc.0", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,8 +37,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 24404fe6021..98f55660e09 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "5.2.3-alpha.1", + "version": "5.3.0-rc.0", 
"description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,9 +36,9 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1" + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 3b6bae4ccb8..575885bb549 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.2.3-alpha.1", + "version": "0.3.0-rc.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1" + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index 546ce79d423..9027aae3dc3 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.2.2-alpha.1", + "version": "0.3.0-rc.0", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 
7f5a9f30760..8a60275e67f 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.6.3-alpha.1", + "version": "0.7.0-rc.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "ajv": "8.1.0", "ajv-formats": "2.0.2", "debug": "4.3.4", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index b26b44e5c99..bd9dd54fb88 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.1.8-alpha.2", + "version": "0.2.0-rc.0", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.2.3-alpha.2", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", + "@liskhq/lisk-client": "^5.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.9.3-alpha.2", + "lisk-framework": "^0.10.0-rc.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 
b77166680b4..f72c6c33bf8 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.1.8-alpha.2", + "version": "0.2.0-rc.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-client": "^5.2.3-alpha.2", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-client": "^5.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "axios": "1.3.2", "express": "4.17.3", - "lisk-framework": "^0.9.3-alpha.2", + "lisk-framework": "^0.10.0-rc.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index f4b1d641f20..398305254f6 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.2.8-alpha.2", + "version": "0.3.0-rc.0", "description": "A plugin for lisk-framework that monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,13 +38,13 @@ "prepublishOnly": "npm run lint && npm test 
&& npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "axios": "1.3.2", "cors": "2.8.5", "debug": "4.3.4", @@ -52,11 +52,11 @@ "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.9.3-alpha.2" + "lisk-framework": "^0.10.0-rc.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-genesis": "^0.2.5-alpha.2", + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-genesis": "^0.3.0-rc.0", "@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 941f79df314..122d5f0a703 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.2.8-alpha.2", + "version": "0.3.0-rc.0", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,18 +37,18 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + 
"@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.3-alpha.2" + "lisk-framework": "^0.10.0-rc.0" }, "devDependencies": { - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", "@types/cors": "2.8.6", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 80526ee6478..e315df05678 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.2.8-alpha.2", + "version": "0.3.0-rc.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,16 +37,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.9.3-alpha.2" + "lisk-framework": "^0.10.0-rc.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git 
a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index bbb89bbbf3d..0be5a556fa4 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.2.8-alpha.2", + "version": "0.3.0-rc.0", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,17 +38,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.3.5-alpha.2", - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-bft": "^0.4.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.9.3-alpha.2" + "lisk-framework": "^0.10.0-rc.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework/package.json b/framework/package.json index 82ffa17ef2e..61dfbc81fab 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.9.3-alpha.2", + "version": "0.10.0-rc.0", "description": "Framework to build blockchain applications according to the Lisk protocol", "author": 
"Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,19 +40,19 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-bft": "^0.3.5-alpha.2", - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-bft": "^0.4.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-genesis": "^0.2.5-alpha.2", - "@liskhq/lisk-p2p": "^0.7.4-alpha.2", - "@liskhq/lisk-transaction-pool": "^0.5.4-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-tree": "^0.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", + "@liskhq/lisk-genesis": "^0.3.0-rc.0", + "@liskhq/lisk-p2p": "^0.8.0-rc.0", + "@liskhq/lisk-transaction-pool": "^0.6.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-tree": "^0.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.5", @@ -64,7 +64,7 @@ "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", + "@liskhq/lisk-passphrase": "^3.2.0-rc.0", "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index f1aa47055fe..b5017858cf7 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "3.2.2-alpha.1", - "@liskhq/lisk-passphrase": "3.1.2-alpha.1", - "@liskhq/lisk-validator": "0.6.3-alpha.1", + "@liskhq/lisk-codec": 
"0.3.0-rc.0", + "@liskhq/lisk-cryptography": "3.3.0-rc.0", + "@liskhq/lisk-passphrase": "3.2.0-rc.0", + "@liskhq/lisk-validator": "0.7.0-rc.0", "protobufjs": "6.9.0" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 1d00cfdb4fd..c73c5f81f68 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.2.3-alpha.2", + "version": "5.3.0-rc.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,25 +29,25 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.1.7-alpha.2", - "@liskhq/lisk-bft": "^0.3.5-alpha.2", - "@liskhq/lisk-chain": "^0.3.5-alpha.2", - "@liskhq/lisk-codec": "^0.2.3-alpha.1", - "@liskhq/lisk-cryptography": "^3.2.2-alpha.1", + "@liskhq/lisk-api-client": "^5.2.0-rc.0", + "@liskhq/lisk-bft": "^0.4.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0-rc.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-framework-forger-plugin": "^0.2.8-alpha.2", - "@liskhq/lisk-framework-http-api-plugin": "^0.2.8-alpha.2", - "@liskhq/lisk-framework-monitor-plugin": "^0.2.8-alpha.2", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.2.8-alpha.2", - "@liskhq/lisk-genesis": "^0.2.5-alpha.2", - "@liskhq/lisk-p2p": "^0.7.4-alpha.2", - "@liskhq/lisk-passphrase": "^3.1.2-alpha.1", - "@liskhq/lisk-transaction-pool": "^0.5.4-alpha.1", - "@liskhq/lisk-transactions": "^5.2.3-alpha.1", - "@liskhq/lisk-tree": "^0.2.3-alpha.1", - "@liskhq/lisk-utils": "^0.2.2-alpha.1", - "@liskhq/lisk-validator": "^0.6.3-alpha.1", - "lisk-framework": "^0.9.3-alpha.2" + "@liskhq/lisk-framework-forger-plugin": "^0.3.0-rc.0", + "@liskhq/lisk-framework-http-api-plugin": "^0.3.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.3.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-rc.0", + "@liskhq/lisk-genesis": 
"^0.3.0-rc.0", + "@liskhq/lisk-p2p": "^0.8.0-rc.0", + "@liskhq/lisk-passphrase": "^3.2.0-rc.0", + "@liskhq/lisk-transaction-pool": "^0.6.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-tree": "^0.3.0-rc.0", + "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-validator": "^0.7.0-rc.0", + "lisk-framework": "^0.10.0-rc.0" }, "devDependencies": { "eslint": "7.22.0", From f4f244b2a2d5d9970b9cd73ec108d317f3503805 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Fri, 8 Sep 2023 17:31:58 +0200 Subject: [PATCH 114/170] UX improvements for dashboard and commands (#8946) * Update description and examples of generate:command * Bootstrapped modules and plugins no longer throw error at build * My Account dashboard widget now also shows the passphrase * Remove unused children from CopiableText component * Fix formatting issues * Fix a typo --- .../templates/init/src/app/modules.ts | 3 +-- commander/src/commands/generate/command.ts | 7 ++----- .../interop/pos-mainchain-fast/src/app/modules.ts | 3 +-- .../pos-sidechain-example-one/src/app/modules.ts | 3 +-- .../pos-sidechain-example-two/src/app/modules.ts | 3 +-- .../components/CopiableText/CopiableText.module.scss | 1 + .../src/ui/components/dialogs/AccountDialog.tsx | 6 +++--- .../src/ui/components/widgets/BlockWidget.tsx | 6 ++---- .../src/ui/components/widgets/MyAccountWidget.tsx | 12 +++++++++--- .../src/ui/components/widgets/TransactionWidget.tsx | 6 ++---- 10 files changed, 23 insertions(+), 27 deletions(-) diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/src/app/modules.ts b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/src/app/modules.ts index acdfa4fb8f5..d69352da8ae 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/src/app/modules.ts +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/src/app/modules.ts @@ -1,5 +1,4 @@ /* eslint-disable 
@typescript-eslint/no-empty-function */ import { Application } from 'lisk-sdk'; -// @ts-expect-error app will have typescript error for unsued variable -export const registerModules = (app: Application): void => {}; +export const registerModules = (_app: Application): void => {}; diff --git a/commander/src/commands/generate/command.ts b/commander/src/commands/generate/command.ts index 817f414178e..eca875ae018 100644 --- a/commander/src/commands/generate/command.ts +++ b/commander/src/commands/generate/command.ts @@ -22,11 +22,8 @@ interface CommandCommandArgs { } export default class CommandCommand extends BaseBootstrapCommand { - static description = 'Creates an command skeleton for the given module name, name and id.'; - static examples = [ - 'generate:command moduleName commandName commandID', - 'generate:command nft transfer 1', - ]; + static description = 'Creates a command skeleton for the given module name and command name.'; + static examples = ['generate:command moduleName commandName', 'generate:command nft transfer']; static args = [ { name: 'moduleName', diff --git a/examples/interop/pos-mainchain-fast/src/app/modules.ts b/examples/interop/pos-mainchain-fast/src/app/modules.ts index acdfa4fb8f5..d69352da8ae 100644 --- a/examples/interop/pos-mainchain-fast/src/app/modules.ts +++ b/examples/interop/pos-mainchain-fast/src/app/modules.ts @@ -1,5 +1,4 @@ /* eslint-disable @typescript-eslint/no-empty-function */ import { Application } from 'lisk-sdk'; -// @ts-expect-error app will have typescript error for unsued variable -export const registerModules = (app: Application): void => {}; +export const registerModules = (_app: Application): void => {}; diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules.ts b/examples/interop/pos-sidechain-example-one/src/app/modules.ts index acdfa4fb8f5..d69352da8ae 100644 --- a/examples/interop/pos-sidechain-example-one/src/app/modules.ts +++ b/examples/interop/pos-sidechain-example-one/src/app/modules.ts @@ 
-1,5 +1,4 @@ /* eslint-disable @typescript-eslint/no-empty-function */ import { Application } from 'lisk-sdk'; -// @ts-expect-error app will have typescript error for unsued variable -export const registerModules = (app: Application): void => {}; +export const registerModules = (_app: Application): void => {}; diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules.ts b/examples/interop/pos-sidechain-example-two/src/app/modules.ts index acdfa4fb8f5..d69352da8ae 100644 --- a/examples/interop/pos-sidechain-example-two/src/app/modules.ts +++ b/examples/interop/pos-sidechain-example-two/src/app/modules.ts @@ -1,5 +1,4 @@ /* eslint-disable @typescript-eslint/no-empty-function */ import { Application } from 'lisk-sdk'; -// @ts-expect-error app will have typescript error for unsued variable -export const registerModules = (app: Application): void => {}; +export const registerModules = (_app: Application): void => {}; diff --git a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/CopiableText/CopiableText.module.scss b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/CopiableText/CopiableText.module.scss index eb1d03e1577..41707b21572 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/CopiableText/CopiableText.module.scss +++ b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/CopiableText/CopiableText.module.scss @@ -5,6 +5,7 @@ } .copyText { + white-space: nowrap; overflow-wrap: unset !important; overflow: hidden; text-overflow: ellipsis; diff --git a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/dialogs/AccountDialog.tsx b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/dialogs/AccountDialog.tsx index 943ea324025..7f6591169f8 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/dialogs/AccountDialog.tsx +++ b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/dialogs/AccountDialog.tsx 
@@ -40,13 +40,13 @@ const AccountDialog: React.FC = props => { Lisk32 address - {account.address} + Public Key - {account.publicKey} + @@ -54,7 +54,7 @@ const AccountDialog: React.FC = props => { Passphrase - {account.passphrase} + diff --git a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/BlockWidget.tsx b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/BlockWidget.tsx index 9b1a5c9c059..7bbf24c6702 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/BlockWidget.tsx +++ b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/BlockWidget.tsx @@ -54,12 +54,10 @@ const BlockWidget: React.FC = props => { {blocks.map(block => ( - {block.header.id} + - - {block.header.generatorAddress} - + {block.header.height} diff --git a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/MyAccountWidget.tsx b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/MyAccountWidget.tsx index 9fdc7078fc2..a1349d2703c 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/MyAccountWidget.tsx +++ b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/MyAccountWidget.tsx @@ -44,21 +44,27 @@ const MyAccountWidget: React.FC = props => { - Binary addresss + Lisk32 address Public Key + + Passphrase + {accounts.map((account: Account) => ( handleClick(account)} key={account.address}> - {account.address} + + + + - {account.publicKey} + ))} diff --git a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/TransactionWidget.tsx b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/TransactionWidget.tsx index ff11d903e5c..1e3529fb98c 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/TransactionWidget.tsx +++ 
b/framework-plugins/lisk-framework-dashboard-plugin/src/ui/components/widgets/TransactionWidget.tsx @@ -77,12 +77,10 @@ const TransactionWidget: React.FC = props => { {transactions.map(transaction => ( - {transaction.id} + - - {transaction.senderPublicKey} - + From 6615719c5beb9ddc9fc8fb2826e4adf760fcb6c2 Mon Sep 17 00:00:00 2001 From: Ishan Date: Fri, 8 Sep 2023 18:31:54 +0200 Subject: [PATCH 115/170] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20version=206?= =?UTF-8?q?.1.0-beta.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- commander/package.json | 18 ++-- .../templates/init/package-template.json | 16 ++-- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 12 +-- elements/lisk-chain/package.json | 10 +-- elements/lisk-client/package.json | 14 +-- elements/lisk-codec/package.json | 6 +- elements/lisk-cryptography/package.json | 2 +- elements/lisk-elements/package.json | 20 ++--- elements/lisk-p2p/package.json | 8 +- elements/lisk-transaction-pool/package.json | 4 +- elements/lisk-transactions/package.json | 8 +- elements/lisk-tree/package.json | 4 +- elements/lisk-validator/package.json | 4 +- examples/interop/README.md | 14 ++- .../config/default/genesis_block.blob | Bin 7306 -> 7306 bytes .../config/scripts/mint_nft_mainchain_one.ts | 57 ++++++++++++ .../scripts/transfer_lsk_sidechain_one.ts | 6 +- .../scripts/transfer_lsk_sidechain_two.ts | 6 +- .../scripts/transfer_nft_sidechain_one.ts | 81 ++++++++++++++++++ .../interop/pos-mainchain-fast/package.json | 16 ++-- .../config/default/genesis_assets.json | 72 +--------------- .../config/default/genesis_block.blob | Bin 7081 -> 5937 bytes .../pos-sidechain-example-one/package.json | 16 ++-- .../config/default/genesis_block.blob | Bin 7064 -> 7064 bytes .../pos-sidechain-example-two/package.json | 16 ++-- .../config/default/genesis_block.blob | Bin 27339 -> 27339 bytes examples/poa-sidechain/package.json | 16 ++-- 
.../config/default/genesis_block.blob | Bin 30475 -> 30484 bytes examples/pos-mainchain/package.json | 12 +-- .../package.json | 4 +- .../package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 14 +-- .../lisk-framework-forger-plugin/package.json | 6 +- .../package.json | 4 +- .../package.json | 6 +- framework/package.json | 20 ++--- protocol-specs/package.json | 6 +- sdk/package.json | 22 ++--- yarn.lock | 29 +++++++ 40 files changed, 334 insertions(+), 223 deletions(-) create mode 100644 examples/interop/pos-mainchain-fast/config/scripts/mint_nft_mainchain_one.ts create mode 100644 examples/interop/pos-mainchain-fast/config/scripts/transfer_nft_sidechain_one.ts diff --git a/commander/package.json b/commander/package.json index 847a2a67b44..55e85af8029 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "6.1.0-beta.0", + "version": "6.1.0-beta.1", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -101,16 +101,16 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.0", - "@liskhq/lisk-chain": "^0.5.0-beta.0", - "@liskhq/lisk-client": "^6.1.0-beta.0", - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-chain": "^0.6.0-beta.0", + "@liskhq/lisk-client": "^6.1.0-beta.1", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-db": "0.3.7", "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,7 +121,7 @@ "cli-table3": "0.6.0", 
"fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-framework": "^0.11.0-beta.0", + "lisk-framework": "^0.12.0-beta.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.5.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 3c78a0b2fbf..e69ccf633c2 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -85,12 +85,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "0.3.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "0.3.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "0.4.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "0.4.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "0.4.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "0.2.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "0.4.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "0.4.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "0.5.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "0.5.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "0.5.0-beta.0", + "@liskhq/lisk-framework-chain-connector-plugin": "0.2.0-beta.1", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -98,8 +98,8 @@ "axios": "0.21.2", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "6.1.0-beta.0", - "lisk-sdk": "6.1.0-beta.0", + "lisk-commander": "6.1.0-beta.1", + "lisk-sdk": "6.1.0-beta.1", "tar": "6.1.11", "tslib": "2.4.1" }, diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index b7e73396a5f..2758af86d0d 100644 --- 
a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -28,7 +28,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "lisk-sdk": "^6.1.0-beta.0" + "lisk-sdk": "^6.1.0-beta.1" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 5fb8171afef..3a959a27301 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "6.1.0-beta.0", + "version": "6.1.0-beta.1", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "isomorphic-ws": "4.0.1", "ws": "8.11.0", "zeromq": "6.0.0-beta.6" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.5.0-beta.0", + "@liskhq/lisk-chain": "^0.6.0-beta.0", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", "@types/node": "18.15.3", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 52c22658e19..ca1640eaade 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.5.0-beta.0", + "version": "0.6.0-beta.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk 
Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,12 +35,12 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-tree": "^0.5.0-beta.0", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index ef2bc09f153..4d4cfa9bc22 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "6.1.0-beta.0", + "version": "6.1.0-beta.1", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -56,14 +56,14 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.0", - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", - "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", + "@liskhq/lisk-tree": "^0.5.0-beta.0", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 6bc5bacdca1..4212c3c295e 100644 --- a/elements/lisk-codec/package.json +++ 
b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.4.0-beta.0", + "version": "0.5.0-beta.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0" + "@liskhq/lisk-validator": "^0.9.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 94451df2436..7da9623ef91 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "4.1.0-beta.0", + "version": "4.1.0-beta.1", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index fd29ab1e7db..7021e7939bd 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "6.1.0-beta.0", + "version": "6.1.0-beta.1", "description": "Elements for building blockchain applications in the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,18 +35,18 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.0", - "@liskhq/lisk-chain": "^0.5.0-beta.0", - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-chain": 
"^0.6.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.9.0-beta.0", + "@liskhq/lisk-p2p": "^0.10.0-beta.0", "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transaction-pool": "^0.7.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", - "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-transaction-pool": "^0.8.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", + "@liskhq/lisk-tree": "^0.5.0-beta.0", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0" + "@liskhq/lisk-validator": "^0.9.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index a3fc16fafe7..5fb0f73eca1 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.9.0-beta.0", + "version": "0.10.0-beta.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,9 +41,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "lodash.shuffle": "4.2.0", "semver": "7.5.2", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 3f0f183dc68..513e69bf69e 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.7.0-beta.0", + "version": "0.8.0-beta.0", "description": "Transaction pool 
library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-utils": "^0.4.0-beta.0", "debug": "4.3.4" }, diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 34d85a23857..28169ad315f 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "6.1.0-beta.0", + "version": "6.1.0-beta.1", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0" + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-validator": "^0.9.0-beta.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 97023c9d307..056f2f0ce98 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.4.0-beta.0", + "version": "0.5.0-beta.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,7 +35,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-utils": "^0.4.0-beta.0" }, "devDependencies": { diff 
--git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 4527de4102f..e8c8fb6e0d8 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.8.0-beta.0", + "version": "0.9.0-beta.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "ajv": "8.1.0", "ajv-formats": "2.1.1", "debug": "4.3.4", diff --git a/examples/interop/README.md b/examples/interop/README.md index 50cfcda127c..61750cf20eb 100644 --- a/examples/interop/README.md +++ b/examples/interop/README.md @@ -52,7 +52,8 @@ Install and build `pos-sidechain-example-two` #### Run apps using pm2 -Install [pm2](https://pm2.keymetrics.io/) if not installed using `npm install pm2 -g` +- Install [pm2](https://pm2.keymetrics.io/) if not installed using `npm install pm2 -g` +- Install []`ts-node`](https://www.npmjs.com/package/ts-node) globally Run 2 instances mainchain node @@ -79,8 +80,8 @@ Interact with applications using `pm2` #### Register chains - Run `ts-node pos-mainchain-fast/config/scripts/sidechain_registration.ts` to register all the sidechains on the mainchain node. -- Run `ts-node pos-sidechain-example-one/config/scripts/mainchain_registration.ts` to register sidechain `sidechain_example_one` on mainchain. -- Run `ts-node pos-sidechain-example-two/config/scripts/mainchain_registration.ts` to register sidechain `sidechain_example_two` on mainchain. +- Run `ts-node pos-sidechain-example-one/config/scripts/mainchain_registration.ts` to register mainchain on sidechain `sidechain_example_one`. 
+- Run `ts-node pos-sidechain-example-two/config/scripts/mainchain_registration.ts` to register mainchain on sidechain `sidechain_example_two`. #### Check chain status @@ -89,7 +90,12 @@ Interact with applications using `pm2` Now observe logs, initially it will log `No valid CCU can be generated for the height: ${newBlockHeader.height}` until first finalized height is reached. -When the finalized height is reached, check chain status as described above and it should update lastCertificate height > 0 and status to 1 which means the CCU was sent successfully and chain is active now. +When the finalized height is reached, check chain status as described above and it should update lastCertificate `height > 0` and status to `1` which means the CCU was sent successfully and chain is active now. + +### Authorize ChainConnector plugin to sign and send CCU(Cross-Chain Update) transactions + +Run below command inside each application folder. +`./bin/run endpoint:invoke 'chainConnector_authorize' '{"password": "lisk" }'` #### Cross Chain transfers diff --git a/examples/interop/pos-mainchain-fast/config/default/genesis_block.blob b/examples/interop/pos-mainchain-fast/config/default/genesis_block.blob index bcbb443cf7fbddeeb4b95a8e95ec4e5066644e61..4291af9b6a523c19a95edd890212311f65a1277e 100644 GIT binary patch delta 63 zcmV-F0Kor>If^+C3j6^G01)ez?WYD903slf5gw5(fFJ~t`Efh`-3ULfLJJ&4{!kO_ V(OQ|5*C2&~Q^Vbxgfp?ty&K`?8a@C3 delta 63 zcmV-F0Kor>If^+C3j6^G01%zc?574803slf5gw5(fFM-6|HiImX~msAe3T;ht=hxz V(}I`hv(Ngg)Fy3&zw)uoy&FgUA8r5u diff --git a/examples/interop/pos-mainchain-fast/config/scripts/mint_nft_mainchain_one.ts b/examples/interop/pos-mainchain-fast/config/scripts/mint_nft_mainchain_one.ts new file mode 100644 index 00000000000..f4cb6fe4171 --- /dev/null +++ b/examples/interop/pos-mainchain-fast/config/scripts/mint_nft_mainchain_one.ts @@ -0,0 +1,57 @@ +import { apiClient, codec, cryptography, Transaction } from 'lisk-sdk'; +import { keys } from 
'../default/dev-validators.json'; +import { LENGTH_COLLECTION_ID } from '../../../../pos-mainchain/src/app/modules/testNft/constants'; +import { mintNftParamsSchema } from '../../../../pos-mainchain/src/app/modules/testNft/schema'; +(async () => { + const { address } = cryptography; + + const nodeAlias = 'one'; + + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-one`); + + const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); + + const relayerkeyInfo = keys[2]; + const mintNftParams = { + address: address.getAddressFromLisk32Address(relayerkeyInfo.address), + collectionID: Buffer.alloc(LENGTH_COLLECTION_ID, 1), + attributesArray: [ + { + module: 'token', + attributes: Buffer.alloc(8, 2), + }, + ], + }; + + const { nonce } = await mainchainClient.invoke<{ nonce: string }>('auth_getAuthAccount', { + address: address.getLisk32AddressFromPublicKey(Buffer.from(relayerkeyInfo.publicKey, 'hex')), + }); + + const tx = new Transaction({ + module: 'testNft', + command: 'mintNft', + fee: BigInt(200000000), + params: codec.encode(mintNftParamsSchema, mintNftParams), + nonce: BigInt(nonce), + senderPublicKey: Buffer.from(relayerkeyInfo.publicKey, 'hex'), + signatures: [], + }); + + tx.sign( + Buffer.from(mainchainNodeInfo.chainID as string, 'hex'), + Buffer.from(relayerkeyInfo.privateKey, 'hex'), + ); + + const result = await mainchainClient.invoke<{ + transactionId: string; + }>('txpool_postTransaction', { + transaction: tx.getBytes().toString('hex'), + }); + + console.log( + `Sent mint nft transaction to address: ${relayerkeyInfo} to node ${nodeAlias}. 
Result from transaction pool is: `, + result, + ); + + process.exit(0); +})(); diff --git a/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts b/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts index 8be39bb443c..97266315f99 100644 --- a/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts +++ b/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts @@ -68,7 +68,11 @@ type ModulesMetadata = [ }); console.log( - `Sent cross chain transfer transaction (amount: ${params.amount}, recipientAddress: ${recipientLSKAddress}) to send of sidechain (sendingChainID: ${params.receivingChainID}) node ${nodeAlias}. Result from transaction pool is: `, + `Sent cross chain transfer transaction (amount: ${ + params.amount + }, recipientAddress: ${recipientLSKAddress}) to sidechain (receivingChainID: ${params.receivingChainID.toString( + 'hex', + )}) node ${nodeAlias}. Result from transaction pool is: `, result, ); diff --git a/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_two.ts b/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_two.ts index d65d3945013..65216d13f3f 100644 --- a/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_two.ts +++ b/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_two.ts @@ -68,7 +68,11 @@ type ModulesMetadata = [ }); console.log( - `Sent cross chain transfer transaction (amount: ${params.amount}, recipientAddress: ${recipientLSKAddress}) to send of sidechain (sendingChainID: ${params.receivingChainID}) node ${nodeAlias}. Result from transaction pool is: `, + `Sent cross chain transfer transaction (amount: ${ + params.amount + }, recipientAddress: ${recipientLSKAddress}) to sidechain (receivingChainID: ${params.receivingChainID.toString( + 'hex', + )}) node ${nodeAlias}. 
Result from transaction pool is: `, result, ); diff --git a/examples/interop/pos-mainchain-fast/config/scripts/transfer_nft_sidechain_one.ts b/examples/interop/pos-mainchain-fast/config/scripts/transfer_nft_sidechain_one.ts new file mode 100644 index 00000000000..2097b6bb774 --- /dev/null +++ b/examples/interop/pos-mainchain-fast/config/scripts/transfer_nft_sidechain_one.ts @@ -0,0 +1,81 @@ +import { apiClient, codec, cryptography, Schema, Transaction } from 'lisk-sdk'; +import { keys } from '../default/dev-validators.json'; +type ModulesMetadata = [ + { + stores: { key: string; data: Schema }[]; + events: { name: string; data: Schema }[]; + name: string; + commands: { name: string; params: Schema }[]; + }, +]; +(async () => { + const { address } = cryptography; + + const nodeAlias = 'one'; + const tokenID = Buffer.from('0400000000000000', 'hex'); + const nftID = Buffer.from('04000000010101010000000000000000', 'hex'); + const sidechainID = Buffer.from('04000001', 'hex'); // Update this to send to another sidechain + const recipientAddress = address.getAddressFromLisk32Address( + 'lskxz85sur2yo22dmcxybe39uvh2fg7s2ezxq4ny9', + ); + + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-one`); + + const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); + + const { modules: modulesMetadata } = await mainchainClient.invoke<{ + modules: ModulesMetadata; + }>('system_getMetadata'); + const tokenMetadata = modulesMetadata.find(m => m.name === 'nft'); + + const ccTransferCMDSchema = tokenMetadata?.commands.filter( + cmd => cmd.name == 'transferCrossChain', + )[0].params as Schema; + + const params = { + nftID, + receivingChainID: sidechainID, + recipientAddress, + data: 'cc nft transfer testing', + messageFee: BigInt('10000000'), + messageFeeTokenID: tokenID, + includeAttributes: true, + }; + + const relayerkeyInfo = keys[2]; + const { nonce } = await mainchainClient.invoke<{ nonce: string }>('auth_getAuthAccount', { + 
address: address.getLisk32AddressFromPublicKey(Buffer.from(relayerkeyInfo.publicKey, 'hex')), + }); + + const tx = new Transaction({ + module: 'nft', + command: 'transferCrossChain', + fee: BigInt(200000000), + params: codec.encode(ccTransferCMDSchema, params), + nonce: BigInt(nonce), + senderPublicKey: Buffer.from(relayerkeyInfo.publicKey, 'hex'), + signatures: [], + }); + + tx.sign( + Buffer.from(mainchainNodeInfo.chainID as string, 'hex'), + Buffer.from(relayerkeyInfo.privateKey, 'hex'), + ); + + const result = await mainchainClient.invoke<{ + transactionId: string; + }>('txpool_postTransaction', { + transaction: tx.getBytes().toString('hex'), + }); + + console.log( + `Sent cross chain nft transfer transaction recipientAddress: ${params.recipientAddress.toString( + 'hex', + )}) to send of sidechain (sendingChainID: ${ + params.receivingChainID + }) node ${nodeAlias}. Result from transaction pool is: `, + result, + ); + + process.exit(0); +})(); diff --git a/examples/interop/pos-mainchain-fast/package.json b/examples/interop/pos-mainchain-fast/package.json index 3ad599d9c73..e950d54d9dd 100644 --- a/examples/interop/pos-mainchain-fast/package.json +++ b/examples/interop/pos-mainchain-fast/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", + 
"@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.0", - "lisk-sdk": "^6.1.0-beta.0", + "lisk-commander": "^6.1.0-beta.1", + "lisk-sdk": "^6.1.0-beta.1", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json b/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json index 27c812d54d0..2319c6390c0 100644 --- a/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json +++ b/examples/interop/pos-sidechain-example-one/config/default/genesis_assets.json @@ -1054,7 +1054,7 @@ "nftSubstore": [], "supportedNFTsSubstore": [ { - "chainID": "04000000", + "chainID": "", "supportedCollectionIDArray": [] } ] @@ -1210,76 +1210,6 @@ "commission": 0, "lastCommissionIncreaseHeight": 0, "sharingCoefficients": [] - }, - { - "address": "lskq3t9jjy3jpt2y3yrc67gs267e4vaedxzwpwzt4", - "name": "genesis_5", - "blsKey": "afc027c9fde4f7f78449ffa7c6d6d0a4417083629cb69988d1344348ae7ebf78c73c378f589bb796157680ad864eca84", - "proofOfPossession": "b01cd09f2911791e47549bce676ca99bf8a9fcd2a975d48ef8d5f863b5d46fadea95df33419a0d43618799d303ecbf3616551a43a1803675b2fe3d008ab025992cfbf9d4ed05aa79573e987e510ce3cab95de7837b7c9d78f716b690df30315e", - "generatorKey": "b837393cb9d926741547bd3e5a4db1cc90f4d9ba01fcbebedff8fe940191fe5b", - "lastGeneratedHeight": 0, - "isBanned": false, - "reportMisbehaviorHeights": [], - "consecutiveMissedBlocks": 0, - "commission": 0, - "lastCommissionIncreaseHeight": 0, - "sharingCoefficients": [] - }, - { - "address": "lskoer36v4xqh5ugsnhebpdzyjxm3nga2o9ogvewh", - "name": "genesis_6", - "blsKey": "8a5e2abfa418fd28e639cbcdfecaa170809a494522371dcb7e3eaf162712733065479e4211f4148b328643a1596e3669", - "proofOfPossession": 
"93c5fe759302a489f1941ac1156e8da5db210cdeb3038ed0e4c316e5d03685bd0f38692da3f2e188476e25a4b4ee116702de4675d12b101b0506a4d12048199e196b9e521386971b9984a1fa9b769562bca15a15094ffba3cd88287bca3b7608", - "generatorKey": "791d4df4544da16eaa33c7ca15a6bc217990614abb1527b38d4feb32b11bffa0", - "lastGeneratedHeight": 0, - "isBanned": false, - "reportMisbehaviorHeights": [], - "consecutiveMissedBlocks": 0, - "commission": 0, - "lastCommissionIncreaseHeight": 0, - "sharingCoefficients": [] - }, - { - "address": "lska5ezp53c5pu64avrhpj4n9w4n8dgpvqqmqedfd", - "name": "genesis_7", - "blsKey": "b4ab52a758a348ad1d5ae7adad9a7929fad91169f42ccd5d39d7bd1c7718ad1b477c87d511e397501f68596bded868af", - "proofOfPossession": "8a9600fe85b06b5b85a2fa3c28f1fbeba73233fd3f93c26f5d1af3a5a4d7eabda35dcd65297119d66081c45d72eb57d805f8aede67dd40b8888dd7419ccfff5ad19403fa02c1d6b9c88b845db0b69f1ac92dda834cc8032efa4ea430724aba40", - "generatorKey": "4868935a85f6f4498d61e6e7e888510ecd706d530c408291af85d8dd4fc20991", - "lastGeneratedHeight": 0, - "isBanned": false, - "reportMisbehaviorHeights": [], - "consecutiveMissedBlocks": 0, - "commission": 0, - "lastCommissionIncreaseHeight": 0, - "sharingCoefficients": [] - }, - { - "address": "lskydvwhgzryehb9zgogdvvqbrupwog7fpahttffv", - "name": "genesis_8", - "blsKey": "a6388495065a246d3f336460b1498264184ac1ab15e495dfbd7957e122568b3e782b3c163590478ef9427af60ee78687", - "proofOfPossession": "af02d60745287e0fdd056ab9f9139784330f00d074073550fb8d4657cb98e9108b6e47c2268d59cc96fbdc02415f37e914cc922d652d39b8fbafef211d94664660743ef574e7f9334fe60ba38f31bb7e3833dfde9620860d4d3ff9227cc8e943", - "generatorKey": "dafae6549b2b05cc69763f9500da92f3151ae0edf8e5a5fd0cb7a4ba09b6e2a2", - "lastGeneratedHeight": 0, - "isBanned": false, - "reportMisbehaviorHeights": [], - "consecutiveMissedBlocks": 0, - "commission": 0, - "lastCommissionIncreaseHeight": 0, - "sharingCoefficients": [] - }, - { - "address": "lskochy8jh28ne4bxz8c9c7c2uxe3aoag2n9z8zys", - "name": "genesis_9", - "blsKey": 
"84d8249385edd7edf4ce2220e00f183fc9c2edfc9836c10ff86ba3cdd6c8f2b83b6923a2e5cb60e8ab22801f70e30f03", - "proofOfPossession": "8fbe09682cfcf0f43ab37dfb1b5d35bb5fb0b2a5793555b516232df8dfa08bae53fd2e8203f9d770e8e4e0166c8dc51602123a5c7bbd99a494fbb513332d39f482028a9b3867aca046455a1764178be942190915f1fd6f9fda55c3520079877a", - "generatorKey": "b7d28382bf6d6e5e971d58ebc0f6689094f1ae242f1fc2a0a00ad34cfda7eba5", - "lastGeneratedHeight": 0, - "isBanned": false, - "reportMisbehaviorHeights": [], - "consecutiveMissedBlocks": 0, - "commission": 0, - "lastCommissionIncreaseHeight": 0, - "sharingCoefficients": [] } ], "stakers": [], diff --git a/examples/interop/pos-sidechain-example-one/config/default/genesis_block.blob b/examples/interop/pos-sidechain-example-one/config/default/genesis_block.blob index 28b481ce57147666fbc20c5eb6e3c8624a537662..422cc88eb14404a9de0bc4eacf6a7dd596a3b591 100644 GIT binary patch delta 171 zcmV;c095~}H?b}c3j6^G01%s&?WYD903slf5gw5(JY<1S8_ufK{n;MJuH(r#6sKA_ zd=uWNOW^rp%_1>f3oAk(I3*o+-eSN1D^BMbo>VtpqY($p)#0-W0o5yRJIs*JN+8HQ z0g~%sVh88)5%;UrxM=_wrvhlMfPO*`z7<%s{SA@LO&SXd18!z?5(E+g3IH1J2?_&n ZZ*vmn2@2r>lb#9}vrhq91hX9qj1h>HLYDvl delta 1289 zcmZ9~{Xf$Q0KoCDt*Px~Hf{JWXVwEvbIZb%2rq_9M4g9Pt&VUqE)Qqv5F6qqYaW`F zjNv7DSRxdn>+-aY9t?L`N*+RYYBBcEycNU&&^7S}SpR{|q={88|{5BIt4?JdJ1bG;^&lNALie{9q1ozbb zk{cv(5)s1{RhyZ-I)`LgYptunki;85b#pYw3}!W8Xx--@F@{*2+S8ggBe1zy7!ECO z8_k;GNqulg1eAF?9=!pri&(b~008h68Uc-si9?U0n|A5MuS-Qt>L51V*gkRAa?&EZ z+y=a+nLK#QT^NB!!^6XvVR4ahCu~7-!+ncB&D6>Y+v`*HZzDrum)ML`S6c+x!@J!) 
z>r=Xt9y!`y3KHHa!X&2EWghHj6Bvz~hpz8HvA(7q7CsI?QzKN?sGrm%Jj+u)SAO68 zET-;lp`7AUvN6@|WR~D5bfL=*djxb7rP(Fi((VIt8n+2{s#c#Z!fvnvovx%D(tF+C zMxV(@K7TD~1>2G@C+|8=#JAczIJS*#kH^qDoI-pggZYZFb{%zRr(CHm(BWy1k^w5< z3HSj)z`w{aF$Zqm5QtpRNG_JqE)bFGNiI6|KcyW=&N)u(5*uqQryT~Q+I~@NT8WoC z!QONr#i;>nfsP}G(MnxWicPsTnQo$Brd=d~!GEYt;6ub0mkPjc43k^=hq>NF6EtsV z>H+r6kR7K3NsS~`yr0abF}I0tze9yXCOi^`tqgE5UGXs9)1=fSHLBF#FtZpZV2jp- ziG`=`i9#@NUsZ)P+cLR-Z{h|bo@MHzIP4>0-k?0{$5h=jXXT&tzKgMF;`+|pNN}IZ zzEJirKuFMFym-ogj>>PD+xzi7qWN=5dyss)*1tNa!n4jaWTvjJB$c&eZ44Eu*eRtu z$T~LvWL$@%ozHrXdR^>i77-jZF&fc8VB{15T25osQBL`qqveunzS@SOapw2L(7_Lt zV%gh{3c56G$B!l>j0?Zg&&~&q!j$zB;V<^LW^-jOSAYK$GF$*%gLIFy_2#najV;%~ zKGJx`fnKQf+CgzD`K)*Qej?s8f*-v- z?{b}ztQ@hn`Lw*$Ro8RN`ZJkX1V#g7WWBp(3i1W)hqhJ2Vm1W{48^av^;2;@0tc?l z8RRl)J=?j#gGH)8AucEE=Qa&qCWVn4T2&2ynVS}z_F%+2eT<)3rT9*NRdH$8-4rTC zK2e0v-00)HN;uy;=SIYjuT38oTEPY*6P*iz@yj1D;7f2(`KD5%cSqa~Z+TVzKP6vZ z&*<}>4j`F5x|Ppalr1VA6YwvQ#?E~`i|Q+O-AH9rg>A0Qyxy^Y@n{@(Y&gw|-;RQ&xs}dO%Y>U)dyt6`}qCVj5AP diff --git a/examples/interop/pos-sidechain-example-one/package.json b/examples/interop/pos-sidechain-example-one/package.json index d617cba6b4f..3ba9c705106 100644 --- a/examples/interop/pos-sidechain-example-one/package.json +++ b/examples/interop/pos-sidechain-example-one/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": 
"^0.5.0-beta.0", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.0", - "lisk-sdk": "^6.1.0-beta.0", + "lisk-commander": "^6.1.0-beta.1", + "lisk-sdk": "^6.1.0-beta.1", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/interop/pos-sidechain-example-two/config/default/genesis_block.blob b/examples/interop/pos-sidechain-example-two/config/default/genesis_block.blob index 6ec14613783c8869032093910096cc78da6e14d8..cb9a7a2ab902b4fb99a2e71d05a32b141b3c1942 100644 GIT binary patch delta 63 zcmV-F0Kos4H<&jM3j6^G01%;;?WYD903slf5gw5(fFSKY#Dtq&V`hwWb>d|5#-)D) VC=+IzV`TNKe`hg66=1Q=%Np=98My!e delta 63 zcmV-F0Kos4H<&jM3j6^G01*Do?574803slf5gw5(fFM2HIj0oyJqvz@P34MKl~M22 VWL9eO;V1}jOJH@}n(494%NqQb8z}$) diff --git a/examples/interop/pos-sidechain-example-two/package.json b/examples/interop/pos-sidechain-example-two/package.json index dd44c52cb9c..b30d345d46d 100644 --- a/examples/interop/pos-sidechain-example-two/package.json +++ b/examples/interop/pos-sidechain-example-two/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", + 
"@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.0", - "lisk-sdk": "^6.1.0-beta.0", + "lisk-commander": "^6.1.0-beta.1", + "lisk-sdk": "^6.1.0-beta.1", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/poa-sidechain/config/default/genesis_block.blob b/examples/poa-sidechain/config/default/genesis_block.blob index 9ae7bd5f182ed5eb9c9feab35e5cc831f85fac90..ba62c0c69584a5066d006386c9bf985abe56c59c 100644 GIT binary patch delta 29 lcmX?omGSgdMt-h8j2sLC&C}m5XOm!1QkWCIRJjL^d-c delta 105 zcmV-v0G9uh?g5ML0S^lN0SEvPtewcG1{eS$AdwLsku5wRH$e&47F+~)AzD=qWqx{h z;*;>{3+b%CHXxCH@*WZWk%=TAm&kJ>^fkNVBY46Y>emanIg{Oteww!g@tZ1f^}IFZ Lk, lightcurve GmbH ", "license": "Apache-2.0", @@ -34,7 +34,7 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.1.0-beta.0" + "lisk-sdk": "^6.1.0-beta.1" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 381b2b01966..4d8e8d3b1a1 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.3.0-beta.0", + "version": "0.4.0-beta.0", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -46,10 +46,10 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-client": "^6.1.0-beta.0", + "@liskhq/lisk-client": "^6.1.0-beta.1", "express": "4.18.2", "json-format-highlight": "1.0.4", - "lisk-sdk": "^6.1.0-beta.0", + "lisk-sdk": "^6.1.0-beta.1", "react": "^17.0.1", 
"react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 1387e799d65..b2daa1b99bd 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.3.0-beta.0", + "version": "0.4.0-beta.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -47,15 +47,15 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-api-client": "^6.1.0-beta.0", - "@liskhq/lisk-client": "^6.1.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-client": "^6.1.0-beta.1", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "axios": "1.2.0", "express": "4.18.2", - "lisk-sdk": "^6.1.0-beta.0", + "lisk-sdk": "^6.1.0-beta.1", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 31b2c86da06..43c9ae11d5c 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.4.0-beta.0", + "version": "0.5.0-beta.0", "description": "A plugin for lisk-framework that monitors configured validators forging activity and stakers information.", "author": "Lisk Foundation , 
lightcurve GmbH ", "license": "Apache-2.0", @@ -40,10 +40,10 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.1.0-beta.0" + "lisk-sdk": "^6.1.0-beta.1" }, "devDependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", "@types/debug": "4.1.5", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index eb2cb2352d8..1f70f973bba 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.4.0-beta.0", + "version": "0.5.0-beta.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,7 +40,7 @@ "express": "4.18.2", "express-rate-limit": "6.7.0", "ip": "1.1.5", - "lisk-sdk": "^6.1.0-beta.0" + "lisk-sdk": "^6.1.0-beta.1" }, "devDependencies": { "@types/cors": "2.8.12", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 967043510d0..d06184f3450 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.4.0-beta.0", + "version": "0.5.0-beta.0", "description": "A plugin for lisk-framework that provides automatic detection of validator misbehavior and sends a reportValidatorMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,9 +38,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, 
"dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "fs-extra": "11.1.0", - "lisk-sdk": "^6.1.0-beta.0" + "lisk-sdk": "^6.1.0-beta.1" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework/package.json b/framework/package.json index 37316208680..769c65e5ef4 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.11.0-beta.0", + "version": "0.12.0-beta.0", "description": "Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,17 +42,17 @@ }, "dependencies": { "@chainsafe/blst": "0.2.9", - "@liskhq/lisk-api-client": "^6.1.0-beta.0", - "@liskhq/lisk-chain": "^0.5.0-beta.0", - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-chain": "^0.6.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.9.0-beta.0", - "@liskhq/lisk-transaction-pool": "^0.7.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", - "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-p2p": "^0.10.0-beta.0", + "@liskhq/lisk-transaction-pool": "^0.8.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", + "@liskhq/lisk-tree": "^0.5.0-beta.0", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", + "@liskhq/lisk-validator": "^0.9.0-beta.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.9", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index 5707fccdc63..21d67b5187d 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.4.0-beta.0", - "@liskhq/lisk-cryptography": "4.1.0-beta.0", + "@liskhq/lisk-codec": 
"0.5.0-beta.0", + "@liskhq/lisk-cryptography": "4.1.0-beta.1", "@liskhq/lisk-passphrase": "4.1.0-beta.0", - "@liskhq/lisk-validator": "0.8.0-beta.0", + "@liskhq/lisk-validator": "0.9.0-beta.0", "protobufjs": "7.2.4" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 00d61cdfb5b..39ee0c20173 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "6.1.0-beta.0", + "version": "6.1.0-beta.1", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,19 +29,19 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.0", - "@liskhq/lisk-chain": "^0.5.0-beta.0", - "@liskhq/lisk-codec": "^0.4.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-chain": "^0.6.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-beta.1", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.9.0-beta.0", + "@liskhq/lisk-p2p": "^0.10.0-beta.0", "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transaction-pool": "^0.7.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.0", - "@liskhq/lisk-tree": "^0.4.0-beta.0", + "@liskhq/lisk-transaction-pool": "^0.8.0-beta.0", + "@liskhq/lisk-transactions": "^6.1.0-beta.1", + "@liskhq/lisk-tree": "^0.5.0-beta.0", "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.8.0-beta.0", - "lisk-framework": "^0.11.0-beta.0" + "@liskhq/lisk-validator": "^0.9.0-beta.0", + "lisk-framework": "^0.12.0-beta.0" }, "devDependencies": { "eslint": "8.28.0", diff --git a/yarn.lock b/yarn.lock index 75e691fdb4f..40d0909b755 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1848,6 +1848,16 @@ hash-wasm "4.9.0" tweetnacl "1.0.3" +"@liskhq/lisk-cryptography@^4.0.0-rc.0": + version "4.0.0-rc.0" + resolved 
"https://npm.lisk.com/@liskhq/lisk-cryptography/-/lisk-cryptography-4.0.0-rc.0.tgz#0cf5e1b8f67116c8ff258e62d5da3d23a5cef185" + integrity sha512-OIqxD9oNcY2OlFNkI+Ay3Mex+EEt3AcmDFKpkshGBieuMvZzgfOAupPfaB3L36q6pMxGHpRc9Nzz6VQt0vNCYQ== + dependencies: + "@liskhq/lisk-passphrase" "^4.0.0-rc.0" + buffer-reverse "1.0.1" + hash-wasm "4.9.0" + tweetnacl "1.0.3" + "@liskhq/lisk-db@0.3.7": version "0.3.7" resolved "https://registry.yarnpkg.com/@liskhq/lisk-db/-/lisk-db-0.3.7.tgz#9dce3d0c37f248f9221b26f0d57c3306d7d072ef" @@ -1878,6 +1888,13 @@ dependencies: bip39 "3.0.3" +"@liskhq/lisk-passphrase@^4.0.0-rc.0": + version "4.0.0-rc.0" + resolved "https://npm.lisk.com/@liskhq/lisk-passphrase/-/lisk-passphrase-4.0.0-rc.0.tgz#78fe583229c96d76258906375e34ff84a413be05" + integrity sha512-m87nhvUpOlSLr5NRV2M4INtg0IjjFF7Bte96Iq6X1dhzOjlmPg/QUQa7MFUzQu3NEWWHnpwON8QQK1FUE6ixYw== + dependencies: + bip39 "3.0.3" + "@liskhq/lisk-transaction-pool@^0.6.0-beta.6": version "0.6.0-beta.6" resolved "https://npm.lisk.com/@liskhq/lisk-transaction-pool/-/lisk-transaction-pool-0.6.0-beta.6.tgz#85c36789c45cffbbd73f90a961710763cdd857aa" @@ -1923,6 +1940,18 @@ semver "7.5.2" validator "13.7.0" +"@liskhq/lisk-validator@^0.8.0-beta.0": + version "0.8.0-rc.0" + resolved "https://npm.lisk.com/@liskhq/lisk-validator/-/lisk-validator-0.8.0-rc.0.tgz#fb136717f71ce35c7937d79d3b857d26195ff8c8" + integrity sha512-ymKW2eRw4KVBXJLN7co2iJv9WmkytL4IyxXiwJq5xvSzAN/YIx7NYViT8WCcGmUA/ryd55kwdKX/9kKkhafiDw== + dependencies: + "@liskhq/lisk-cryptography" "^4.0.0-rc.0" + ajv "8.1.0" + ajv-formats "2.1.1" + debug "4.3.4" + semver "7.5.2" + validator "13.7.0" + "@lmdb/lmdb-darwin-arm64@2.5.2": version "2.5.2" resolved "https://registry.yarnpkg.com/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-2.5.2.tgz#bc66fa43286b5c082e8fee0eacc17995806b6fbe" From 50ab4da332af59ff0b00fbac9e22b43ceef5e424 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Tue, 12 Sep 2023 08:18:07 +0200 Subject: [PATCH 116/170] Add 
missing BaseStore unit test (#8951) --- framework/src/modules/base_store.ts | 3 +-- framework/test/unit/modules/base_store.spec.ts | 4 ++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/framework/src/modules/base_store.ts b/framework/src/modules/base_store.ts index 13138a313fa..5d3b6deb718 100644 --- a/framework/src/modules/base_store.ts +++ b/framework/src/modules/base_store.ts @@ -11,8 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. */ -import { emptySchema } from '@liskhq/lisk-codec'; -import { Schema } from '@liskhq/lisk-codec'; +import { Schema, emptySchema } from '@liskhq/lisk-codec'; import { utils } from '@liskhq/lisk-cryptography'; import { IterateOptions } from '@liskhq/lisk-db'; import { ImmutableSubStore, SubStore } from '../state_machine/types'; diff --git a/framework/test/unit/modules/base_store.spec.ts b/framework/test/unit/modules/base_store.spec.ts index 7e909aef002..a32b8c5a6e4 100644 --- a/framework/test/unit/modules/base_store.spec.ts +++ b/framework/test/unit/modules/base_store.spec.ts @@ -85,6 +85,10 @@ describe('BaseStore', () => { await expect(store.has(context, key)).resolves.toBeTrue(); }); + + it('should return false when key does not exist', async () => { + await expect(store.has(context, key)).resolves.toBeFalse(); + }); }); describe('set', () => { From 4f88fb8674b71d2c30325e3b032204428662e772 Mon Sep 17 00:00:00 2001 From: Mitsuaki Uchimoto <36514357+mitsuaki-u@users.noreply.github.com> Date: Wed, 13 Sep 2023 17:58:29 +0300 Subject: [PATCH 117/170] Update authority command checks for weight zero (#8953) Co-authored-by: Mitsuaki Uchimoto --- .../modules/poa/commands/update_authority.ts | 64 ++++++-- framework/src/modules/poa/schemas.ts | 2 + .../poa/commands/update_authority.spec.ts | 155 ++++++++++++------ 3 files changed, 152 insertions(+), 69 deletions(-) diff --git a/framework/src/modules/poa/commands/update_authority.ts b/framework/src/modules/poa/commands/update_authority.ts index 
63fe5c30575..e5a40414203 100644 --- a/framework/src/modules/poa/commands/update_authority.ts +++ b/framework/src/modules/poa/commands/update_authority.ts @@ -20,7 +20,6 @@ import { BaseCommand } from '../../base_command'; import { updateAuthoritySchema, validatorSignatureMessageSchema } from '../schemas'; import { COMMAND_UPDATE_AUTHORITY, - MAX_NUM_VALIDATORS, MESSAGE_TAG_POA, EMPTY_BYTES, UpdateAuthorityResult, @@ -54,20 +53,20 @@ export class UpdateAuthorityCommand extends BaseCommand { context: CommandVerifyContext, ): Promise { const { newValidators, threshold, validatorsUpdateNonce } = context.params; - - if (newValidators.length < 1 || newValidators.length > MAX_NUM_VALIDATORS) { - throw new Error( - `newValidators length must be between 1 and ${MAX_NUM_VALIDATORS} (inclusive).`, - ); - } - const newValidatorsAddresses = newValidators.map(newValidator => newValidator.address); + if (!objectUtils.isBufferArrayOrdered(newValidatorsAddresses)) { - throw new Error('Addresses in newValidators are not lexicographically ordered.'); + return { + status: VerifyStatus.FAIL, + error: new Error('Addresses in newValidators are not lexicographically ordered.'), + }; } if (!objectUtils.bufferArrayUniqueItems(newValidatorsAddresses)) { - throw new Error('Addresses in newValidators are not unique.'); + return { + status: VerifyStatus.FAIL, + error: new Error('Addresses in newValidators are not unique.'), + }; } const validatorStore = this.stores.get(ValidatorStore); @@ -75,29 +74,58 @@ export class UpdateAuthorityCommand extends BaseCommand { for (const newValidator of newValidators) { const validatorExists = await validatorStore.has(context, newValidator.address); if (!validatorExists) { - throw new Error( - `No validator found for given address ${newValidator.address.toString('hex')}.`, - ); + return { + status: VerifyStatus.FAIL, + error: new Error( + `No validator found for given address ${newValidator.address.toString('hex')}.`, + ), + }; } + + if (newValidator.weight 
=== BigInt(0)) { + return { + status: VerifyStatus.FAIL, + error: new Error(`Validator weight cannot be zero.`), + }; + } + totalWeight += newValidator.weight; } + if (totalWeight === BigInt(0)) { + return { + status: VerifyStatus.FAIL, + error: new Error(`Validators total weight cannot be zero.`), + }; + } + if (totalWeight > MAX_UINT64) { - throw new Error(`Validators total weight exceeds ${MAX_UINT64}.`); + return { + status: VerifyStatus.FAIL, + error: new Error(`Validators total weight exceeds ${MAX_UINT64}.`), + }; } const minThreshold = totalWeight / BigInt(3) + BigInt(1); if (threshold < minThreshold || threshold > totalWeight) { - throw new Error(`Threshold must be between ${minThreshold} and ${totalWeight} (inclusive).`); + return { + status: VerifyStatus.FAIL, + error: new Error( + `Threshold must be between ${minThreshold} and ${totalWeight} (inclusive).`, + ), + }; } const chainPropertiesStore = await this.stores .get(ChainPropertiesStore) .get(context, EMPTY_BYTES); if (validatorsUpdateNonce !== chainPropertiesStore.validatorsUpdateNonce) { - throw new Error( - `validatorsUpdateNonce must be equal to ${chainPropertiesStore.validatorsUpdateNonce}.`, - ); + return { + status: VerifyStatus.FAIL, + error: new Error( + `validatorsUpdateNonce must be equal to ${chainPropertiesStore.validatorsUpdateNonce}.`, + ), + }; } return { diff --git a/framework/src/modules/poa/schemas.ts b/framework/src/modules/poa/schemas.ts index c06d1ca840e..d15bd761335 100644 --- a/framework/src/modules/poa/schemas.ts +++ b/framework/src/modules/poa/schemas.ts @@ -105,6 +105,8 @@ export const updateAuthoritySchema = { type: 'array', fieldNumber: 1, items: validator, + minItems: 1, + maxItems: MAX_NUM_VALIDATORS, }, threshold: { dataType: 'uint64', diff --git a/framework/test/unit/modules/poa/commands/update_authority.spec.ts b/framework/test/unit/modules/poa/commands/update_authority.spec.ts index eff3fd528ff..7f001634e5d 100644 --- 
a/framework/test/unit/modules/poa/commands/update_authority.spec.ts +++ b/framework/test/unit/modules/poa/commands/update_authority.spec.ts @@ -1,10 +1,11 @@ import { bls, utils } from '@liskhq/lisk-cryptography'; import { codec } from '@liskhq/lisk-codec'; import { TransactionAttrs } from '@liskhq/lisk-chain'; -import { MAX_UINT64 } from '@liskhq/lisk-validator'; +import { MAX_UINT64, validator } from '@liskhq/lisk-validator'; import { CommandExecuteContext, CommandVerifyContext, + MAX_NUM_VALIDATORS, PoAModule, Transaction, VerifyStatus, @@ -17,7 +18,6 @@ import { EMPTY_BYTES, KEY_SNAPSHOT_0, KEY_SNAPSHOT_2, - MAX_NUM_VALIDATORS, MODULE_NAME_POA, UpdateAuthorityResult, } from '../../../../../src/modules/poa/constants'; @@ -132,59 +132,99 @@ describe('UpdateAuthority', () => { .createCommandVerifyContext(updateAuthoritySchema); }); - it('should throw error when length of newValidators is less than 1', async () => { + it('should throw error when length of newValidators is less than 1', () => { + expect(() => + validator.validate(updateAuthorityCommand.schema, { + ...updateAuthorityValidatorParams, + newValidators: [], + }), + ).toThrow('must NOT have fewer than 1 items'); + }); + + it('should throw error when length of newValidators is greater than MAX_NUM_VALIDATORS', async () => { + expect(() => + validator.validate(updateAuthorityCommand.schema, { + ...updateAuthorityValidatorParams, + newValidators: Array.from(Array(MAX_NUM_VALIDATORS + 1).keys()).map(_ => ({ + address: utils.getRandomBytes(20), + weight: BigInt(1), + })), + }), + ).toThrow(`must NOT have more than ${MAX_NUM_VALIDATORS} items`); + }); + + it('should return error when newValidators are not lexicographically ordered', async () => { context = testing .createTransactionContext({ stateStore, transaction: buildTransaction({ params: buildUpdateAuthorityValidatorParams({ - newValidators: [], + newValidators: [ + { + address: address1, + weight: BigInt(1), + }, + { + address: address0, + weight: 
BigInt(1), + }, + ], }), }), chainID, }) .createCommandVerifyContext(updateAuthoritySchema); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( - `newValidators length must be between 1 and ${MAX_NUM_VALIDATORS} (inclusive).`, + const result = await updateAuthorityCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude( + `Addresses in newValidators are not lexicographically ordered.`, ); }); - it('should throw error when length of newValidators is greater than MAX_NUM_VALIDATORS', async () => { + it('should return error when addresses are in newValidators are not unique', async () => { context = testing .createTransactionContext({ stateStore, transaction: buildTransaction({ params: buildUpdateAuthorityValidatorParams({ - newValidators: Array.from(Array(MAX_NUM_VALIDATORS + 1).keys()).map(_ => ({ - address: utils.getRandomBytes(20), - weight: BigInt(1), - })), + newValidators: [ + { + address: address0, + weight: BigInt(1), + }, + { + address: address1, + weight: BigInt(1), + }, + { + address: address1, + weight: BigInt(1), + }, + ], }), }), chainID, }) .createCommandVerifyContext(updateAuthoritySchema); + const result = await updateAuthorityCommand.verify(context); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( - `newValidators length must be between 1 and ${MAX_NUM_VALIDATORS} (inclusive)`, - ); + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude(`Addresses in newValidators are not unique.`); }); - it('should throw error when newValidators are not lexicographically ordered', async () => { + it('should return error when validator is not in ValidatorStore', async () => { context = testing .createTransactionContext({ stateStore, transaction: buildTransaction({ params: buildUpdateAuthorityValidatorParams({ newValidators: [ + ...updateAuthorityValidatorParams.newValidators, { - address: address1, - weight: BigInt(1), - }, - 
{ - address: address0, - weight: BigInt(1), + address: address2, + weight: BigInt(2), }, ], }), @@ -192,13 +232,15 @@ describe('UpdateAuthority', () => { chainID, }) .createCommandVerifyContext(updateAuthoritySchema); + const result = await updateAuthorityCommand.verify(context); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( - 'Addresses in newValidators are not lexicographically ordered.', + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude( + `No validator found for given address ${address2.toString('hex')}.`, ); }); - it('should throw error when addresses are in newValidators are not unique', async () => { + it('should return error when validator weight is zero', async () => { context = testing .createTransactionContext({ stateStore, @@ -207,11 +249,7 @@ describe('UpdateAuthority', () => { newValidators: [ { address: address0, - weight: BigInt(1), - }, - { - address: address1, - weight: BigInt(1), + weight: BigInt(0), }, { address: address1, @@ -223,23 +261,26 @@ describe('UpdateAuthority', () => { chainID, }) .createCommandVerifyContext(updateAuthoritySchema); + const result = await updateAuthorityCommand.verify(context); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( - 'Addresses in newValidators are not unique.', - ); + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude(`Validator weight cannot be zero.`); }); - it('should throw error when validator is not in ValidatorStore', async () => { + it('should return error when totalWeight is zero', async () => { context = testing .createTransactionContext({ stateStore, transaction: buildTransaction({ params: buildUpdateAuthorityValidatorParams({ newValidators: [ - ...updateAuthorityValidatorParams.newValidators, { - address: address2, - weight: BigInt(2), + address: address0, + weight: BigInt(0), + }, + { + address: address1, + weight: BigInt(0), }, ], }), @@ -247,13 +288,13 @@ 
describe('UpdateAuthority', () => { chainID, }) .createCommandVerifyContext(updateAuthoritySchema); + const result = await updateAuthorityCommand.verify(context); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( - `No validator found for given address ${address2.toString('hex')}.`, - ); + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude(`Validator weight cannot be zero.`); }); - it('should throw error when totalWeight is greater than MAX_UINT64', async () => { + it('should return error when totalWeight is greater than MAX_UINT64', async () => { context = testing .createTransactionContext({ stateStore, @@ -274,13 +315,13 @@ describe('UpdateAuthority', () => { chainID, }) .createCommandVerifyContext(updateAuthoritySchema); + const result = await updateAuthorityCommand.verify(context); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( - `Validators total weight exceeds ${MAX_UINT64}`, - ); + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude(`Validators total weight exceeds ${MAX_UINT64}`); }); - it('should throw error when trsParams.threshold is less than (totalWeight / 3) + 1 ', async () => { + it('should return error when trsParams.threshold is less than (totalWeight / 3) + 1 ', async () => { context = testing .createTransactionContext({ stateStore, @@ -294,16 +335,20 @@ describe('UpdateAuthority', () => { .createCommandVerifyContext(updateAuthoritySchema); const totalWeight = updateAuthorityValidatorParams.newValidators.reduce( - (acc, validator) => acc + validator.weight, + (acc, v) => acc + v.weight, BigInt(0), ); const minThreshold = totalWeight / BigInt(3) + BigInt(1); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + + const result = await updateAuthorityCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude( `Threshold must be between ${minThreshold} 
and ${totalWeight} (inclusive).`, ); }); - it('should throw error when trsParams.threshold is greater than totalWeight', async () => { + it('should return error when trsParams.threshold is greater than totalWeight', async () => { context = testing .createTransactionContext({ stateStore, @@ -317,16 +362,20 @@ describe('UpdateAuthority', () => { .createCommandVerifyContext(updateAuthoritySchema); const totalWeight = updateAuthorityValidatorParams.newValidators.reduce( - (acc, validator) => acc + validator.weight, + (acc, v) => acc + v.weight, BigInt(0), ); const minThreshold = totalWeight / BigInt(3) + BigInt(1); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + + const result = await updateAuthorityCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude( `Threshold must be between ${minThreshold} and ${totalWeight}`, ); }); - it('should throw error when trsParams.validatorsUpdateNonce does not equal to chainProperties.validatorsUpdateNonce', async () => { + it('should return error when trsParams.validatorsUpdateNonce does not equal to chainProperties.validatorsUpdateNonce', async () => { context = testing .createTransactionContext({ stateStore, @@ -340,7 +389,11 @@ describe('UpdateAuthority', () => { .createCommandVerifyContext(updateAuthoritySchema); const chainProperties = await chainPropertiesStore.get(context, EMPTY_BYTES); - await expect(updateAuthorityCommand.verify(context)).rejects.toThrow( + + const result = await updateAuthorityCommand.verify(context); + + expect(result.status).toBe(VerifyStatus.FAIL); + expect(result.error?.message).toInclude( `validatorsUpdateNonce must be equal to ${chainProperties.validatorsUpdateNonce}.`, ); }); From e3a31ab2096bd3c3316080369f3afe0409b8cab4 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 14 Sep 2023 15:29:32 +0100 Subject: [PATCH 118/170] Incorrect MIN_SINT32 constant in lisk validator 
(#8976) Update constant --- elements/lisk-validator/src/constants.ts | 10 +++++----- elements/lisk-validator/test/validation.spec.ts | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/elements/lisk-validator/src/constants.ts b/elements/lisk-validator/src/constants.ts index a2473c75065..13b8267d5b3 100644 --- a/elements/lisk-validator/src/constants.ts +++ b/elements/lisk-validator/src/constants.ts @@ -13,9 +13,9 @@ * */ -export const MAX_SINT32 = 2147483647; // (2 ** (32 - 1)) + 1 * -1 -export const MIN_SINT32 = MAX_SINT32 * -1; // ((2 ** (32 - 1)) - 1) * -1 +export const MAX_SINT32 = 2147483647; // (2 ** (32 - 1)) - 1 +export const MIN_SINT32 = MAX_SINT32 * -1 - 1; // (2 ** (32 - 1)) * -1 export const MAX_UINT32 = 4294967295; // (2 ** 32) - 1 -export const MAX_UINT64 = BigInt('18446744073709551615'); // BigInt((2 ** 64) - 1) - 1 -export const MAX_SINT64 = BigInt('9223372036854775807'); // BigInt(2 ** (64 - 1) - 1) -1 -export const MIN_SINT64 = MAX_SINT64 * BigInt(-1) - BigInt(1); // (BigInt(2 ** (64 - 1) - 1) -1) * BigInt(-1) +export const MAX_UINT64 = BigInt('18446744073709551615'); // BigInt((2 ** 64) - 1) - BigInt(1) +export const MAX_SINT64 = BigInt('9223372036854775807'); // BigInt(2 ** (64 - 1) - 1) - BigInt(1) +export const MIN_SINT64 = MAX_SINT64 * BigInt(-1) - BigInt(1); // BigInt(2 ** (64 - 1)) * BigInt(-1) diff --git a/elements/lisk-validator/test/validation.spec.ts b/elements/lisk-validator/test/validation.spec.ts index 4e5aa726076..8bd6077ed86 100644 --- a/elements/lisk-validator/test/validation.spec.ts +++ b/elements/lisk-validator/test/validation.spec.ts @@ -327,8 +327,8 @@ describe('validation', () => { return expect(isSInt32(2147483648)).toBeFalse(); }); - it('should return false when a number "-2147483648" which is just below the limit of sint32', () => { - return expect(isSInt32(-2147483648)).toBeFalse(); + it('should return false when a number "-2147483649" which is just below the limit of sint32', () => { + return 
expect(isSInt32(-2147483649)).toBeFalse(); }); it('should return true when a valid number was provided', () => { From 0462a9a4d5093e182a18d0230f3be659e05ce587 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 14 Sep 2023 15:58:39 +0100 Subject: [PATCH 119/170] Update get name function of NFT module (#8988) Use constant --- framework/src/modules/nft/module.ts | 3 ++- framework/test/unit/modules/nft/module.spec.ts | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index a772194a149..92c876a4dfb 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -66,6 +66,7 @@ import { ALL_SUPPORTED_NFTS_KEY, LENGTH_ADDRESS, LENGTH_CHAIN_ID, + MODULE_NAME_NFT, NFT_NOT_LOCKED, } from './constants'; @@ -122,7 +123,7 @@ export class NFTModule extends BaseInteroperableModule { } public get name(): string { - return 'nft'; + return MODULE_NAME_NFT; } public addDependencies( diff --git a/framework/test/unit/modules/nft/module.spec.ts b/framework/test/unit/modules/nft/module.spec.ts index 99ae42a0dd3..bf791397b69 100644 --- a/framework/test/unit/modules/nft/module.spec.ts +++ b/framework/test/unit/modules/nft/module.spec.ts @@ -29,6 +29,7 @@ import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, LENGTH_NFT_ID, + MODULE_NAME_NFT, NFT_NOT_LOCKED, } from '../../../../src/modules/nft/constants'; import { NFTStore } from '../../../../src/modules/nft/stores/nft'; @@ -55,7 +56,7 @@ describe('nft module', () => { }; it('should have the name "nft"', () => { - expect(module.name).toBe('nft'); + expect(module.name).toBe(MODULE_NAME_NFT); }); describe('initGenesisState', () => { From 67ce4faa730c298a9d42bd539f3b0dee41bc5c21 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 14 Sep 2023 16:48:59 +0100 Subject: [PATCH 120/170] Define and use constant for indexLength of NFT module (#8989) Define 
constant --- framework/src/modules/nft/constants.ts | 1 + framework/src/modules/nft/method.ts | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/framework/src/modules/nft/constants.ts b/framework/src/modules/nft/constants.ts index 475de2ac82b..52251e153ca 100644 --- a/framework/src/modules/nft/constants.ts +++ b/framework/src/modules/nft/constants.ts @@ -15,6 +15,7 @@ export const LENGTH_CHAIN_ID = 4; export const LENGTH_NFT_ID = 16; export const LENGTH_COLLECTION_ID = 4; +export const LENGTH_INDEX = LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID; export const MIN_LENGTH_MODULE_NAME = 1; export const MAX_LENGTH_MODULE_NAME = 32; export const LENGTH_ADDRESS = 20; diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index b1be7aa54b6..35e35edf456 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -23,6 +23,7 @@ import { LENGTH_ADDRESS, LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, + LENGTH_INDEX, LENGTH_NFT_ID, MAX_LENGTH_DATA, NFT_NOT_LOCKED, @@ -273,12 +274,11 @@ export class NFTMethod extends BaseMethod { methodContext: MethodContext, collectionID: Buffer, ): Promise { - const indexLength = LENGTH_NFT_ID - LENGTH_CHAIN_ID - LENGTH_COLLECTION_ID; const nftStore = this.stores.get(NFTStore); const nftStoreData = await nftStore.iterate(methodContext, { - gte: Buffer.concat([this._config.ownChainID, collectionID, Buffer.alloc(indexLength, 0)]), - lte: Buffer.concat([this._config.ownChainID, collectionID, Buffer.alloc(indexLength, 255)]), + gte: Buffer.concat([this._config.ownChainID, collectionID, Buffer.alloc(LENGTH_INDEX, 0)]), + lte: Buffer.concat([this._config.ownChainID, collectionID, Buffer.alloc(LENGTH_INDEX, 255)]), }); if (nftStoreData.length === 0) { @@ -312,7 +312,7 @@ export class NFTMethod extends BaseMethod { } const index = await this.getNextAvailableIndex(methodContext, collectionID); - const indexBytes = Buffer.alloc(LENGTH_NFT_ID - LENGTH_CHAIN_ID - 
LENGTH_COLLECTION_ID); + const indexBytes = Buffer.alloc(LENGTH_INDEX); indexBytes.writeBigInt64BE(index); const nftID = Buffer.concat([this._config.ownChainID, collectionID, indexBytes]); From 09c3aa8d2c8ecf53971e64e83d23863c44e51d63 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 15 Sep 2023 10:04:13 +0200 Subject: [PATCH 121/170] :necktie: NFTMethod.getCollectionID (#8963) :necktie: Removes validation logic from NFTMethod.getCollectionID --- framework/src/modules/nft/method.ts | 14 +++----------- framework/test/unit/modules/nft/method.spec.ts | 16 +++++----------- 2 files changed, 8 insertions(+), 22 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 35e35edf456..8f49a42fbb5 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -185,16 +185,8 @@ export class NFTMethod extends BaseMethod { }); } - public async getCollectionID( - methodContext: ImmutableMethodContext, - nftID: Buffer, - ): Promise { - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(methodContext, nftID); - if (!nftExists) { - throw new Error('NFT substore entry does not exist'); - } - return nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + public getCollectionID(nftID: Buffer): Buffer { + return nftID.subarray(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); } public async isNFTSupported( @@ -221,7 +213,7 @@ export class NFTMethod extends BaseMethod { if (supportedNFTsStoreData.supportedCollectionIDArray.length === 0) { return true; } - const collectionID = await this.getCollectionID(methodContext, nftID); + const collectionID = this.getCollectionID(nftID); if ( supportedNFTsStoreData.supportedCollectionIDArray.some(id => collectionID.equals(id.collectionID), diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 
d1c933dfbb2..b4f5ab6b53e 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -362,19 +362,13 @@ describe('NFTMethod', () => { }); describe('getCollectionID', () => { - it('should throw if entry does not exist in the nft substore for the nft id', async () => { - await expect(method.getCollectionID(methodContext, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', - ); - }); - it('should return the first bytes of length LENGTH_CHAIN_ID from provided nftID', async () => { await nftStore.save(methodContext, nftID, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: [], }); - const expectedValue = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); - const receivedValue = await method.getCollectionID(methodContext, nftID); + const expectedValue = nftID.subarray(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + const receivedValue = method.getCollectionID(nftID); expect(receivedValue).toEqual(expectedValue); }); }); @@ -395,7 +389,7 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array is empty', async () => { - await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { + await supportedNFTsStore.set(methodContext, nftID.subarray(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [], }); @@ -404,9 +398,9 @@ describe('NFTMethod', () => { }); it('should return true if nft chain id does not equal own chain id but nft chain id is supported and corresponding supported collection id array includes collection id for nft id', async () => { - await supportedNFTsStore.set(methodContext, nftID.slice(0, LENGTH_CHAIN_ID), { + await supportedNFTsStore.set(methodContext, nftID.subarray(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ - { collectionID: nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID) }, 
+ { collectionID: nftID.subarray(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID) }, { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, ], }); From 7c5ca585c50369ef46c590de242aa33bdc2410f3 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 15 Sep 2023 14:42:29 +0200 Subject: [PATCH 122/170] :fire: messageFeeTokenID from TransferCrossChainCommand of NFT Module (#8966) --- .../nft/commands/transfer_cross_chain.ts | 7 +--- framework/src/modules/nft/schemas.ts | 10 +---- .../nft/commands/transfer_cross_chain.spec.ts | 38 ------------------- 3 files changed, 2 insertions(+), 53 deletions(-) diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index 533dd83948e..daf3c4f472f 100644 --- a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -33,7 +33,6 @@ export interface Params { recipientAddress: Buffer; data: string; messageFee: bigint; - messageFeeTokenID: Buffer; includeAttributes: boolean; } @@ -90,10 +89,6 @@ export class TransferCrossChainCommand extends BaseCommand { params.receivingChainID, ); - if (!params.messageFeeTokenID.equals(messageFeeTokenID)) { - throw new Error('Mismatching message fee Token ID'); - } - if (!owner.equals(context.transaction.senderAddress)) { throw new Error('Transfer not initiated by the NFT owner'); } @@ -110,7 +105,7 @@ export class TransferCrossChainCommand extends BaseCommand { const availableBalance = await this._tokenMethod.getAvailableBalance( context.getMethodContext(), context.transaction.senderAddress, - params.messageFeeTokenID, + messageFeeTokenID, ); if (availableBalance < params.messageFee) { diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index c3bfbc15e69..7d0de1825df 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -16,7 
+16,6 @@ import { LENGTH_CHAIN_ID, LENGTH_COLLECTION_ID, LENGTH_NFT_ID, - LENGTH_TOKEN_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME, MAX_LENGTH_DATA, @@ -114,7 +113,6 @@ export const crossChainTransferParamsSchema = { 'recipientAddress', 'data', 'messageFee', - 'messageFeeTokenID', 'includeAttributes', ], properties: { @@ -145,15 +143,9 @@ export const crossChainTransferParamsSchema = { dataType: 'uint64', fieldNumber: 5, }, - messageFeeTokenID: { - dataType: 'bytes', - minLength: LENGTH_TOKEN_ID, - maxLength: LENGTH_TOKEN_ID, - fieldNumber: 6, - }, includeAttributes: { dataType: 'boolean', - fieldNumber: 7, + fieldNumber: 6, }, }, }; diff --git a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts index 897a8ae0732..d7c2d9e0d59 100644 --- a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts @@ -82,7 +82,6 @@ describe('TransferCrossChainComand', () => { recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS), data: '', messageFee: BigInt(100000), - messageFeeTokenID, includeAttributes: false, }; @@ -302,32 +301,6 @@ describe('TransferCrossChainComand', () => { ).rejects.toThrow("'.data' must NOT have more than 64 characters"); }); - it('should fail if messageFeeTokenID does not have valid length', async () => { - const messageFeeTokenIDMinLengthContext = createTransactionContextWithOverridingParams({ - messageFeeTokenID: utils.getRandomBytes(LENGTH_TOKEN_ID - 1), - }); - - const messageFeeTokenIDMaxLengthContext = createTransactionContextWithOverridingParams({ - messageFeeTokenID: utils.getRandomBytes(LENGTH_TOKEN_ID + 1), - }); - - await expect( - command.verify( - messageFeeTokenIDMinLengthContext.createCommandVerifyContext( - crossChainTransferParamsSchema, - ), - ), - ).rejects.toThrow("'.messageFeeTokenID' minLength not satisfied"); - - await expect( - command.verify( - 
messageFeeTokenIDMaxLengthContext.createCommandVerifyContext( - crossChainTransferParamsSchema, - ), - ), - ).rejects.toThrow("'.messageFeeTokenID' maxLength exceeded"); - }); - it('should fail if NFT does not exist', async () => { const context = createTransactionContextWithOverridingParams({ nftID: utils.getRandomBytes(LENGTH_NFT_ID), @@ -365,17 +338,6 @@ describe('TransferCrossChainComand', () => { ).rejects.toThrow(''); }); - it('should fail if messageFeeTokenID for receiving chain differs from the messageFeeTokenID of parameters', async () => { - const context = createTransactionContextWithOverridingParams({ - nftID: existingNFT.nftID, - messageFeeTokenID: utils.getRandomBytes(LENGTH_TOKEN_ID), - }); - - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('Mismatching message fee Token ID'); - }); - it('should fail if the owner of the NFT is not the sender', async () => { const context = createTransactionContextWithOverridingParams({ nftID: existingNFT.nftID, From c659e396bf1c5ed4f21d840e7300fb60045a502c Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Fri, 15 Sep 2023 15:06:22 +0200 Subject: [PATCH 123/170] NFT module can not lock an NFT (#8992) * :necktie: Removes validation logic from NFTMethod.getCollectionID * :necktie: NFTMethod.lock throws if locking module is nft * Removes commented code * Updates NFTMethod.lock --- framework/src/modules/nft/method.ts | 4 +++ .../test/unit/modules/nft/method.spec.ts | 29 +++++++++++-------- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 8f49a42fbb5..4ae5995f69d 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -329,6 +329,10 @@ export class NFTMethod extends BaseMethod { } public async lock(methodContext: MethodContext, module: string, nftID: Buffer): Promise { + if (module 
=== NFT_NOT_LOCKED) { + throw new Error('Cannot be locked by NFT module'); + } + const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(methodContext, nftID); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index b4f5ab6b53e..b69fdf29a57 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -92,6 +92,7 @@ describe('NFTMethod', () => { let methodContext!: MethodContext; + const lockingModule = 'token'; const nftStore = module.stores.get(NFTStore); const userStore = module.stores.get(UserStore); const supportedNFTsStore = module.stores.get(SupportedNFTsStore); @@ -252,8 +253,6 @@ describe('NFTMethod', () => { }); it('should return the lockingModule for the owner of the NFT', async () => { - const lockingModule = 'nft'; - await nftStore.save(methodContext, nftID, { owner, attributesArray: [], @@ -626,8 +625,14 @@ describe('NFTMethod', () => { }); describe('lock', () => { + it('should throw if provided locking module is "nft"', async () => { + await expect(method.lock(methodContext, NFT_NOT_LOCKED, existingNFT.nftID)).rejects.toThrow( + 'Cannot be locked by NFT module', + ); + }); + it('should throw and log LockEvent if NFT does not exist', async () => { - await expect(method.lock(methodContext, module.name, nftID)).rejects.toThrow( + await expect(method.lock(methodContext, lockingModule, nftID)).rejects.toThrow( 'NFT substore entry does not exist', ); @@ -637,7 +642,7 @@ describe('NFTMethod', () => { LockEvent, 0, { - module: module.name, + module: lockingModule, nftID, }, NftEventResult.RESULT_NFT_DOES_NOT_EXIST, @@ -645,7 +650,7 @@ describe('NFTMethod', () => { }); it('should throw and log LockEvent if NFT is escrowed', async () => { - await expect(method.lock(methodContext, module.name, escrowedNFT.nftID)).rejects.toThrow( + await expect(method.lock(methodContext, lockingModule, escrowedNFT.nftID)).rejects.toThrow( 
'NFT is escrowed to another chain', ); @@ -655,7 +660,7 @@ describe('NFTMethod', () => { LockEvent, 0, { - module: module.name, + module: lockingModule, nftID: escrowedNFT.nftID, }, NftEventResult.RESULT_NFT_ESCROWED, @@ -664,7 +669,7 @@ describe('NFTMethod', () => { it('should throw and log LockEvent if NFT is locked', async () => { await expect( - method.lock(methodContext, module.name, lockedExistingNFT.nftID), + method.lock(methodContext, lockingModule, lockedExistingNFT.nftID), ).rejects.toThrow('NFT is already locked'); checkEventResult( @@ -673,7 +678,7 @@ describe('NFTMethod', () => { LockEvent, 0, { - module: module.name, + module: lockingModule, nftID: lockedExistingNFT.nftID, }, NftEventResult.RESULT_NFT_LOCKED, @@ -698,12 +703,12 @@ describe('NFTMethod', () => { NftEventResult.RESULT_SUCCESSFUL, ); - const { lockingModule } = await userStore.get( + const { lockingModule: actualLockingModule } = await userStore.get( methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID), ); - expect(lockingModule).toEqual(expectedLockingModule); + expect(actualLockingModule).toEqual(expectedLockingModule); }); }); @@ -785,12 +790,12 @@ describe('NFTMethod', () => { NftEventResult.RESULT_SUCCESSFUL, ); - const { lockingModule } = await userStore.get( + const { lockingModule: expectedLockingModule } = await userStore.get( methodContext, userStore.getKey(lockedExistingNFT.owner, lockedExistingNFT.nftID), ); - expect(lockingModule).toEqual(NFT_NOT_LOCKED); + expect(expectedLockingModule).toEqual(NFT_NOT_LOCKED); }); }); From 3a95f36ff8a8241988f3505c1da483ced5c9ef81 Mon Sep 17 00:00:00 2001 From: Mitsuaki Uchimoto <36514357+mitsuaki-u@users.noreply.github.com> Date: Mon, 18 Sep 2023 13:57:08 +0300 Subject: [PATCH 124/170] Update poa module schema validation and endpoint (#8967) * Update poa endpoint to use schema * Use constants in PoA * Update getValidator * Update utils typing * Remove validation from poa command and add tests * Update constant naming * 
Update getValidators default return value weight * Update update_generator_key.spec.ts --------- Co-authored-by: Mitsuaki Uchimoto --- .../poa/commands/register_authority.ts | 26 +++---- .../poa/commands/update_generator_key.ts | 10 --- framework/src/modules/poa/constants.ts | 5 +- framework/src/modules/poa/endpoint.ts | 27 ++++---- framework/src/modules/poa/module.ts | 26 +++++-- framework/src/modules/poa/stores/snapshot.ts | 4 -- framework/src/modules/poa/utils.ts | 5 +- .../poa/commands/register_authority.spec.ts | 67 +++++++++++++++++++ .../poa/commands/update_authority.spec.ts | 28 ++++---- .../poa/commands/update_generator_key.spec.ts | 33 +++++++++ .../test/unit/modules/poa/endpoint.spec.ts | 2 +- 11 files changed, 164 insertions(+), 69 deletions(-) diff --git a/framework/src/modules/poa/commands/register_authority.ts b/framework/src/modules/poa/commands/register_authority.ts index cf5d8947e84..38f7c5e5c9a 100644 --- a/framework/src/modules/poa/commands/register_authority.ts +++ b/framework/src/modules/poa/commands/register_authority.ts @@ -12,8 +12,6 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { address } from '@liskhq/lisk-cryptography'; -import { validator } from '@liskhq/lisk-validator'; import { BaseCommand } from '../../base_command'; import { registerAuthoritySchema } from '../schemas'; import { @@ -50,26 +48,19 @@ export class RegisterAuthorityCommand extends BaseCommand { context: CommandVerifyContext, ): Promise { const { name } = context.params; - try { - validator.validate(registerAuthoritySchema, context.params); - } catch (err) { - return { - status: VerifyStatus.FAIL, - error: err as Error, - }; - } if (!POA_VALIDATOR_NAME_REGEX.test(name)) { throw new Error(`Name does not comply with format ${POA_VALIDATOR_NAME_REGEX.toString()}.`); } - const nameExists = await this.stores.get(NameStore).has(context, Buffer.from(name)); + const nameExists = await this.stores.get(NameStore).has(context, Buffer.from(name, 'utf-8')); if (nameExists) { throw new Error('Name already exists.'); } - const senderAddress = address.getAddressFromPublicKey(context.transaction.senderPublicKey); - const validatorExists = await this.stores.get(ValidatorStore).has(context, senderAddress); + const validatorExists = await this.stores + .get(ValidatorStore) + .has(context, context.transaction.senderAddress); if (validatorExists) { throw new Error('Validator already exists.'); } @@ -82,20 +73,19 @@ export class RegisterAuthorityCommand extends BaseCommand { public async execute(context: CommandExecuteContext): Promise { const { params } = context; - const senderAddress = address.getAddressFromPublicKey(context.transaction.senderPublicKey); this._feeMethod.payFee(context, this._authorityRegistrationFee); - await this.stores.get(ValidatorStore).set(context, senderAddress, { + await this.stores.get(ValidatorStore).set(context, context.transaction.senderAddress, { name: params.name, }); - await this.stores.get(NameStore).set(context, Buffer.from(params.name), { - address: senderAddress, + await this.stores.get(NameStore).set(context, Buffer.from(params.name, 'utf-8'), 
{ + address: context.transaction.senderAddress, }); await this._validatorsMethod.registerValidatorKeys( context, - senderAddress, + context.transaction.senderAddress, params.blsKey, params.generatorKey, params.proofOfPossession, diff --git a/framework/src/modules/poa/commands/update_generator_key.ts b/framework/src/modules/poa/commands/update_generator_key.ts index d3f7f4f994d..4a94d28b776 100644 --- a/framework/src/modules/poa/commands/update_generator_key.ts +++ b/framework/src/modules/poa/commands/update_generator_key.ts @@ -12,7 +12,6 @@ * Removal or modification of this copyright notice is prohibited. */ -import { validator } from '@liskhq/lisk-validator'; import { BaseCommand } from '../../base_command'; import { updateGeneratorKeySchema } from '../schemas'; import { COMMAND_UPDATE_KEY } from '../constants'; @@ -41,15 +40,6 @@ export class UpdateGeneratorKeyCommand extends BaseCommand { public async verify( context: CommandVerifyContext, ): Promise { - try { - validator.validate(updateGeneratorKeySchema, context.params); - } catch (err) { - return { - status: VerifyStatus.FAIL, - error: err as Error, - }; - } - const validatorExists = await this.stores .get(ValidatorStore) .has(context, context.transaction.senderAddress); diff --git a/framework/src/modules/poa/constants.ts b/framework/src/modules/poa/constants.ts index 2e5d0f3a6c8..056a6110edf 100644 --- a/framework/src/modules/poa/constants.ts +++ b/framework/src/modules/poa/constants.ts @@ -33,7 +33,6 @@ export const COMMAND_REGISTER_AUTHORITY = 'registerAuthority'; export const COMMAND_UPDATE_KEY = 'updateKey'; export const COMMAND_UPDATE_AUTHORITY = 'updateAuthority'; export const MAX_UINT64 = BigInt(2) ** BigInt(64) - BigInt(1); -export const LENGTH_PROOF_OF_POSESSION = 96; export const defaultConfig = { authorityRegistrationFee: AUTHORITY_REGISTRATION_FEE.toString(), }; @@ -43,3 +42,7 @@ export const defaultConfig = { export const KEY_SNAPSHOT_0 = utils.intToBuffer(0, 4); export const KEY_SNAPSHOT_1 = 
utils.intToBuffer(1, 4); export const KEY_SNAPSHOT_2 = utils.intToBuffer(2, 4); +export const SUBSTORE_PREFIX_VALIDATOR_INDEX = 0; +export const SUBSTORE_PREFIX_CHAIN_INDEX = 1; +export const SUBSTORE_PREFIX_NAME_INDEX = 2; +export const SUBSTORE_PREFIX_SNAPSHOT_INDEX = 3; diff --git a/framework/src/modules/poa/endpoint.ts b/framework/src/modules/poa/endpoint.ts index c59e74e6c09..e5f9212a39e 100644 --- a/framework/src/modules/poa/endpoint.ts +++ b/framework/src/modules/poa/endpoint.ts @@ -12,14 +12,16 @@ * Removal or modification of this copyright notice is prohibited. */ +import { validator } from '@liskhq/lisk-validator'; import { address as cryptoAddress } from '@liskhq/lisk-cryptography'; import { NotFoundError } from '@liskhq/lisk-db'; import { BaseEndpoint } from '../base_endpoint'; import { ValidatorStore } from './stores/validator'; import { ModuleEndpointContext } from '../../types'; -import { KEY_SNAPSHOT_0 } from './constants'; +import { KEY_SNAPSHOT_0, NUM_BYTES_ADDRESS } from './constants'; import { SnapshotStore } from './stores'; import { Validator } from './types'; +import { getValidatorRequestSchema } from './schemas'; export class PoAEndpoint extends BaseEndpoint { private _authorityRegistrationFee!: bigint; @@ -30,10 +32,10 @@ export class PoAEndpoint extends BaseEndpoint { public async getValidator(context: ModuleEndpointContext): Promise { const validatorSubStore = this.stores.get(ValidatorStore); - const { address } = context.params; - if (typeof address !== 'string') { - throw new Error('Parameter address must be a string.'); - } + + validator.validate(getValidatorRequestSchema, context.params); + const address = context.params.address as string; + cryptoAddress.validateLisk32Address(address); let validatorName: { name: string }; @@ -70,8 +72,8 @@ export class PoAEndpoint extends BaseEndpoint { context: ModuleEndpointContext, ): Promise<{ validators: Validator[] }> { const validatorStore = this.stores.get(ValidatorStore); - const startBuf = 
Buffer.alloc(20); - const endBuf = Buffer.alloc(20, 255); + const startBuf = Buffer.alloc(NUM_BYTES_ADDRESS); + const endBuf = Buffer.alloc(NUM_BYTES_ADDRESS, 255); const validatorStoreData = await validatorStore.iterate(context, { gte: startBuf, lte: endBuf, @@ -83,18 +85,17 @@ export class PoAEndpoint extends BaseEndpoint { const validatorsData: Validator[] = []; for (const data of validatorStoreData) { const address = cryptoAddress.getLisk32AddressFromAddress(data.key); - // `name` comes from type `ValidatorName` - const { name } = await validatorStore.get(context, data.key); + const { value } = data; const activeValidator = currentRoundSnapshot.validators.find( v => cryptoAddress.getLisk32AddressFromAddress(v.address) === address, ); - const validator: Validator = { - name, + const v: Validator = { + name: value.name, address, - weight: activeValidator ? activeValidator.weight.toString() : '', + weight: activeValidator ? activeValidator.weight.toString() : '0', }; - validatorsData.push(validator); + validatorsData.push(v); } // This is needed since response from this endpoint is returning data in unexpected sorting order on next execution diff --git a/framework/src/modules/poa/module.ts b/framework/src/modules/poa/module.ts index 02049d574d4..167463be4b2 100644 --- a/framework/src/modules/poa/module.ts +++ b/framework/src/modules/poa/module.ts @@ -29,6 +29,11 @@ import { KEY_SNAPSHOT_2, MAX_UINT64, defaultConfig, + POA_VALIDATOR_NAME_REGEX, + SUBSTORE_PREFIX_VALIDATOR_INDEX, + SUBSTORE_PREFIX_CHAIN_INDEX, + SUBSTORE_PREFIX_NAME_INDEX, + SUBSTORE_PREFIX_SNAPSHOT_INDEX, } from './constants'; import { shuffleValidatorList } from './utils'; import { NextValidatorsSetter, MethodContext } from '../../state_machine/types'; @@ -78,14 +83,23 @@ export class PoAModule extends BaseModule { public constructor() { super(); this.events.register(AuthorityUpdateEvent, new AuthorityUpdateEvent(this.name)); - this.stores.register(ValidatorStore, new ValidatorStore(this.name, 
0)); - this.stores.register(ChainPropertiesStore, new ChainPropertiesStore(this.name, 1)); - this.stores.register(NameStore, new NameStore(this.name, 2)); - this.stores.register(SnapshotStore, new SnapshotStore(this.name, 3)); + this.stores.register( + ValidatorStore, + new ValidatorStore(this.name, SUBSTORE_PREFIX_VALIDATOR_INDEX), + ); + this.stores.register( + ChainPropertiesStore, + new ChainPropertiesStore(this.name, SUBSTORE_PREFIX_CHAIN_INDEX), + ); + this.stores.register(NameStore, new NameStore(this.name, SUBSTORE_PREFIX_NAME_INDEX)); + this.stores.register( + SnapshotStore, + new SnapshotStore(this.name, SUBSTORE_PREFIX_SNAPSHOT_INDEX), + ); } public get name() { - return 'poa'; + return MODULE_NAME_POA; } public addDependencies( @@ -213,7 +227,7 @@ export class PoAModule extends BaseModule { throw new Error('`validators` must be ordered lexicographically by address.'); } - if (!/^[a-z0-9!@$&_.]+$/g.test(validators[i].name)) { + if (!POA_VALIDATOR_NAME_REGEX.test(validators[i].name)) { throw new Error('`name` property is invalid. 
Must contain only characters a-z0-9!@$&_.'); } } diff --git a/framework/src/modules/poa/stores/snapshot.ts b/framework/src/modules/poa/stores/snapshot.ts index d4fe04d58b3..217ae7b4648 100644 --- a/framework/src/modules/poa/stores/snapshot.ts +++ b/framework/src/modules/poa/stores/snapshot.ts @@ -15,10 +15,6 @@ import { BaseStore } from '../../base_store'; import { NUM_BYTES_ADDRESS } from '../constants'; import { ActiveValidator } from '../types'; -export interface Validator { - address: Buffer; - weight: bigint; -} export interface SnapshotObject { validators: ActiveValidator[]; threshold: bigint; diff --git a/framework/src/modules/poa/utils.ts b/framework/src/modules/poa/utils.ts index dc7fdfd8b01..9ac361baaec 100644 --- a/framework/src/modules/poa/utils.ts +++ b/framework/src/modules/poa/utils.ts @@ -13,13 +13,12 @@ */ import { utils } from '@liskhq/lisk-cryptography'; -import { ValidatorWeightWithRoundHash } from './types'; -import { Validator } from './stores'; +import { ValidatorWeightWithRoundHash, ActiveValidator } from './types'; // Same as pos/utils/shuffleValidatorList export const shuffleValidatorList = ( roundSeed: Buffer, - validators: Validator[], + validators: ActiveValidator[], ): ValidatorWeightWithRoundHash[] => { const validatorsWithRoundHash: ValidatorWeightWithRoundHash[] = []; for (const validator of validators) { diff --git a/framework/test/unit/modules/poa/commands/register_authority.spec.ts b/framework/test/unit/modules/poa/commands/register_authority.spec.ts index 244c27961d8..5e5eff50e41 100644 --- a/framework/test/unit/modules/poa/commands/register_authority.spec.ts +++ b/framework/test/unit/modules/poa/commands/register_authority.spec.ts @@ -12,6 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ +import { validator } from '@liskhq/lisk-validator'; import { address, utils } from '@liskhq/lisk-cryptography'; import { TransactionAttrs } from '@liskhq/lisk-chain'; import { codec } from '@liskhq/lisk-codec'; @@ -32,6 +33,7 @@ import { LENGTH_GENERATOR_KEY, MODULE_NAME_POA, POA_VALIDATOR_NAME_REGEX, + MAX_LENGTH_NAME, } from '../../../../../src/modules/poa/constants'; import { registerAuthoritySchema } from '../../../../../src/modules/poa/schemas'; @@ -101,6 +103,71 @@ describe('RegisterAuthority', () => { nameStore = poaModule.stores.get(NameStore); }); + describe('verifySchema', () => { + it(`should throw error when name is longer than ${MAX_LENGTH_NAME}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + name: 'aaaaaaaaaaaaaaaaaaaaaaa', + }), + ).toThrow(`Property '.name' must NOT have more than 20 characters`); + }); + + it(`should throw error when bls key shorter than ${LENGTH_BLS_KEY}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + blsKey: utils.getRandomBytes(LENGTH_BLS_KEY - 1), + }), + ).toThrow(`Property '.blsKey' minLength not satisfied`); + }); + + it(`should throw error when bls key longer than ${LENGTH_BLS_KEY}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + blsKey: utils.getRandomBytes(LENGTH_BLS_KEY + 1), + }), + ).toThrow(`Property '.blsKey' maxLength exceeded`); + }); + + it(`should throw error when proof of possession shorter than ${LENGTH_PROOF_OF_POSSESSION}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + proofOfPossession: utils.getRandomBytes(LENGTH_PROOF_OF_POSSESSION - 1), + }), + ).toThrow(`Property '.proofOfPossession' minLength not satisfied`); + }); + + it(`should throw error when proof of possession longer than 
${LENGTH_PROOF_OF_POSSESSION}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + proofOfPossession: utils.getRandomBytes(LENGTH_PROOF_OF_POSSESSION + 1), + }), + ).toThrow(`Property '.proofOfPossession' maxLength exceeded`); + }); + + it(`should throw error when generator key shorter than ${LENGTH_GENERATOR_KEY}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + generatorKey: utils.getRandomBytes(LENGTH_GENERATOR_KEY - 1), + }), + ).toThrow(`Property '.generatorKey' minLength not satisfied`); + }); + + it(`should throw error when generator key longer than ${LENGTH_GENERATOR_KEY}`, () => { + expect(() => + validator.validate(registerAuthorityCommand.schema, { + ...registerAuthorityTransactionParams, + generatorKey: utils.getRandomBytes(LENGTH_GENERATOR_KEY + 1), + }), + ).toThrow(`Property '.generatorKey' maxLength exceeded`); + }); + }); + describe('verify', () => { let context: CommandVerifyContext; beforeEach(() => { diff --git a/framework/test/unit/modules/poa/commands/update_authority.spec.ts b/framework/test/unit/modules/poa/commands/update_authority.spec.ts index 7f001634e5d..36a57ff7bd9 100644 --- a/framework/test/unit/modules/poa/commands/update_authority.spec.ts +++ b/framework/test/unit/modules/poa/commands/update_authority.spec.ts @@ -120,18 +120,7 @@ describe('UpdateAuthority', () => { }); }); - describe('verify', () => { - let context: CommandVerifyContext; - beforeEach(() => { - context = testing - .createTransactionContext({ - stateStore, - transaction: buildTransaction({}), - chainID, - }) - .createCommandVerifyContext(updateAuthoritySchema); - }); - + describe('verifySchema', () => { it('should throw error when length of newValidators is less than 1', () => { expect(() => validator.validate(updateAuthorityCommand.schema, { @@ -141,7 +130,7 @@ describe('UpdateAuthority', () => { ).toThrow('must NOT have 
fewer than 1 items'); }); - it('should throw error when length of newValidators is greater than MAX_NUM_VALIDATORS', async () => { + it('should throw error when length of newValidators is greater than MAX_NUM_VALIDATORS', () => { expect(() => validator.validate(updateAuthorityCommand.schema, { ...updateAuthorityValidatorParams, @@ -152,6 +141,19 @@ describe('UpdateAuthority', () => { }), ).toThrow(`must NOT have more than ${MAX_NUM_VALIDATORS} items`); }); + }); + + describe('verify', () => { + let context: CommandVerifyContext; + beforeEach(() => { + context = testing + .createTransactionContext({ + stateStore, + transaction: buildTransaction({}), + chainID, + }) + .createCommandVerifyContext(updateAuthoritySchema); + }); it('should return error when newValidators are not lexicographically ordered', async () => { context = testing diff --git a/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts b/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts index d68c775eb2f..a435a7c2d47 100644 --- a/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts +++ b/framework/test/unit/modules/poa/commands/update_generator_key.spec.ts @@ -1,3 +1,18 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { validator } from '@liskhq/lisk-validator'; import { codec } from '@liskhq/lisk-codec'; import { TransactionAttrs } from '@liskhq/lisk-chain'; import { utils, address } from '@liskhq/lisk-cryptography'; @@ -75,6 +90,24 @@ describe('UpdateGeneratorKey', () => { ); }); + describe('verifySchema', () => { + it(`should throw error when generator key shorter than ${LENGTH_GENERATOR_KEY}`, () => { + expect(() => + validator.validate(updateGeneratorKeyCommand.schema, { + generatorKey: utils.getRandomBytes(LENGTH_GENERATOR_KEY - 1), + }), + ).toThrow(`Property '.generatorKey' minLength not satisfied`); + }); + + it(`should throw error when generator key longer than ${LENGTH_GENERATOR_KEY}`, () => { + expect(() => + validator.validate(updateGeneratorKeyCommand.schema, { + generatorKey: utils.getRandomBytes(LENGTH_GENERATOR_KEY + 1), + }), + ).toThrow(`Property '.generatorKey' maxLength exceeded`); + }); + }); + describe('verify', () => { let context: CommandVerifyContext; beforeEach(() => { diff --git a/framework/test/unit/modules/poa/endpoint.spec.ts b/framework/test/unit/modules/poa/endpoint.spec.ts index 484e586bc02..01af8feba6a 100644 --- a/framework/test/unit/modules/poa/endpoint.spec.ts +++ b/framework/test/unit/modules/poa/endpoint.spec.ts @@ -176,7 +176,7 @@ describe('PoAModuleEndpoint', () => { // Checking against name-sorted values expect(validators[0].weight).toBe(currentSnapshot.validators[0].weight.toString()); - expect(validators[1].weight).toBe(''); + expect(validators[1].weight).toBe('0'); }); }); From a7cc9447773726e1d28c40cfb9a89a000f35a102 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Tue, 19 Sep 2023 23:07:31 +0200 Subject: [PATCH 125/170] Remove deprecated slice function (#8997) * Remove deprecated slice function * Remove deprecated slice function --- .../bootstrapping/commands/hash-onion.spec.ts | 2 +- .../lisk-chain/src/data_access/storage.ts | 2 +- .../lisk-chain/src/state_store/state_store.ts | 4 +- 
elements/lisk-chain/src/state_store/utils.ts | 4 +- elements/lisk-chain/test/unit/event.spec.ts | 2 +- .../test/lisk-cryptography/utils.spec.ts | 2 +- elements/lisk-codec/fuzz/round_trip.js | 4 +- elements/lisk-cryptography/src/address.ts | 2 +- elements/lisk-cryptography/src/bls.ts | 4 +- elements/lisk-cryptography/src/ed.ts | 8 ++-- elements/lisk-cryptography/src/encrypt.ts | 10 ++-- .../lisk-cryptography/src/legacy_address.ts | 4 +- elements/lisk-cryptography/src/nacl/slow.ts | 2 +- elements/lisk-cryptography/src/utils.ts | 4 +- .../lisk-cryptography/test/address.spec.ts | 2 +- .../lisk-tree/src/merkle_tree/merkle_tree.ts | 5 +- .../src/utils.ts | 2 +- .../src/engine/generator/generator_store.ts | 2 +- framework/src/modules/base_offchain_store.ts | 4 +- framework/src/modules/base_store.ts | 2 +- .../interoperability/mainchain/endpoint.ts | 4 +- .../src/modules/interoperability/utils.ts | 4 +- framework/src/modules/nft/method.ts | 4 +- .../modules/pos/stores/eligible_validators.ts | 4 +- framework/src/modules/random/utils.ts | 4 +- framework/src/modules/token/cc_method.ts | 6 +-- framework/src/modules/token/endpoint.ts | 8 ++-- framework/src/modules/token/method.ts | 2 +- framework/src/modules/token/module.ts | 6 +-- framework/src/modules/token/utils.ts | 4 +- .../prefixed_state_read_writer.ts | 4 +- .../test/unit/engine/legacy/codec.spec.ts | 2 +- .../test/unit/modules/nft/method.spec.ts | 10 ++-- .../test/unit/modules/pos/method.spec.ts | 2 +- .../test/unit/modules/random/method.spec.ts | 2 +- .../test/unit/modules/random/module.spec.ts | 2 +- .../test/unit/modules/random/utils.spec.ts | 2 +- .../token/cc_commands/cc_transfer.spec.ts | 2 +- .../test/unit/modules/token/cc_method.spec.ts | 2 +- .../test/unit/modules/token/method.spec.ts | 46 +++++++++---------- .../generators/address_generation/index.js | 2 +- .../sample_generator.js | 2 +- .../pos_random_seed_generation/index.js | 2 +- .../sample_generator.js | 2 +- 44 files changed, 102 insertions(+), 97 
deletions(-) diff --git a/commander/test/bootstrapping/commands/hash-onion.spec.ts b/commander/test/bootstrapping/commands/hash-onion.spec.ts index f5fddd37067..97eaec9e7a8 100644 --- a/commander/test/bootstrapping/commands/hash-onion.spec.ts +++ b/commander/test/bootstrapping/commands/hash-onion.spec.ts @@ -44,7 +44,7 @@ describe('hash-onion command', () => { for (let i = 0; i < result.hashes.length - 1; i += 1) { let nextHash = Buffer.from(result.hashes[i + 1], 'hex'); for (let j = 0; j < result.distance; j += 1) { - nextHash = cryptography.utils.hash(nextHash).slice(0, 16); + nextHash = cryptography.utils.hash(nextHash).subarray(0, 16); } expect(result.hashes[i]).toBe(nextHash.toString('hex')); } diff --git a/elements/lisk-chain/src/data_access/storage.ts b/elements/lisk-chain/src/data_access/storage.ts index 5b194e9cc28..33c2a14dab6 100644 --- a/elements/lisk-chain/src/data_access/storage.ts +++ b/elements/lisk-chain/src/data_access/storage.ts @@ -506,7 +506,7 @@ export class Storage { const ids = await this._db.get(concatDBKeys(DB_KEY_TRANSACTIONS_BLOCK_ID, blockID)); const idLength = 32; for (let i = 0; i < ids.length; i += idLength) { - txIDs.push(ids.slice(i, i + idLength)); + txIDs.push(ids.subarray(i, i + idLength)); } } catch (error) { if (!(error instanceof NotFoundError)) { diff --git a/elements/lisk-chain/src/state_store/state_store.ts b/elements/lisk-chain/src/state_store/state_store.ts index bb27aed7e42..140a22f0cb9 100644 --- a/elements/lisk-chain/src/state_store/state_store.ts +++ b/elements/lisk-chain/src/state_store/state_store.ts @@ -186,14 +186,14 @@ export class StateStore { for (const data of cachedValues) { existingKey[data.key.toString('binary')] = true; result.push({ - key: data.key.slice(this._prefix.length), + key: data.key.subarray(this._prefix.length), value: data.value, }); } for (const data of storedData) { if (existingKey[data.key.toString('binary')] === undefined) { result.push({ - key: data.key.slice(this._prefix.length), + key: 
data.key.subarray(this._prefix.length), value: data.value, }); } diff --git a/elements/lisk-chain/src/state_store/utils.ts b/elements/lisk-chain/src/state_store/utils.ts index 85a56c20be0..23467c785e2 100644 --- a/elements/lisk-chain/src/state_store/utils.ts +++ b/elements/lisk-chain/src/state_store/utils.ts @@ -24,6 +24,6 @@ export const copyBuffer = (value: Buffer): Buffer => { export const toSMTKey = (value: Buffer): Buffer => // First byte is the DB prefix Buffer.concat([ - value.slice(1, SMT_PREFIX_SIZE + 1), - utils.hash(value.slice(SMT_PREFIX_SIZE + 1)), + value.subarray(1, SMT_PREFIX_SIZE + 1), + utils.hash(value.subarray(SMT_PREFIX_SIZE + 1)), ]); diff --git a/elements/lisk-chain/test/unit/event.spec.ts b/elements/lisk-chain/test/unit/event.spec.ts index 4884d893451..193d0297a8e 100644 --- a/elements/lisk-chain/test/unit/event.spec.ts +++ b/elements/lisk-chain/test/unit/event.spec.ts @@ -71,7 +71,7 @@ describe('event', () => { const { key } = pairs[i]; expect(key).toHaveLength(EVENT_TOPIC_HASH_LENGTH_BYTES + EVENT_TOTAL_INDEX_LENGTH_BYTES); - const index = key.slice(EVENT_TOPIC_HASH_LENGTH_BYTES); + const index = key.subarray(EVENT_TOPIC_HASH_LENGTH_BYTES); // Check index const indexNum = index.readUInt32BE(0); diff --git a/elements/lisk-client/test/lisk-cryptography/utils.spec.ts b/elements/lisk-client/test/lisk-cryptography/utils.spec.ts index 1beb57ca23b..676499786c4 100644 --- a/elements/lisk-client/test/lisk-cryptography/utils.spec.ts +++ b/elements/lisk-client/test/lisk-cryptography/utils.spec.ts @@ -214,7 +214,7 @@ describe('buffer', () => { }); it('should be able to calculate the checkpoint from another checkpoint', () => { - const firstDistanceHashes = hashOnion(hashOnionBuffers[1].slice(), 1000, 1); + const firstDistanceHashes = hashOnion(hashOnionBuffers[1].subarray(), 1000, 1); expect(firstDistanceHashes[0]).toEqual(hashOnionBuffers[0]); expect(firstDistanceHashes[1000]).toEqual(hashOnionBuffers[1]); }); diff --git 
a/elements/lisk-codec/fuzz/round_trip.js b/elements/lisk-codec/fuzz/round_trip.js index 9ae9a018b68..6118c6dd710 100644 --- a/elements/lisk-codec/fuzz/round_trip.js +++ b/elements/lisk-codec/fuzz/round_trip.js @@ -72,7 +72,7 @@ function mutateRandomByte(buffer) { else if (mutationType < 0.66) { const index = Math.floor(Math.random() * (buffer.length + 1)); const mutation = utils.getRandomBytes(1); - buffer = Buffer.concat([buffer.slice(0, index), mutation, buffer.slice(index)]); + buffer = Buffer.concat([buffer.subarray(0, index), mutation, buffer.subarray(index)]); } // Remove a byte else { @@ -80,7 +80,7 @@ function mutateRandomByte(buffer) { return buffer; // Can't remove byte from buffer of length 1 } const index = Math.floor(Math.random() * buffer.length); - buffer = Buffer.concat([buffer.slice(0, index), buffer.slice(index + 1)]); + buffer = Buffer.concat([buffer.subarray(0, index), buffer.subarray(index + 1)]); } return buffer; diff --git a/elements/lisk-cryptography/src/address.ts b/elements/lisk-cryptography/src/address.ts index c4cad619eeb..b02170dd5c0 100644 --- a/elements/lisk-cryptography/src/address.ts +++ b/elements/lisk-cryptography/src/address.ts @@ -54,7 +54,7 @@ const convertUInt5ToBase32 = (uint5Array: number[]): string => export const getAddressFromPublicKey = (publicKey: Buffer): Buffer => { const buffer = hash(publicKey); - const truncatedBuffer = buffer.slice(0, BINARY_ADDRESS_LENGTH); + const truncatedBuffer = buffer.subarray(0, BINARY_ADDRESS_LENGTH); if (truncatedBuffer.length !== BINARY_ADDRESS_LENGTH) { throw new Error(`Lisk address must contain exactly ${BINARY_ADDRESS_LENGTH} bytes`); diff --git a/elements/lisk-cryptography/src/bls.ts b/elements/lisk-cryptography/src/bls.ts index 36248e2e312..359c1d723d9 100644 --- a/elements/lisk-cryptography/src/bls.ts +++ b/elements/lisk-cryptography/src/bls.ts @@ -181,7 +181,7 @@ const hkdfSHA256 = (ikm: Buffer, length: number, salt: Buffer, info: Buffer) => t = hmacSHA256(PRK, Buffer.concat([t, 
info, Buffer.from([1 + i])]), SHA256); OKM = Buffer.concat([OKM, t]); } - return OKM.slice(0, length); + return OKM.subarray(0, length); }; const toLamportSK = (IKM: Buffer, salt: Buffer) => { @@ -190,7 +190,7 @@ const toLamportSK = (IKM: Buffer, salt: Buffer) => { const lamportSK = []; for (let i = 0; i < 255; i += 1) { - lamportSK.push(OKM.slice(i * 32, (i + 1) * 32)); + lamportSK.push(OKM.subarray(i * 32, (i + 1) * 32)); } return lamportSK; }; diff --git a/elements/lisk-cryptography/src/ed.ts b/elements/lisk-cryptography/src/ed.ts index 33cde3c6965..2abc718e199 100644 --- a/elements/lisk-cryptography/src/ed.ts +++ b/elements/lisk-cryptography/src/ed.ts @@ -37,8 +37,8 @@ export const getPublicKeyFromPrivateKey = (pk: Buffer): Buffer => getPublicKey(p const getMasterKeyFromSeed = (seed: Buffer) => { const hmac = crypto.createHmac('sha512', ED25519_CURVE); const digest = hmac.update(seed).digest(); - const leftBytes = digest.slice(0, 32); - const rightBytes = digest.slice(32); + const leftBytes = digest.subarray(0, 32); + const rightBytes = digest.subarray(32); return { key: leftBytes, chainCode: rightBytes, @@ -50,8 +50,8 @@ const getChildKey = (node: { key: Buffer; chainCode: Buffer }, index: number) => indexBuffer.writeUInt32BE(index, 0); const data = Buffer.concat([Buffer.alloc(1, 0), node.key, indexBuffer]); const digest = crypto.createHmac('sha512', node.chainCode).update(data).digest(); - const leftBytes = digest.slice(0, 32); - const rightBytes = digest.slice(32); + const leftBytes = digest.subarray(0, 32); + const rightBytes = digest.subarray(32); return { key: leftBytes, diff --git a/elements/lisk-cryptography/src/encrypt.ts b/elements/lisk-cryptography/src/encrypt.ts index 1ed41c06735..e87fe559e3c 100644 --- a/elements/lisk-cryptography/src/encrypt.ts +++ b/elements/lisk-cryptography/src/encrypt.ts @@ -127,7 +127,7 @@ export const encryptAES128GCMWithPassword = async ( key = getKeyFromPassword(password, salt, iterations); } - const cipher = 
crypto.createCipheriv('aes-128-gcm', key.slice(0, 16), iv); + const cipher = crypto.createCipheriv('aes-128-gcm', key.subarray(0, 16), iv); const firstBlock = Buffer.isBuffer(plainText) ? cipher.update(plainText) : cipher.update(plainText, 'utf8'); @@ -136,7 +136,7 @@ export const encryptAES128GCMWithPassword = async ( return { ciphertext: encrypted.toString('hex'), - mac: crypto.createHash('sha256').update(key.slice(16, 32)).update(encrypted).digest('hex'), + mac: crypto.createHash('sha256').update(key.subarray(16, 32)).update(encrypted).digest('hex'), kdf, kdfparams: { parallelism, @@ -227,7 +227,11 @@ export async function decryptAES128GCMWithPassword( } else { key = getKeyFromPassword(password, hexToBuffer(salt, 'Salt'), iterations); } - const decipher = crypto.createDecipheriv('aes-128-gcm', key.slice(0, 16), hexToBuffer(iv, 'IV')); + const decipher = crypto.createDecipheriv( + 'aes-128-gcm', + key.subarray(0, 16), + hexToBuffer(iv, 'IV'), + ); decipher.setAuthTag(tagBuffer); const firstBlock = decipher.update(hexToBuffer(ciphertext, 'Cipher text')); const decrypted = Buffer.concat([firstBlock, decipher.final()]); diff --git a/elements/lisk-cryptography/src/legacy_address.ts b/elements/lisk-cryptography/src/legacy_address.ts index 01b4d8cb5e7..ca13f154a0a 100644 --- a/elements/lisk-cryptography/src/legacy_address.ts +++ b/elements/lisk-cryptography/src/legacy_address.ts @@ -25,10 +25,10 @@ export const getFirstEightBytesReversed = (input: string | Buffer): Buffer => { // Union type arguments on overloaded functions do not work in typescript. 
// Relevant discussion: https://github.com/Microsoft/TypeScript/issues/23155 if (typeof input === 'string') { - return reverse(Buffer.from(input).slice(0, BUFFER_SIZE)); + return reverse(Buffer.from(input).subarray(0, BUFFER_SIZE)); } - return reverse(Buffer.from(input).slice(0, BUFFER_SIZE)); + return reverse(Buffer.from(input).subarray(0, BUFFER_SIZE)); }; export const getLegacyAddressFromPublicKey = (publicKey: Buffer): string => { diff --git a/elements/lisk-cryptography/src/nacl/slow.ts b/elements/lisk-cryptography/src/nacl/slow.ts index 4b243150840..8d7f0452490 100644 --- a/elements/lisk-cryptography/src/nacl/slow.ts +++ b/elements/lisk-cryptography/src/nacl/slow.ts @@ -83,7 +83,7 @@ const PRIVATE_KEY_LENGTH = 32; export const getPublicKey: NaclInterface['getPublicKey'] = privateKey => { const { publicKey } = tweetnacl.sign.keyPair.fromSeed( - Uint8Array.from(privateKey.slice(0, PRIVATE_KEY_LENGTH)), + Uint8Array.from(privateKey.subarray(0, PRIVATE_KEY_LENGTH)), ); return Buffer.from(publicKey); diff --git a/elements/lisk-cryptography/src/utils.ts b/elements/lisk-cryptography/src/utils.ts index f90ec861494..feb84e406ca 100644 --- a/elements/lisk-cryptography/src/utils.ts +++ b/elements/lisk-cryptography/src/utils.ts @@ -176,7 +176,7 @@ const defaultCount = 1000000; const defaultDistance = 1000; export const generateHashOnionSeed = (): Buffer => - hash(getRandomBytes(INPUT_SIZE)).slice(0, HASH_SIZE); + hash(getRandomBytes(INPUT_SIZE)).subarray(0, HASH_SIZE); export const hashOnion = ( seed: Buffer, @@ -195,7 +195,7 @@ export const hashOnion = ( const hashes = [seed]; for (let i = 1; i <= count; i += 1) { - const nextHash = hash(previousHash).slice(0, HASH_SIZE); + const nextHash = hash(previousHash).subarray(0, HASH_SIZE); if (i % distance === 0) { hashes.push(nextHash); } diff --git a/elements/lisk-cryptography/test/address.spec.ts b/elements/lisk-cryptography/test/address.spec.ts index 470a11f7f8c..3cf77fe6b31 100644 --- 
a/elements/lisk-cryptography/test/address.spec.ts +++ b/elements/lisk-cryptography/test/address.spec.ts @@ -45,7 +45,7 @@ describe('address', () => { describe('#getAddressFromPrivateKey', () => { it('should create correct address', () => { - expect(getAddressFromPrivateKey(defaultPrivateKey.slice(0, 64))).toEqual(defaultAddress); + expect(getAddressFromPrivateKey(defaultPrivateKey.subarray(0, 64))).toEqual(defaultAddress); }); }); diff --git a/elements/lisk-tree/src/merkle_tree/merkle_tree.ts b/elements/lisk-tree/src/merkle_tree/merkle_tree.ts index 46703a7a455..3cdd1baa42f 100644 --- a/elements/lisk-tree/src/merkle_tree/merkle_tree.ts +++ b/elements/lisk-tree/src/merkle_tree/merkle_tree.ts @@ -452,10 +452,11 @@ export class MerkleTree { type === NodeType.BRANCH ? value.readInt32BE(BRANCH_PREFIX.length + LAYER_INDEX_SIZE) : value.readInt32BE(LEAF_PREFIX.length); - const rightHash = type === NodeType.BRANCH ? value.slice(-1 * NODE_HASH_SIZE) : Buffer.alloc(0); + const rightHash = + type === NodeType.BRANCH ? value.subarray(-1 * NODE_HASH_SIZE) : Buffer.alloc(0); const leftHash = type === NodeType.BRANCH - ? value.slice(-2 * NODE_HASH_SIZE, -1 * NODE_HASH_SIZE) + ? 
value.subarray(-2 * NODE_HASH_SIZE, -1 * NODE_HASH_SIZE) : Buffer.alloc(0); return { diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/src/utils.ts b/framework-plugins/lisk-framework-chain-connector-plugin/src/utils.ts index 780a4cdc071..b1f223b9778 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/src/utils.ts +++ b/framework-plugins/lisk-framework-chain-connector-plugin/src/utils.ts @@ -45,7 +45,7 @@ interface BFTParametersWithoutGeneratorKey extends Omit { - const networkID = chainID.slice(0, 1); + const networkID = chainID.subarray(0, 1); // 3 bytes for remaining chainID bytes return Buffer.concat([networkID, Buffer.alloc(CHAIN_ID_LENGTH - 1, 0)]); }; diff --git a/framework/src/engine/generator/generator_store.ts b/framework/src/engine/generator/generator_store.ts index d5e7bb94cae..afa92f0d8fe 100644 --- a/framework/src/engine/generator/generator_store.ts +++ b/framework/src/engine/generator/generator_store.ts @@ -71,7 +71,7 @@ export class GeneratorStore { const pairs: KeyValue[] = []; stream .on('data', ({ key, value }: { key: Buffer; value: Buffer }) => { - pairs.push({ key: key.slice(this._prefix.length), value }); + pairs.push({ key: key.subarray(this._prefix.length), value }); }) .on('error', error => { reject(error); diff --git a/framework/src/modules/base_offchain_store.ts b/framework/src/modules/base_offchain_store.ts index 67f7c00d615..b1ab320ca58 100644 --- a/framework/src/modules/base_offchain_store.ts +++ b/framework/src/modules/base_offchain_store.ts @@ -42,14 +42,14 @@ export abstract class BaseOffchainStore { public constructor(moduleName: string, version = 0) { this._version = version; - this._storePrefix = utils.hash(Buffer.from(moduleName, 'utf-8')).slice(0, 4); + this._storePrefix = utils.hash(Buffer.from(moduleName, 'utf-8')).subarray(0, 4); // eslint-disable-next-line no-bitwise this._storePrefix[0] &= 0x7f; const versionBuffer = Buffer.alloc(2); versionBuffer.writeUInt16BE(this._version, 0); 
this._subStorePrefix = utils .hash(Buffer.concat([Buffer.from(this.name, 'utf-8'), versionBuffer])) - .slice(0, 2); + .subarray(0, 2); } public async get(ctx: ImmutableOffchainStoreGetter, key: Buffer): Promise { diff --git a/framework/src/modules/base_store.ts b/framework/src/modules/base_store.ts index 5d3b6deb718..9272a2fdc41 100644 --- a/framework/src/modules/base_store.ts +++ b/framework/src/modules/base_store.ts @@ -26,7 +26,7 @@ export interface StoreGetter { // LIP: https://github.com/LiskHQ/lips/blob/main/proposals/lip-0040.md#module-store-prefix-1 export const computeStorePrefix = (name: string): Buffer => { - const prefix = utils.hash(Buffer.from(name, 'utf-8')).slice(0, 4); + const prefix = utils.hash(Buffer.from(name, 'utf-8')).subarray(0, 4); // eslint-disable-next-line no-bitwise prefix[0] &= 0x7f; return prefix; diff --git a/framework/src/modules/interoperability/mainchain/endpoint.ts b/framework/src/modules/interoperability/mainchain/endpoint.ts index e4170f3074e..dab73450f9b 100644 --- a/framework/src/modules/interoperability/mainchain/endpoint.ts +++ b/framework/src/modules/interoperability/mainchain/endpoint.ts @@ -42,8 +42,8 @@ export class MainchainInteroperabilityEndpoint extends BaseInteroperabilityEndpo validator.validate(isChainIDAvailableRequestSchema, context.params); const chainID = Buffer.from(context.params.chainID as string, 'hex'); const ownChainAccount = await this.stores.get(OwnChainAccountStore).get(context, EMPTY_BYTES); - const networkID = chainID.slice(0, 1); - const ownChainNetworkID = ownChainAccount.chainID.slice(0, 1); + const networkID = chainID.subarray(0, 1); + const ownChainNetworkID = ownChainAccount.chainID.subarray(0, 1); // Only mainchain network IDs are available if (!networkID.equals(ownChainNetworkID)) { return { diff --git a/framework/src/modules/interoperability/utils.ts b/framework/src/modules/interoperability/utils.ts index 3f5a717849d..cc1278d68af 100644 --- a/framework/src/modules/interoperability/utils.ts 
+++ b/framework/src/modules/interoperability/utils.ts @@ -270,14 +270,14 @@ export const verifyLivenessConditionForRegisteredChains = ( }; export const getMainchainID = (chainID: Buffer): Buffer => { - const networkID = chainID.slice(0, 1); + const networkID = chainID.subarray(0, 1); // 3 bytes for remaining chainID bytes return Buffer.concat([networkID, Buffer.alloc(CHAIN_ID_LENGTH - 1, 0)]); }; // TODO: Update to use Token method after merging development export const getTokenIDLSK = (chainID: Buffer): Buffer => { - const networkID = chainID.slice(0, 1); + const networkID = chainID.subarray(0, 1); // 3 bytes for remaining chainID bytes return Buffer.concat([networkID, Buffer.alloc(7, 0)]); }; diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 4ae5995f69d..b5ec26df120 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -75,7 +75,7 @@ export class NFTMethod extends BaseMethod { throw new Error(`NFT ID must have length ${LENGTH_NFT_ID}`); } - return nftID.slice(0, LENGTH_CHAIN_ID); + return nftID.subarray(0, LENGTH_CHAIN_ID); } public async getNFTOwner(methodContext: ImmutableMethodContext, nftID: Buffer): Promise { @@ -278,7 +278,7 @@ export class NFTMethod extends BaseMethod { } const latestKey = nftStoreData[nftStoreData.length - 1].key; - const indexBytes = latestKey.slice(LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID, LENGTH_NFT_ID); + const indexBytes = latestKey.subarray(LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID, LENGTH_NFT_ID); const index = indexBytes.readBigUInt64BE(); const largestIndex = BigInt(BigInt(2 ** 64) - BigInt(1)); diff --git a/framework/src/modules/pos/stores/eligible_validators.ts b/framework/src/modules/pos/stores/eligible_validators.ts index 64009ddd605..6066bd0a858 100644 --- a/framework/src/modules/pos/stores/eligible_validators.ts +++ b/framework/src/modules/pos/stores/eligible_validators.ts @@ -69,8 +69,8 @@ export class EligibleValidatorsStore extends BaseStore { 
} public splitKey(key: Buffer): [Buffer, bigint] { - const weightBytes = key.slice(0, 8); - const address = key.slice(8); + const weightBytes = key.subarray(0, 8); + const address = key.subarray(8); return [address, weightBytes.readBigUInt64BE()]; } diff --git a/framework/src/modules/random/utils.ts b/framework/src/modules/random/utils.ts index 279c5f790f5..b666fd5bc79 100644 --- a/framework/src/modules/random/utils.ts +++ b/framework/src/modules/random/utils.ts @@ -35,7 +35,7 @@ export const isSeedValidInput = ( if (!lastSeed) { return !previousSeedRequired; } - return lastSeed.seedReveal.equals(utils.hash(seedReveal).slice(0, SEED_LENGTH)); + return lastSeed.seedReveal.equals(utils.hash(seedReveal).subarray(0, SEED_LENGTH)); }; export const getRandomSeed = ( @@ -51,7 +51,7 @@ export const getRandomSeed = ( } const initRandomBuffer = utils.intToBuffer(height + numberOfSeeds, 4); - const currentSeeds = [utils.hash(initRandomBuffer).slice(0, 16)]; + const currentSeeds = [utils.hash(initRandomBuffer).subarray(0, 16)]; let isInFuture = true; for (const validatorReveal of validatorsReveal) { diff --git a/framework/src/modules/token/cc_method.ts b/framework/src/modules/token/cc_method.ts index cdc1569c5bf..5ceff2c38ef 100644 --- a/framework/src/modules/token/cc_method.ts +++ b/framework/src/modules/token/cc_method.ts @@ -179,7 +179,7 @@ export class TokenInteroperableMethod extends BaseCCMethod { public async recover(ctx: RecoverContext): Promise { const methodContext = ctx.getMethodContext(); const userStore = this.stores.get(UserStore); - const address = ctx.storeKey.slice(0, ADDRESS_LENGTH); + const address = ctx.storeKey.subarray(0, ADDRESS_LENGTH); let account: UserStoreData; if ( @@ -213,8 +213,8 @@ export class TokenInteroperableMethod extends BaseCCMethod { throw new Error('Invalid arguments.'); } - const chainID = ctx.storeKey.slice(ADDRESS_LENGTH, ADDRESS_LENGTH + CHAIN_ID_LENGTH); - const tokenID = ctx.storeKey.slice(ADDRESS_LENGTH, ADDRESS_LENGTH + 
TOKEN_ID_LENGTH); + const chainID = ctx.storeKey.subarray(ADDRESS_LENGTH, ADDRESS_LENGTH + CHAIN_ID_LENGTH); + const tokenID = ctx.storeKey.subarray(ADDRESS_LENGTH, ADDRESS_LENGTH + TOKEN_ID_LENGTH); const totalAmount = account.availableBalance + account.lockedBalances.reduce((prev, curr) => prev + curr.amount, BigInt(0)); diff --git a/framework/src/modules/token/endpoint.ts b/framework/src/modules/token/endpoint.ts index 55bf862dc1e..f7a4cd2ca27 100644 --- a/framework/src/modules/token/endpoint.ts +++ b/framework/src/modules/token/endpoint.ts @@ -52,7 +52,7 @@ export class TokenEndpoint extends BaseEndpoint { return { balances: userData.map(({ key, value: user }) => ({ - tokenID: key.slice(20).toString('hex'), + tokenID: key.subarray(20).toString('hex'), availableBalance: user.availableBalance.toString(), lockedBalances: user.lockedBalances.map(b => ({ amount: b.amount.toString(), @@ -120,7 +120,7 @@ export class TokenEndpoint extends BaseEndpoint { // main chain token const mainchainTokenID = Buffer.concat([ - context.chainID.slice(0, 1), + context.chainID.subarray(0, 1), Buffer.alloc(TOKEN_ID_LENGTH - 1, 0), ]); supportedTokens.push(mainchainTokenID.toString('hex')); @@ -168,8 +168,8 @@ export class TokenEndpoint extends BaseEndpoint { }); return { escrowedAmounts: escrowData.map(({ key, value: escrow }) => { - const escrowChainID = key.slice(0, CHAIN_ID_LENGTH); - const tokenID = key.slice(CHAIN_ID_LENGTH); + const escrowChainID = key.subarray(0, CHAIN_ID_LENGTH); + const tokenID = key.subarray(CHAIN_ID_LENGTH); return { escrowChainID: escrowChainID.toString('hex'), amount: escrow.amount.toString(), diff --git a/framework/src/modules/token/method.ts b/framework/src/modules/token/method.ts index a9fbf5cada1..a0f2c099f17 100644 --- a/framework/src/modules/token/method.ts +++ b/framework/src/modules/token/method.ts @@ -79,7 +79,7 @@ export class TokenMethod extends BaseMethod { } public getTokenIDLSK(): Buffer { - const networkID = this._config.ownChainID.slice(0, 
1); + const networkID = this._config.ownChainID.subarray(0, 1); // 3 bytes for remaining chainID bytes return Buffer.concat([networkID, Buffer.alloc(3 + LOCAL_ID_LENGTH, 0)]); } diff --git a/framework/src/modules/token/module.ts b/framework/src/modules/token/module.ts index 2abb8b71a53..2cab61f7681 100644 --- a/framework/src/modules/token/module.ts +++ b/framework/src/modules/token/module.ts @@ -399,7 +399,7 @@ export class TokenModule extends BaseInteroperableModule { ); } for (const tokenID of supportedTokenIDsData.supportedTokenIDs) { - if (!tokenID.slice(0, CHAIN_ID_LENGTH).equals(supportedTokenIDsData.chainID)) { + if (!tokenID.subarray(0, CHAIN_ID_LENGTH).equals(supportedTokenIDsData.chainID)) { throw new Error('supportedTokensSubstore tokenIDs must match the chainID.'); } } @@ -418,7 +418,7 @@ export class TokenModule extends BaseInteroperableModule { lte: Buffer.alloc(ADDRESS_LENGTH + TOKEN_ID_LENGTH, 255), }); for (const { key, value: user } of allUsers) { - const tokenID = key.slice(ADDRESS_LENGTH); + const tokenID = key.subarray(ADDRESS_LENGTH); const [chainID] = splitTokenID(tokenID); if (chainID.equals(context.chainID)) { const existingSupply = computedSupply.get(tokenID) ?? BigInt(0); @@ -435,7 +435,7 @@ export class TokenModule extends BaseInteroperableModule { lte: Buffer.alloc(CHAIN_ID_LENGTH + TOKEN_ID_LENGTH, 255), }); for (const { key, value } of allEscrows) { - const tokenID = key.slice(CHAIN_ID_LENGTH); + const tokenID = key.subarray(CHAIN_ID_LENGTH); const existingSupply = computedSupply.get(tokenID) ?? 
BigInt(0); computedSupply.set(tokenID, existingSupply + value.amount); } diff --git a/framework/src/modules/token/utils.ts b/framework/src/modules/token/utils.ts index b784a44cff1..7116cf9f14f 100644 --- a/framework/src/modules/token/utils.ts +++ b/framework/src/modules/token/utils.ts @@ -19,8 +19,8 @@ export const splitTokenID = (tokenID: TokenID): [Buffer, Buffer] => { if (tokenID.length !== TOKEN_ID_LENGTH) { throw new Error(`Token ID must have length ${TOKEN_ID_LENGTH}`); } - const chainID = tokenID.slice(0, CHAIN_ID_LENGTH); - const localID = tokenID.slice(CHAIN_ID_LENGTH); + const chainID = tokenID.subarray(0, CHAIN_ID_LENGTH); + const localID = tokenID.subarray(CHAIN_ID_LENGTH); return [chainID, localID]; }; diff --git a/framework/src/state_machine/prefixed_state_read_writer.ts b/framework/src/state_machine/prefixed_state_read_writer.ts index 04ace52cd94..e58b1716f8c 100644 --- a/framework/src/state_machine/prefixed_state_read_writer.ts +++ b/framework/src/state_machine/prefixed_state_read_writer.ts @@ -95,7 +95,7 @@ export class PrefixedStateReadWriter { }; const result = await this._readWriter.range(optionsWithKey); return result.map(kv => ({ - key: kv.key.slice(this._prefix.length), + key: kv.key.subarray(this._prefix.length), value: kv.value, })); } @@ -111,7 +111,7 @@ export class PrefixedStateReadWriter { }; const result = await this._readWriter.range(optionsWithKey); return result.map(kv => ({ - key: kv.key.slice(this._prefix.length), + key: kv.key.subarray(this._prefix.length), value: codec.decode(schema, kv.value), })); } diff --git a/framework/test/unit/engine/legacy/codec.spec.ts b/framework/test/unit/engine/legacy/codec.spec.ts index 555bdb9ff8d..63b927d3f3d 100644 --- a/framework/test/unit/engine/legacy/codec.spec.ts +++ b/framework/test/unit/engine/legacy/codec.spec.ts @@ -36,7 +36,7 @@ describe('Legacy codec', () => { }); it('should fail to decode invalid block', () => { - expect(() => decodeBlock(encodedBlock.slice(2))).toThrow(); + expect(() 
=> decodeBlock(encodedBlock.subarray(2))).toThrow(); }); }); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index b69fdf29a57..a22bc7d664f 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -211,7 +211,7 @@ describe('NFTMethod', () => { }); it('should return the first bytes of length LENGTH_CHAIN_ID from provided nftID', () => { - expect(method.getChainID(nftID)).toEqual(nftID.slice(0, LENGTH_CHAIN_ID)); + expect(method.getChainID(nftID)).toEqual(nftID.subarray(0, LENGTH_CHAIN_ID)); }); }); @@ -415,7 +415,7 @@ describe('NFTMethod', () => { attributesArray: [], }); - await supportedNFTsStore.set(methodContext, foreignNFT.slice(0, LENGTH_CHAIN_ID), { + await supportedNFTsStore.set(methodContext, foreignNFT.subarray(0, LENGTH_CHAIN_ID), { supportedCollectionIDArray: [ { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, { collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID) }, @@ -489,7 +489,7 @@ describe('NFTMethod', () => { { module: 'customMod1', attributes: Buffer.alloc(5) }, { module: 'customMod2', attributes: Buffer.alloc(2) }, ]; - const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + const collectionID = nftID.subarray(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); beforeEach(async () => { await nftStore.save(methodContext, nftID, { @@ -543,7 +543,7 @@ describe('NFTMethod', () => { describe('create', () => { const attributesArray1 = [{ module: 'customMod3', attributes: Buffer.alloc(7) }]; const attributesArray2 = [{ module: 'customMod3', attributes: Buffer.alloc(9) }]; - const collectionID = nftID.slice(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); + const collectionID = nftID.subarray(LENGTH_CHAIN_ID, LENGTH_CHAIN_ID + LENGTH_COLLECTION_ID); const address = utils.getRandomBytes(LENGTH_ADDRESS); beforeEach(() => { @@ -938,7 +938,7 @@ describe('NFTMethod', () => 
{ it('should throw and emit error transfer cross chain event if nft does not exist', async () => { const nonExistingNFTID = utils.getRandomBytes(LENGTH_NFT_ID); - receivingChainID = nonExistingNFTID.slice(0, LENGTH_CHAIN_ID); + receivingChainID = nonExistingNFTID.subarray(0, LENGTH_CHAIN_ID); await expect( method.transferCrossChain( methodContext, diff --git a/framework/test/unit/modules/pos/method.spec.ts b/framework/test/unit/modules/pos/method.spec.ts index 0692bd37d3f..d21079efc14 100644 --- a/framework/test/unit/modules/pos/method.spec.ts +++ b/framework/test/unit/modules/pos/method.spec.ts @@ -374,7 +374,7 @@ describe('PoSMethod', () => { const eligibleValidators = await pos.stores .get(EligibleValidatorsStore) .getAll(methodContext); - expect(eligibleValidators.find(v => v.key.slice(8).equals(address))).toBeDefined(); + expect(eligibleValidators.find(v => v.key.subarray(8).equals(address))).toBeDefined(); }); it('should reject changing status if the validator does not exist', async () => { diff --git a/framework/test/unit/modules/random/method.spec.ts b/framework/test/unit/modules/random/method.spec.ts index af96ebd349d..6b53c83726f 100644 --- a/framework/test/unit/modules/random/method.spec.ts +++ b/framework/test/unit/modules/random/method.spec.ts @@ -31,7 +31,7 @@ import { } from '../../../../src/modules/random/stores/validator_reveals'; const strippedHashOfIntegerBuffer = (num: number) => - cryptography.utils.hash(cryptography.utils.intToBuffer(num, 4)).slice(0, SEED_LENGTH); + cryptography.utils.hash(cryptography.utils.intToBuffer(num, 4)).subarray(0, SEED_LENGTH); describe('RandomModuleMethod', () => { let randomMethod: RandomMethod; diff --git a/framework/test/unit/modules/random/module.spec.ts b/framework/test/unit/modules/random/module.spec.ts index cc12119f634..c91ff170430 100644 --- a/framework/test/unit/modules/random/module.spec.ts +++ b/framework/test/unit/modules/random/module.spec.ts @@ -756,7 +756,7 @@ describe('RandomModule', () => { const 
seedHash = (seed: Buffer, times: number) => { let res = seed; for (let i = 0; i < times; i += 1) { - res = utils.hash(res).slice(0, 16); + res = utils.hash(res).subarray(0, 16); } return res; }; diff --git a/framework/test/unit/modules/random/utils.spec.ts b/framework/test/unit/modules/random/utils.spec.ts index 02ac79ac1d5..f284c324a8a 100644 --- a/framework/test/unit/modules/random/utils.spec.ts +++ b/framework/test/unit/modules/random/utils.spec.ts @@ -47,7 +47,7 @@ describe('Random module utils', () => { describe('isSeedValidInput', () => { const generatorAddress = utils.getRandomBytes(ADDRESS_LENGTH); const seed = utils.getRandomBytes(SEED_LENGTH); - const previousSeed = utils.hash(seed).slice(0, SEED_LENGTH); + const previousSeed = utils.hash(seed).subarray(0, SEED_LENGTH); let validatorSeedReveals: ValidatorSeedReveal[]; beforeEach(() => { diff --git a/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts b/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts index 15a449c23fd..6b6c660f018 100644 --- a/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/token/cc_commands/cc_transfer.spec.ts @@ -186,7 +186,7 @@ describe('CrossChain Transfer Command', () => { escrowStore = tokenModule.stores.get(EscrowStore); await escrowStore.set( methodContext, - Buffer.concat([defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), defaultTokenID]), + Buffer.concat([defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID]), { amount: defaultEscrowAmount }, ); await escrowStore.set( diff --git a/framework/test/unit/modules/token/cc_method.spec.ts b/framework/test/unit/modules/token/cc_method.spec.ts index 19868575031..6f219684fb2 100644 --- a/framework/test/unit/modules/token/cc_method.spec.ts +++ b/framework/test/unit/modules/token/cc_method.spec.ts @@ -144,7 +144,7 @@ describe('TokenInteroperableMethod', () => { escrowStore = tokenModule.stores.get(EscrowStore); await escrowStore.set( 
methodContext, - Buffer.concat([defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), defaultTokenID]), + Buffer.concat([defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID]), { amount: defaultEscrowAmount }, ); await escrowStore.set( diff --git a/framework/test/unit/modules/token/method.spec.ts b/framework/test/unit/modules/token/method.spec.ts index dadb0c7b82c..4b154c4a786 100644 --- a/framework/test/unit/modules/token/method.spec.ts +++ b/framework/test/unit/modules/token/method.spec.ts @@ -131,7 +131,7 @@ describe('token module', () => { const escrowStore = tokenModule.stores.get(EscrowStore); await escrowStore.set( methodContext, - Buffer.concat([defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), defaultTokenID]), + Buffer.concat([defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID]), { amount: defaultEscrowAmount }, ); }); @@ -202,7 +202,7 @@ describe('token module', () => { await expect( method.getEscrowedAmount( methodContext, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultForeignTokenID, ), ).rejects.toThrow('Only native token can have escrow amount'); @@ -218,7 +218,7 @@ describe('token module', () => { await expect( method.getEscrowedAmount( methodContext, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), Buffer.from([0, 0, 0, 1, 0, 0, 0, 1]), ), ).resolves.toBe(BigInt(0)); @@ -228,7 +228,7 @@ describe('token module', () => { await expect( method.getEscrowedAmount( methodContext, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID, ), ).resolves.toEqual(defaultEscrowAmount); @@ -511,7 +511,7 @@ describe('token module', () => { await expect( method.initializeEscrowAccount( methodContext, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID, ), ).resolves.toBeUndefined(); @@ -678,7 
+678,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt('100'), @@ -744,7 +744,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, newAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt('100'), @@ -783,7 +783,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, newAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt('100'), @@ -812,7 +812,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, newAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt('100'), @@ -847,7 +847,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), unknownToken, BigInt(100000), @@ -887,7 +887,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt('100000'), @@ -901,7 +901,7 @@ describe('token module', () => { const escrowStore = tokenModule.stores.get(EscrowStore); const { amount } = await escrowStore.get( methodContext, - escrowStore.getKey(defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), defaultTokenID), + escrowStore.getKey(defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID), ); 
expect(amount).toEqual(defaultEscrowAmount + BigInt('100000')); checkEventResult( @@ -917,7 +917,7 @@ describe('token module', () => { await method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), recipient, defaultTokenID, BigInt('100000'), @@ -930,7 +930,7 @@ describe('token module', () => { defaultAddress, 'token', CROSS_CHAIN_COMMAND_NAME_TRANSFER, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), BigInt('10000'), codec.encode(crossChainTransferMessageParams, { tokenID: defaultTokenID, @@ -952,7 +952,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt(0), @@ -967,7 +967,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt(-1), @@ -982,7 +982,7 @@ describe('token module', () => { method.transferCrossChain( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), utils.getRandomBytes(20), defaultTokenID, BigInt('100000'), @@ -1193,7 +1193,7 @@ describe('token module', () => { method.payMessageFee( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), BigInt(-1), ), ).rejects.toThrow('Invalid Message Fee'); @@ -1204,7 +1204,7 @@ describe('token module', () => { method.payMessageFee( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultAccount.availableBalance + BigInt(1), 
), ).rejects.toThrow('does not have sufficient balance'); @@ -1215,14 +1215,14 @@ describe('token module', () => { method.payMessageFee( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), BigInt(100), ), ).resolves.toBeUndefined(); const escrowStore = tokenModule.stores.get(EscrowStore); const { amount } = await escrowStore.get( methodContext, - escrowStore.getKey(defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), defaultTokenID), + escrowStore.getKey(defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), defaultTokenID), ); expect(amount).toEqual(defaultEscrowAmount + BigInt('100')); }); @@ -1232,7 +1232,7 @@ describe('token module', () => { method.payMessageFee( methodContext, defaultAddress, - defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH), + defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH), BigInt(100), ), ).resolves.toBeUndefined(); @@ -1506,7 +1506,7 @@ describe('token module', () => { }); describe('escrowSubstoreExists', () => { - const escrowChainID = defaultForeignTokenID.slice(0, CHAIN_ID_LENGTH); + const escrowChainID = defaultForeignTokenID.subarray(0, CHAIN_ID_LENGTH); it('should return false if escrow subStore does not exist for the given chain id and token id', async () => { const escrowStore = tokenModule.stores.get(EscrowStore); diff --git a/protocol-specs/generators/address_generation/index.js b/protocol-specs/generators/address_generation/index.js index e562c22a008..267bfbea6aa 100644 --- a/protocol-specs/generators/address_generation/index.js +++ b/protocol-specs/generators/address_generation/index.js @@ -35,7 +35,7 @@ const GENERATOR = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]; const getBinaryAddress = publicKey => { const publicKeyBuffer = Buffer.from(publicKey, 'hex'); - return utils.hash(publicKeyBuffer).slice(0, 20); + return utils.hash(publicKeyBuffer).subarray(0, 20); }; const polymod = uint5Array => { diff --git 
a/protocol-specs/generators/pos_generator_selection/sample_generator.js b/protocol-specs/generators/pos_generator_selection/sample_generator.js index 5b1b19e2486..56761039f8e 100644 --- a/protocol-specs/generators/pos_generator_selection/sample_generator.js +++ b/protocol-specs/generators/pos_generator_selection/sample_generator.js @@ -26,7 +26,7 @@ const generateValidators = (num, fixedNum) => { for (let i = 0; i < num; i += 1) { const passphrase = Mnemonic.generateMnemonic(); const { publicKey } = ed.getKeys(passphrase); - const address = utils.hash(Buffer.from(publicKey, 'hex')).slice(0, 20); + const address = utils.hash(Buffer.from(publicKey, 'hex')).subarray(0, 20); const buf = crypto.randomBytes(8); const randomNumber = buf.readBigUInt64BE() / BigInt(10) ** BigInt(8); const validatorWeight = fixedValue diff --git a/protocol-specs/generators/pos_random_seed_generation/index.js b/protocol-specs/generators/pos_random_seed_generation/index.js index e7324eec595..7513708cfcb 100644 --- a/protocol-specs/generators/pos_random_seed_generation/index.js +++ b/protocol-specs/generators/pos_random_seed_generation/index.js @@ -35,7 +35,7 @@ const strippedHash = data => { throw new Error('Hash input is not a valid type'); } - return utils.hash(data).slice(0, 16); + return utils.hash(data).subarray(0, 16); }; const bitwiseXOR = bufferArray => { diff --git a/protocol-specs/generators/pos_validator_shuffling/sample_generator.js b/protocol-specs/generators/pos_validator_shuffling/sample_generator.js index 788276feb9d..da3875048c2 100644 --- a/protocol-specs/generators/pos_validator_shuffling/sample_generator.js +++ b/protocol-specs/generators/pos_validator_shuffling/sample_generator.js @@ -25,7 +25,7 @@ const generateValidators = num => { for (let i = 0; i < num; i += 1) { const passphrase = Mnemonic.generateMnemonic(); const { publicKey } = ed.getKeys(passphrase); - const address = utils.hash(Buffer.from(publicKey, 'hex')).slice(0, 20); + const address = 
utils.hash(Buffer.from(publicKey, 'hex')).subarray(0, 20); validatorList.push({ address, From 39b89af3da8657f12f0dbf3f9bfa33491fd3c14d Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Wed, 20 Sep 2023 10:40:30 +0100 Subject: [PATCH 126/170] Update destroy and lock events of NFT module (#8978) Update events --- framework/src/modules/nft/events/destroy.ts | 2 +- framework/src/modules/nft/events/lock.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts index 15bf0ffb7ad..4fdb823e694 100644 --- a/framework/src/modules/nft/events/destroy.ts +++ b/framework/src/modules/nft/events/destroy.ts @@ -54,6 +54,6 @@ export class DestroyEvent extends BaseEvent Date: Wed, 20 Sep 2023 14:58:53 +0100 Subject: [PATCH 127/170] Remove unnecessary calls to validator.validate in commands of NFT module (#9012) Remove validation in verify hook --- .../modules/nft/cc_commands/cc_transfer.ts | 1 - .../src/modules/nft/commands/transfer.ts | 3 - .../nft/commands/transfer_cross_chain.ts | 3 - .../nft/cc_comands/cc_transfer.spec.ts | 34 -------- .../modules/nft/commands/transfer.spec.ts | 40 --------- .../nft/commands/transfer_cross_chain.spec.ts | 86 ------------------- 6 files changed, 167 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index c8f9000446a..74c06e9efbd 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -53,7 +53,6 @@ export class CrossChainTransferCommand extends BaseCCCommand { crossChainNFTTransferMessageParamsSchema, ccm.params, ); - validator.validate(crossChainNFTTransferMessageParamsSchema, params); if (ccm.status > MAX_RESERVED_ERROR_STATUS) { throw new Error('Invalid CCM error code'); diff --git a/framework/src/modules/nft/commands/transfer.ts 
b/framework/src/modules/nft/commands/transfer.ts index 85a984c00a2..3da6043b42f 100644 --- a/framework/src/modules/nft/commands/transfer.ts +++ b/framework/src/modules/nft/commands/transfer.ts @@ -12,7 +12,6 @@ * Removal or modification of this copyright notice is prohibited. */ -import { validator } from '@liskhq/lisk-validator'; import { CommandExecuteContext, CommandVerifyContext, @@ -45,8 +44,6 @@ export class TransferCommand extends BaseCommand { public async verify(context: CommandVerifyContext): Promise { const { params } = context; - validator.validate(this.schema, params); - const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(context, params.nftID); diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index daf3c4f472f..9fa0cd4f20d 100644 --- a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -12,7 +12,6 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { validator } from '@liskhq/lisk-validator'; import { crossChainTransferParamsSchema } from '../schemas'; import { NFTStore } from '../stores/nft'; import { NFTMethod } from '../method'; @@ -59,8 +58,6 @@ export class TransferCrossChainCommand extends BaseCommand { public async verify(context: CommandVerifyContext): Promise { const { params } = context; - validator.validate(this.schema, params); - const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(context.getMethodContext(), params.nftID); diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index c2fadce28a4..a8b20e3144d 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -183,40 +183,6 @@ describe('CrossChain Transfer Command', () => { await expect(command.verify(context)).resolves.toBeUndefined(); }); - it('throw for if validation fails', async () => { - params = codec.encode(crossChainNFTTransferMessageParamsSchema, { - nftID: Buffer.alloc(LENGTH_NFT_ID + 1, 1), - senderAddress, - recipientAddress, - attributesArray, - data: '', - }); - ccm = { - crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, - module: module.name, - nonce: BigInt(1), - sendingChainID, - receivingChainID, - fee: BigInt(30000), - status: CCM_STATUS_OK, - params, - }; - context = { - ccm, - transaction: defaultTransaction, - header: defaultHeader, - stateStore, - contextStore, - getMethodContext, - eventQueue, - getStore, - logger: fakeLogger, - chainID, - }; - - await expect(command.verify(context)).rejects.toThrow(`Property '.nftID' maxLength exceeded`); - }); - it('throw for invalid ccm status', async () => { ccm = { crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, diff --git a/framework/test/unit/modules/nft/commands/transfer.spec.ts b/framework/test/unit/modules/nft/commands/transfer.spec.ts index 
a4cae3f8054..b9889fa9f06 100644 --- a/framework/test/unit/modules/nft/commands/transfer.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer.spec.ts @@ -99,46 +99,6 @@ describe('Transfer command', () => { }); describe('verify', () => { - it('should fail if nftID does not have valid length', async () => { - const nftMinLengthContext = createTransactionContextWithOverridingParams({ - nftID: Buffer.alloc(LENGTH_NFT_ID - 1, 1), - }); - - const nftMaxLengthContext = createTransactionContextWithOverridingParams({ - nftID: Buffer.alloc(LENGTH_NFT_ID + 1, 1), - }); - - await expect( - command.verify(nftMinLengthContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow("'.nftID' minLength not satisfied"); - - await expect( - command.verify(nftMaxLengthContext.createCommandExecuteContext(transferParamsSchema)), - ).rejects.toThrow("'.nftID' maxLength exceeded"); - }); - - it('should fail if recipientAddress is not 20 bytes', async () => { - const recipientAddressIncorrectLengthContext = createTransactionContextWithOverridingParams({ - recipientAddress: utils.getRandomBytes(22), - }); - - await expect( - command.verify( - recipientAddressIncorrectLengthContext.createCommandVerifyContext(transferParamsSchema), - ), - ).rejects.toThrow("'.recipientAddress' address length invalid"); - }); - - it('should fail if data exceeds 64 characters', async () => { - const dataIncorrectLengthContext = createTransactionContextWithOverridingParams({ - data: '1'.repeat(65), - }); - - await expect( - command.verify(dataIncorrectLengthContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow("'.data' must NOT have more than 64 characters"); - }); - it('should fail if nftID does not exist', async () => { const nftIDNotExistingContext = createTransactionContextWithOverridingParams({ nftID: Buffer.alloc(LENGTH_NFT_ID, 0), diff --git a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts 
b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts index d7c2d9e0d59..83eeb065f67 100644 --- a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts @@ -215,92 +215,6 @@ describe('TransferCrossChainComand', () => { ).rejects.toThrow('Receiving chain cannot be the sending chain'); }); - it('should fail if NFT does not have valid length', async () => { - const nftMinLengthContext = createTransactionContextWithOverridingParams({ - nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1), - }); - - const nftMaxLengthContext = createTransactionContextWithOverridingParams({ - nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1), - }); - - await expect( - command.verify( - nftMinLengthContext.createCommandVerifyContext(crossChainTransferParamsSchema), - ), - ).rejects.toThrow("'.nftID' minLength not satisfied"); - - await expect( - command.verify( - nftMaxLengthContext.createCommandExecuteContext(crossChainTransferParamsSchema), - ), - ).rejects.toThrow("'.nftID' maxLength exceeded"); - }); - - it('should fail if receivingChainID does not have valid length', async () => { - const receivingChainIDMinLengthContext = createTransactionContextWithOverridingParams({ - receivingChainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1), - }); - - const receivingChainIDMaxLengthContext = createTransactionContextWithOverridingParams({ - receivingChainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1), - }); - - await expect( - command.verify( - receivingChainIDMinLengthContext.createCommandVerifyContext( - crossChainTransferParamsSchema, - ), - ), - ).rejects.toThrow("'.receivingChainID' minLength not satisfied"); - - await expect( - command.verify( - receivingChainIDMaxLengthContext.createCommandVerifyContext( - crossChainTransferParamsSchema, - ), - ), - ).rejects.toThrow("'.receivingChainID' maxLength exceeded"); - }); - - it('should fail if recipientAddress does not have valid length', async () 
=> { - const recipientAddressMinLengthContext = createTransactionContextWithOverridingParams({ - recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS - 1), - }); - - const recipientAddressMaxLenghtContext = createTransactionContextWithOverridingParams({ - recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS + 1), - }); - - await expect( - command.verify( - recipientAddressMinLengthContext.createCommandVerifyContext( - crossChainTransferParamsSchema, - ), - ), - ).rejects.toThrow("'.recipientAddress' address length invalid"); - - await expect( - command.verify( - recipientAddressMaxLenghtContext.createCommandVerifyContext( - crossChainTransferParamsSchema, - ), - ), - ).rejects.toThrow("'.recipientAddress' address length invalid"); - }); - - it('should fail if data has more than 64 characters', async () => { - const dataMaxLengthContext = createTransactionContextWithOverridingParams({ - data: '1'.repeat(65), - }); - - await expect( - command.verify( - dataMaxLengthContext.createCommandVerifyContext(crossChainTransferParamsSchema), - ), - ).rejects.toThrow("'.data' must NOT have more than 64 characters"); - }); - it('should fail if NFT does not exist', async () => { const context = createTransactionContextWithOverridingParams({ nftID: utils.getRandomBytes(LENGTH_NFT_ID), From 3db42a3c0edaf733c3c7c4b4227ac223b4c61c0c Mon Sep 17 00:00:00 2001 From: Franco NG Date: Wed, 20 Sep 2023 16:55:36 +0200 Subject: [PATCH 128/170] Update lisk-codec to check int range during write (#9010) Co-authored-by: !shan --- elements/lisk-codec/src/varint.ts | 18 ++++++++++++++++++ elements/lisk-codec/test/varint.spec.ts | 19 +++++++++++++++++++ .../terminate_sidechain_for_liveness.spec.ts | 2 +- .../token/init_genesis_state_fixture.ts | 16 ---------------- 4 files changed, 38 insertions(+), 17 deletions(-) diff --git a/elements/lisk-codec/src/varint.ts b/elements/lisk-codec/src/varint.ts index 92ce7495384..6494fe1714b 100644 --- a/elements/lisk-codec/src/varint.ts +++ 
b/elements/lisk-codec/src/varint.ts @@ -14,10 +14,16 @@ /* eslint-disable no-bitwise */ /* eslint-disable no-param-reassign */ +import { MAX_SINT32, MAX_SINT64, MAX_UINT32, MAX_UINT64 } from '@liskhq/lisk-validator'; + const msg = 0x80; const rest = 0x7f; export const writeUInt32 = (value: number): Buffer => { + if (value > MAX_UINT32) { + throw new Error('Value out of range of uint32'); + } + const result: number[] = []; let index = 0; while (value > rest) { @@ -32,6 +38,10 @@ export const writeUInt32 = (value: number): Buffer => { }; export const writeSInt32 = (value: number): Buffer => { + if (value > MAX_SINT32) { + throw new Error('Value out of range of sint32'); + } + if (value >= 0) { return writeUInt32(2 * value); } @@ -39,6 +49,10 @@ export const writeSInt32 = (value: number): Buffer => { }; export const writeUInt64 = (value: bigint): Buffer => { + if (value > MAX_UINT64) { + throw new Error('Value out of range of uint64'); + } + const result: number[] = []; let index = 0; while (value > BigInt(rest)) { @@ -53,6 +67,10 @@ export const writeUInt64 = (value: bigint): Buffer => { }; export const writeSInt64 = (value: bigint): Buffer => { + if (value > MAX_SINT64) { + throw new Error('Value out of range of sint64'); + } + if (value >= BigInt(0)) { return writeUInt64(BigInt(2) * value); } diff --git a/elements/lisk-codec/test/varint.spec.ts b/elements/lisk-codec/test/varint.spec.ts index 8dbfa8a9586..956f1a36055 100644 --- a/elements/lisk-codec/test/varint.spec.ts +++ b/elements/lisk-codec/test/varint.spec.ts @@ -11,6 +11,9 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ + +import { MAX_SINT32, MAX_SINT64, MAX_UINT32, MAX_UINT64 } from '@liskhq/lisk-validator'; + import { writeUInt32, writeSInt32, @@ -24,6 +27,22 @@ import { describe('varint', () => { describe('writer', () => { + it('should fail to encode uint32 when input is out of range', () => { + expect(() => writeUInt32(MAX_UINT32 + 1)).toThrow('Value out of range of uint32'); + }); + + it('should fail to encode uint64 when input is out of range', () => { + expect(() => writeUInt64(MAX_UINT64 + BigInt(1))).toThrow('Value out of range of uint64'); + }); + + it('should fail to encode sint32 when input is out of range', () => { + expect(() => writeSInt32(MAX_SINT32 + 1)).toThrow('Value out of range of sint32'); + }); + + it('should fail to encode sint64 when input is out of range', () => { + expect(() => writeSInt64(MAX_SINT64 + BigInt(1))).toThrow('Value out of range of sint64'); + }); + it('should encode uint32', () => { expect(writeUInt32(0)).toEqual(Buffer.from('00', 'hex')); expect(writeUInt32(300)).toEqual(Buffer.from('ac02', 'hex')); diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts index 4acfc46560a..70c630dae63 100644 --- a/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts @@ -68,7 +68,7 @@ describe('TerminateSidechainForLivenessCommand', () => { lastCertificate: { height: 10, stateRoot: utils.getRandomBytes(32), - timestamp: Date.now(), + timestamp: Math.floor(Date.now() / 1000), validatorsHash: utils.getRandomBytes(32), }, name: 'staleSidechain', diff --git a/framework/test/unit/modules/token/init_genesis_state_fixture.ts b/framework/test/unit/modules/token/init_genesis_state_fixture.ts index d99ac0aedf8..93a6dc5e14b 100644 --- 
a/framework/test/unit/modules/token/init_genesis_state_fixture.ts +++ b/framework/test/unit/modules/token/init_genesis_state_fixture.ts @@ -135,22 +135,6 @@ export const invalidGenesisAssets = [ }, "tokenID' maxLength exceeded", ], - [ - 'Overflow uint64 for available balance', - { - ...validData, - userSubstore: [ - { - address: Buffer.alloc(20, 0), - tokenID: Buffer.from([9, 0, 0, 0, 0, 0, 0, 0, 0]), - availableBalance: BigInt('1000000000000000000000000000'), - lockedBalances: [{ module: 'pos', amount: oneUnit }], - }, - ...validData.userSubstore.slice(1), - ], - }, - 'Value out of range of uint64', - ], [ 'Unsorted userstore by address', { From 5ec14a091f062d0795c534d79f5fb2c6127e6e70 Mon Sep 17 00:00:00 2001 From: Franco NG Date: Wed, 20 Sep 2023 17:33:31 +0200 Subject: [PATCH 129/170] PoA Improvements #2 (#8991) * Code improvements and remove shuffleValidatorList from PoA * Revert changes that has already been resolved in #8967 at update_generator_key --------- Co-authored-by: !shan --- framework/src/modules/poa/module.ts | 41 +++++++-------- framework/src/modules/poa/types.ts | 8 +-- framework/src/modules/poa/utils.ts | 42 ---------------- framework/src/modules/pos/module.ts | 6 +-- framework/src/modules/pos/utils.ts | 27 +--------- framework/src/modules/utils/index.ts | 15 ++++++ .../src/modules/utils/shuffleValidatorList.ts | 46 +++++++++++++++++ framework/src/modules/validators/method.ts | 2 +- .../test/unit/modules/poa/module.spec.ts | 5 +- framework/test/unit/modules/poa/utils.spec.ts | 50 ------------------- framework/test/unit/modules/pos/utils.spec.ts | 36 +------------ .../utils/shuffleValidatorList.spec.ts | 44 ++++++++++++++++ 12 files changed, 134 insertions(+), 188 deletions(-) create mode 100644 framework/src/modules/utils/index.ts create mode 100644 framework/src/modules/utils/shuffleValidatorList.ts delete mode 100644 framework/test/unit/modules/poa/utils.spec.ts create mode 100644 framework/test/unit/modules/utils/shuffleValidatorList.spec.ts 
diff --git a/framework/src/modules/poa/module.ts b/framework/src/modules/poa/module.ts index 167463be4b2..89349ad70d8 100644 --- a/framework/src/modules/poa/module.ts +++ b/framework/src/modules/poa/module.ts @@ -35,7 +35,7 @@ import { SUBSTORE_PREFIX_NAME_INDEX, SUBSTORE_PREFIX_SNAPSHOT_INDEX, } from './constants'; -import { shuffleValidatorList } from './utils'; +import { shuffleValidatorList } from '../utils'; import { NextValidatorsSetter, MethodContext } from '../../state_machine/types'; import { configSchema, @@ -52,6 +52,7 @@ import { RandomMethod, ModuleConfigJSON, ModuleConfig, + ActiveValidator, } from './types'; import { RegisterAuthorityCommand } from './commands/register_authority'; import { UpdateAuthorityCommand } from './commands/update_authority'; @@ -179,7 +180,10 @@ export class PoAModule extends BaseModule { previousLengthValidators, ); - const nextValidators = shuffleValidatorList(randomSeed, snapshot1.validators); + const nextValidators = shuffleValidatorList( + randomSeed, + snapshot1.validators, + ); await this._validatorsMethod.setValidatorsParams( context as MethodContext, @@ -220,23 +224,18 @@ export class PoAModule extends BaseModule { throw new Error('`address` property of all entries in validators must be distinct.'); } - const sortedValidatorsByAddress = [...validatorAddresses].sort((a, b) => a.compare(b)); - for (let i = 0; i < validators.length; i += 1) { - // Check that entries in the validators array are ordered lexicographically according to address. 
- if (!validatorAddresses[i].equals(sortedValidatorsByAddress[i])) { - throw new Error('`validators` must be ordered lexicographically by address.'); - } + if (!objects.isBufferArrayOrdered(validatorAddresses)) { + throw new Error('`validators` must be ordered lexicographically by address.'); + } - if (!POA_VALIDATOR_NAME_REGEX.test(validators[i].name)) { + for (const poaValidator of validators) { + if (!POA_VALIDATOR_NAME_REGEX.test(poaValidator.name)) { throw new Error('`name` property is invalid. Must contain only characters a-z0-9!@$&_.'); } } const { activeValidators, threshold } = snapshotSubstore; const activeValidatorAddresses = activeValidators.map(v => v.address); - const sortedActiveValidatorsByAddress = [...activeValidatorAddresses].sort((a, b) => - a.compare(b), - ); const validatorAddressesString = validatorAddresses.map(a => a.toString('hex')); let totalWeight = BigInt(0); @@ -245,25 +244,21 @@ export class PoAModule extends BaseModule { throw new Error('`address` properties in `activeValidators` must be distinct.'); } - for (let i = 0; i < activeValidators.length; i += 1) { - // Check that entries in the snapshotSubstore.activeValidators array are ordered lexicographically according to address. - if (!activeValidators[i].address.equals(sortedActiveValidatorsByAddress[i])) { - throw new Error( - '`activeValidators` must be ordered lexicographically by address property.', - ); - } - + if (!objects.isBufferArrayOrdered(activeValidatorAddresses)) { + throw new Error('`activeValidators` must be ordered lexicographically by address property.'); + } + for (const activeValidator of activeValidators) { // Check that for every element activeValidator in the snapshotSubstore.activeValidators array, there is an entry validator in the validators array with validator.address == activeValidator.address. 
- if (!validatorAddressesString.includes(activeValidators[i].address.toString('hex'))) { + if (!validatorAddressesString.includes(activeValidator.address.toString('hex'))) { throw new Error('`activeValidator` address is missing from validators array.'); } // Check that the weight property of every entry in the snapshotSubstore.activeValidators array is a positive integer. - if (activeValidators[i].weight <= BigInt(0)) { + if (activeValidator.weight <= BigInt(0)) { throw new Error('`activeValidators` weight must be positive integer.'); } - totalWeight += activeValidators[i].weight; + totalWeight += activeValidator.weight; } if (totalWeight > MAX_UINT64) { diff --git a/framework/src/modules/poa/types.ts b/framework/src/modules/poa/types.ts index 134ca1c4567..439d7538b4c 100644 --- a/framework/src/modules/poa/types.ts +++ b/framework/src/modules/poa/types.ts @@ -27,8 +27,8 @@ export type ModuleConfigJSON = JSONObject; export interface RegisterAuthorityParams { name: string; blsKey: Buffer; - generatorKey: Buffer; proofOfPossession: Buffer; + generatorKey: Buffer; } export interface UpdateAuthorityParams { @@ -42,12 +42,6 @@ export interface UpdateAuthorityParams { aggregationBits: Buffer; } -export interface ValidatorWeightWithRoundHash { - readonly address: Buffer; - weight: bigint; - roundHash: Buffer; -} - export interface ValidatorsMethod { setValidatorGeneratorKey( methodContext: MethodContext, diff --git a/framework/src/modules/poa/utils.ts b/framework/src/modules/poa/utils.ts index 9ac361baaec..e69de29bb2d 100644 --- a/framework/src/modules/poa/utils.ts +++ b/framework/src/modules/poa/utils.ts @@ -1,42 +0,0 @@ -/* - * Copyright © 2023 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. 
- * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. - */ - -import { utils } from '@liskhq/lisk-cryptography'; -import { ValidatorWeightWithRoundHash, ActiveValidator } from './types'; - -// Same as pos/utils/shuffleValidatorList -export const shuffleValidatorList = ( - roundSeed: Buffer, - validators: ActiveValidator[], -): ValidatorWeightWithRoundHash[] => { - const validatorsWithRoundHash: ValidatorWeightWithRoundHash[] = []; - for (const validator of validators) { - const seedSource = Buffer.concat([roundSeed, validator.address]); - validatorsWithRoundHash.push({ - ...validator, - roundHash: utils.hash(seedSource), - }); - } - - validatorsWithRoundHash.sort((validator1, validator2) => { - const diff = validator1.roundHash.compare(validator2.roundHash); - if (diff !== 0) { - return diff; - } - - return validator1.address.compare(validator2.address); - }); - - return validatorsWithRoundHash; -}; diff --git a/framework/src/modules/pos/module.ts b/framework/src/modules/pos/module.ts index 06520f17aa1..70c6c16eb82 100644 --- a/framework/src/modules/pos/module.ts +++ b/framework/src/modules/pos/module.ts @@ -70,12 +70,11 @@ import { equalUnlocking, isUsername, selectStandbyValidators, - shuffleValidatorList, sortUnlocking, getModuleConfig, getValidatorWeight, - ValidatorWeight, isSharingCoefficientSorted, + ValidatorWeight, } from './utils'; import { ValidatorStore } from './stores/validator'; import { GenesisDataStore } from './stores/genesis'; @@ -93,6 +92,7 @@ import { CommissionChangeEvent } from './events/commission_change'; import { ClaimRewardsCommand } from './commands/claim_rewards'; import { getMainchainID } from '../interoperability/utils'; import { RewardsAssignedEvent } from 
'./events/rewards_assigned'; +import { shuffleValidatorList } from '../utils'; export class PoSModule extends BaseModule { public method = new PoSMethod(this.stores, this.events); @@ -690,7 +690,7 @@ export class PoSModule extends BaseModule { } // Update the validators - const shuffledValidators = shuffleValidatorList(randomSeed1, validators); + const shuffledValidators = shuffleValidatorList(randomSeed1, validators); let aggregateBFTWeight = BigInt(0); const bftValidators: { address: Buffer; bftWeight: bigint }[] = []; for (const v of shuffledValidators) { diff --git a/framework/src/modules/pos/utils.ts b/framework/src/modules/pos/utils.ts index c91b08b57ec..b4ce7d2ce34 100644 --- a/framework/src/modules/pos/utils.ts +++ b/framework/src/modules/pos/utils.ts @@ -12,7 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ -import { utils, ed } from '@liskhq/lisk-cryptography'; +import { ed } from '@liskhq/lisk-cryptography'; import { math } from '@liskhq/lisk-utils'; import { ModuleConfig, @@ -99,31 +99,6 @@ export const pickStandByValidator = ( return -1; }; -export const shuffleValidatorList = ( - previousRoundSeed1: Buffer, - addresses: ValidatorWeight[], -): ValidatorWeight[] => { - const validatorList = [...addresses].map(validator => ({ - ...validator, - })) as { address: Buffer; roundHash: Buffer; weight: bigint }[]; - - for (const validator of validatorList) { - const seedSource = Buffer.concat([previousRoundSeed1, validator.address]); - validator.roundHash = utils.hash(seedSource); - } - - validatorList.sort((validator1, validator2) => { - const diff = validator1.roundHash.compare(validator2.roundHash); - if (diff !== 0) { - return diff; - } - - return validator1.address.compare(validator2.address); - }); - - return validatorList; -}; - export const selectStandbyValidators = ( validatorWeights: ValidatorWeight[], randomSeed1: Buffer, diff --git a/framework/src/modules/utils/index.ts b/framework/src/modules/utils/index.ts new file 
mode 100644 index 00000000000..945dcdf9f82 --- /dev/null +++ b/framework/src/modules/utils/index.ts @@ -0,0 +1,15 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +export { shuffleValidatorList } from './shuffleValidatorList'; diff --git a/framework/src/modules/utils/shuffleValidatorList.ts b/framework/src/modules/utils/shuffleValidatorList.ts new file mode 100644 index 00000000000..1cfea0ae9bf --- /dev/null +++ b/framework/src/modules/utils/shuffleValidatorList.ts @@ -0,0 +1,46 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +import { utils } from '@liskhq/lisk-cryptography'; + +export const shuffleValidatorList = < + T extends { + readonly address: Buffer; + weight: bigint; + }, +>( + roundSeed: Buffer, + addresses: T[], +): (T & { roundHash: Buffer })[] => { + const validatorList = [...addresses].map(validator => ({ + ...validator, + roundHash: Buffer.from([]), + })) as (T & { roundHash: Buffer })[]; + + for (const validator of validatorList) { + const seedSource = Buffer.concat([roundSeed, validator.address]); + validator.roundHash = utils.hash(seedSource); + } + + validatorList.sort((validator1, validator2) => { + const diff = validator1.roundHash.compare(validator2.roundHash); + if (diff !== 0) { + return diff; + } + + return validator1.address.compare(validator2.address); + }); + + return validatorList; +}; diff --git a/framework/src/modules/validators/method.ts b/framework/src/modules/validators/method.ts index 0fa0cc242c0..24a33a1e05c 100644 --- a/framework/src/modules/validators/method.ts +++ b/framework/src/modules/validators/method.ts @@ -363,7 +363,7 @@ export class ValidatorsMethod extends BaseMethod { throw new Error(`BLS public key must be ${BLS_PUBLIC_KEY_LENGTH} bytes long.`); } if (args.proofOfPossession && args.proofOfPossession.length !== BLS_POP_LENGTH) { - throw new Error(`Proof of possesion must be ${BLS_POP_LENGTH} bytes long.`); + throw new Error(`Proof of Possession must be ${BLS_POP_LENGTH} bytes long.`); } if (args.generatorKey && args.generatorKey.length !== ED25519_PUBLIC_KEY_LENGTH) { throw new Error(`Generator key must be ${ED25519_PUBLIC_KEY_LENGTH} bytes long.`); diff --git a/framework/test/unit/modules/poa/module.spec.ts b/framework/test/unit/modules/poa/module.spec.ts index fd7ed1c6928..491dfd64d09 100644 --- a/framework/test/unit/modules/poa/module.spec.ts +++ b/framework/test/unit/modules/poa/module.spec.ts @@ -35,6 +35,7 @@ import { LENGTH_GENERATOR_KEY, } from '../../../../src/modules/poa/constants'; import { + ActiveValidator, FeeMethod, 
ModuleConfigJSON, RandomMethod, @@ -55,7 +56,7 @@ import { SnapshotObject, ChainProperties, } from '../../../../src/modules/poa/stores'; -import { shuffleValidatorList } from '../../../../src/modules/poa/utils'; +import { shuffleValidatorList } from '../../../../src/modules/utils'; describe('PoA module', () => { let poaModule: PoAModule; @@ -306,7 +307,7 @@ describe('PoA module', () => { for (const validator of snapshot1.validators) { validators.push(validator); } - const nextValidators = shuffleValidatorList(randomSeed, validators); + const nextValidators = shuffleValidatorList(randomSeed, validators); await poaModule.afterTransactionsExecute(context); expect(poaModule.stores.get(SnapshotStore).set).toHaveBeenCalledWith( context, diff --git a/framework/test/unit/modules/poa/utils.spec.ts b/framework/test/unit/modules/poa/utils.spec.ts deleted file mode 100644 index 64ba1cd35ad..00000000000 --- a/framework/test/unit/modules/poa/utils.spec.ts +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright © 2023 Lisk Foundation - * - * See the LICENSE file at the top-level directory of this distribution - * for licensing information. - * - * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, - * no part of this software, including this file, may be copied, modified, - * propagated, or distributed except according to the terms contained in the - * LICENSE file. - * - * Removal or modification of this copyright notice is prohibited. 
- */ - -import { address as cryptoAddress } from '@liskhq/lisk-cryptography'; -import { shuffleValidatorList } from '../../../../src/modules/poa/utils'; -import * as validatorShufflingScenario from '../../../fixtures/pos_validator_shuffling/uniformly_shuffled_validator_list.json'; - -// Same as pos/utils/shuffleValidatorList -describe('utils', () => { - describe('shuffleValidatorList', () => { - const { previousRoundSeed1 } = validatorShufflingScenario.testCases.input; - const validatorsList = [...validatorShufflingScenario.testCases.input.validatorList].map( - address => ({ - address: Buffer.from(address, 'hex'), - weight: BigInt(1), - }), - ); - it('should return a list of uniformly shuffled list of validators', () => { - const shuffledValidatorList = shuffleValidatorList( - Buffer.from(previousRoundSeed1, 'hex'), - validatorsList, - ); - const lisk32Addresses = validatorsList.map(a => - cryptoAddress.getLisk32AddressFromAddress(a.address), - ); - - expect(shuffledValidatorList).toHaveLength(validatorsList.length); - shuffledValidatorList.forEach(validator => - expect(lisk32Addresses).toContain( - cryptoAddress.getLisk32AddressFromAddress(validator.address), - ), - ); - - expect(shuffledValidatorList.map(b => b.address.toString('hex'))).toEqual( - validatorShufflingScenario.testCases.output.validatorList, - ); - }); - }); -}); diff --git a/framework/test/unit/modules/pos/utils.spec.ts b/framework/test/unit/modules/pos/utils.spec.ts index 1a0e942d49c..a4913ebb8af 100644 --- a/framework/test/unit/modules/pos/utils.spec.ts +++ b/framework/test/unit/modules/pos/utils.spec.ts @@ -11,7 +11,7 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ -import { address as cryptoAddress } from '@liskhq/lisk-cryptography'; + import { math } from '@liskhq/lisk-utils'; import { defaultConfig, TOKEN_ID_LENGTH } from '../../../../src/modules/pos/constants'; import { @@ -19,43 +19,11 @@ import { ModuleConfigJSON, StakeSharingCoefficient, } from '../../../../src/modules/pos/types'; -import { - calculateStakeRewards, - getModuleConfig, - shuffleValidatorList, -} from '../../../../src/modules/pos/utils'; -import * as validatorShufflingScenario from '../../../fixtures/pos_validator_shuffling/uniformly_shuffled_validator_list.json'; +import { calculateStakeRewards, getModuleConfig } from '../../../../src/modules/pos/utils'; const { q96 } = math; describe('utils', () => { - describe('shuffleValidatorList', () => { - const { previousRoundSeed1 } = validatorShufflingScenario.testCases.input; - const addressList = [...validatorShufflingScenario.testCases.input.validatorList].map( - address => ({ - address: Buffer.from(address, 'hex'), - weight: BigInt(1), - }), - ); - it('should return a list of uniformly shuffled list of validators', () => { - const shuffledValidatorList = shuffleValidatorList( - Buffer.from(previousRoundSeed1, 'hex'), - addressList, - ); - - expect(shuffledValidatorList).toHaveLength(addressList.length); - shuffledValidatorList.forEach(validator => - expect( - addressList.map(a => cryptoAddress.getLisk32AddressFromAddress(a.address)), - ).toContain(cryptoAddress.getLisk32AddressFromAddress(validator.address)), - ); - - expect(shuffledValidatorList.map(b => b.address.toString('hex'))).toEqual( - validatorShufflingScenario.testCases.output.validatorList, - ); - }); - }); - describe('getModuleConfig', () => { it('converts ModuleConfigJSON to ModuleConfig', () => { const expected: ModuleConfig = { diff --git a/framework/test/unit/modules/utils/shuffleValidatorList.spec.ts b/framework/test/unit/modules/utils/shuffleValidatorList.spec.ts new file mode 100644 index 00000000000..dbe2760bb7d --- /dev/null +++ 
b/framework/test/unit/modules/utils/shuffleValidatorList.spec.ts @@ -0,0 +1,44 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { address as cryptoAddress } from '@liskhq/lisk-cryptography'; +import * as validatorShufflingScenario from '../../../fixtures/pos_validator_shuffling/uniformly_shuffled_validator_list.json'; +import { shuffleValidatorList } from '../../../../src/modules/utils'; + +describe('shuffleValidatorList', () => { + const { previousRoundSeed1 } = validatorShufflingScenario.testCases.input; + const addressList = [...validatorShufflingScenario.testCases.input.validatorList].map( + address => ({ + address: Buffer.from(address, 'hex'), + weight: BigInt(1), + }), + ); + it('should return a list of uniformly shuffled list of validators', () => { + const shuffledValidatorList = shuffleValidatorList( + Buffer.from(previousRoundSeed1, 'hex'), + addressList, + ); + + expect(shuffledValidatorList).toHaveLength(addressList.length); + shuffledValidatorList.forEach(validator => + expect(addressList.map(a => cryptoAddress.getLisk32AddressFromAddress(a.address))).toContain( + cryptoAddress.getLisk32AddressFromAddress(validator.address), + ), + ); + + expect(shuffledValidatorList.map(b => b.address.toString('hex'))).toEqual( + validatorShufflingScenario.testCases.output.validatorList, + ); + }); +}); From a869773b587b466e405910c39c3d13e54cca79c6 Mon Sep 17 00:00:00 2001 From: Franco NG Date: Wed, 20 Sep 2023 18:49:45 +0200 Subject: [PATCH 130/170] Update interop example (#8994) * Update registration script in 
examples/interop * Make authorize console.log more clear --------- Co-authored-by: !shan --- examples/interop/common/mainchain_registration.ts | 9 +++++++++ .../config/scripts/sidechain_registration.ts | 8 ++++++++ 2 files changed, 17 insertions(+) diff --git a/examples/interop/common/mainchain_registration.ts b/examples/interop/common/mainchain_registration.ts index cdd2bb1ea2a..939757f5d1c 100644 --- a/examples/interop/common/mainchain_registration.ts +++ b/examples/interop/common/mainchain_registration.ts @@ -123,5 +123,14 @@ export const registerMainchain = async ( }); console.log('Sent mainchain registration transaction. Result from transaction pool is: ', result); + + const authorizeMainchainResult = await mainchainClient.invoke<{ + transactionId: string; + }>('chainConnector_authorize', { + enable: true, + password: 'lisk', + }); + console.log('Authorize Mainchain completed, result:', authorizeMainchainResult); + process.exit(0); }; diff --git a/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts b/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts index 082deab672f..7e0d09cdefa 100644 --- a/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts +++ b/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts @@ -71,6 +71,14 @@ import { sidechainRegParams } from 'lisk-framework'; // Wait for 2 seconds before next registration await wait(WAIT_PERIOD); } + + const authorizeSideChainResult = await sidechainClient.invoke<{ + transactionId: string; + }>('chainConnector_authorize', { + enable: true, + password: 'lisk', + }); + console.log('Authorize Sidechain completed, result:', authorizeSideChainResult); } process.exit(0); From da62e35678699c31a0598df72f13335514fa602c Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Wed, 20 Sep 2023 19:02:57 +0100 Subject: [PATCH 131/170] Update recover method of NFT module to validate NFT attributes 
array (#9000) Add validation --- framework/src/modules/nft/method.ts | 8 +++--- .../test/unit/modules/nft/method.spec.ts | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index b5ec26df120..d1f67382df2 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -12,6 +12,7 @@ * Removal or modification of this copyright notice is prohibited. */ +import { validator } from '@liskhq/lisk-validator'; import { codec } from '@liskhq/lisk-codec'; import { BaseMethod } from '../base_method'; import { FeeMethod, InteroperabilityMethod, ModuleConfig, TokenMethod } from './types'; @@ -895,18 +896,19 @@ export class NFTMethod extends BaseMethod { ): Promise { const nftStore = this.stores.get(NFTStore); const nftID = storeKey; - let isDecodable = true; + let isValidInput = true; let decodedValue: NFTStoreData; try { decodedValue = codec.decode(nftStoreSchema, storeValue); + validator.validate(nftStoreSchema, decodedValue); } catch (error) { - isDecodable = false; + isValidInput = false; } if ( !substorePrefix.equals(nftStore.subStorePrefix) || storeKey.length !== LENGTH_NFT_ID || - !isDecodable + !isValidInput ) { this.events.get(RecoverEvent).error( methodContext, diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index a22bc7d664f..0ee3ee94dc5 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1668,6 +1668,31 @@ describe('NFTMethod', () => { ); }); + it('should throw and emit error recover event if module name length in attributes array is not valid', async () => { + const newStoreValue = codec.encode(nftStoreSchema, { + owner: utils.getRandomBytes(LENGTH_CHAIN_ID), + attributesArray: [ + { module: 'customMod1', attributes: Buffer.alloc(5) }, + { module: '', attributes: Buffer.alloc(2) }, + ], + }); + + await 
expect( + method.recover(methodContext, terminatedChainID, substorePrefix, storeKey, newStoreValue), + ).rejects.toThrow('Invalid inputs'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: storeKey, + }, + NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, + ); + }); + it('should throw and emit error recover event if nft chain id is not same as own chain id', async () => { await expect( method.recover(methodContext, terminatedChainID, substorePrefix, storeKey, storeValue), From 53736ce6c8405716394345106d5dc6d0bb4a0743 Mon Sep 17 00:00:00 2001 From: Franco NG Date: Thu, 21 Sep 2023 11:22:59 +0200 Subject: [PATCH 132/170] Update unit test to properly showcase minFee check (#8996) Co-authored-by: !shan --- .../base_cross_chain_update_command.spec.ts | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts index b45c12f38a6..769a49c9a9b 100644 --- a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts +++ b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts @@ -92,6 +92,7 @@ describe('BaseCrossChainUpdateCommand', () => { senderPublicKey, signatures: [], }; + const minReturnFeePerByte = BigInt(10000000); const certificate = codec.encode(certificateSchema, { blockID: utils.getRandomBytes(32), @@ -253,7 +254,7 @@ describe('BaseCrossChainUpdateCommand', () => { command.init( { getMessageFeeTokenID: jest.fn().mockResolvedValue(messageFeeTokenID), - getMinReturnFeePerByte: jest.fn().mockResolvedValue(BigInt(10000000)), + getMinReturnFeePerByte: jest.fn().mockResolvedValue(minReturnFeePerByte), } as any, { initializeUserAccount: jest.fn(), @@ -1497,6 +1498,7 @@ describe('BaseCrossChainUpdateCommand', () => { describe('bounce', () => { const ccmStatus = 
CCMStatusCode.MODULE_NOT_SUPPORTED; const ccmProcessedEventCode = CCMProcessedCode.MODULE_NOT_SUPPORTED; + const ccmSize = 100; let stateStore: PrefixedStateReadWriter; beforeEach(async () => { @@ -1518,7 +1520,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(1); @@ -1535,17 +1537,18 @@ describe('BaseCrossChainUpdateCommand', () => { }); it('should log event when ccm.fee is less than min fee', async () => { + const minFee = minReturnFeePerByte * BigInt(ccmSize); context = createCrossChainMessageContext({ ccm: { ...defaultCCM, status: CCMStatusCode.OK, - fee: BigInt(1), + fee: minFee - BigInt(1), }, stateStore, }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(1); @@ -1575,7 +1578,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1611,7 +1614,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1641,7 +1644,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); 
expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1668,7 +1671,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(2); From 53fa52fd46c868c6ab1e92acec13d2e8bb43ec78 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 21 Sep 2023 11:38:45 +0100 Subject: [PATCH 133/170] Remove collectionID as a data field from the create event of NFT module (#9001) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Remove collection id * Feedback * Update framework/src/modules/nft/events/create.ts Co-authored-by: Miroslav Jerković --------- Co-authored-by: Miroslav Jerković --- framework/src/modules/nft/events/create.ts | 13 +++---------- framework/src/modules/nft/events/destroy.ts | 4 ++-- framework/src/modules/nft/method.ts | 1 - framework/test/unit/modules/nft/method.spec.ts | 2 -- 4 files changed, 5 insertions(+), 15 deletions(-) diff --git a/framework/src/modules/nft/events/create.ts b/framework/src/modules/nft/events/create.ts index be3f55ae96c..e84fd3fac4b 100644 --- a/framework/src/modules/nft/events/create.ts +++ b/framework/src/modules/nft/events/create.ts @@ -13,18 +13,17 @@ */ import { BaseEvent, EventQueuer } from '../../base_event'; -import { LENGTH_COLLECTION_ID, LENGTH_NFT_ID, NftEventResult } from '../constants'; +import { LENGTH_NFT_ID, NftEventResult } from '../constants'; export interface CreateEventData { address: Buffer; nftID: Buffer; - collectionID: Buffer; } export const createEventSchema = { $id: '/nft/events/create', type: 'object', - required: ['address', 'nftID', 'collectionID', 'result'], + required: ['address', 'nftID', 'result'], properties: { address: { dataType: 'bytes', @@ -37,15 +36,9 @@ export const createEventSchema = { 
maxLength: LENGTH_NFT_ID, fieldNumber: 2, }, - collectionID: { - dataType: 'bytes', - minLength: LENGTH_COLLECTION_ID, - maxLength: LENGTH_COLLECTION_ID, - fieldNumber: 3, - }, result: { dataType: 'uint32', - fieldNumber: 4, + fieldNumber: 3, }, }, }; diff --git a/framework/src/modules/nft/events/destroy.ts b/framework/src/modules/nft/events/destroy.ts index 4fdb823e694..500c5579f2c 100644 --- a/framework/src/modules/nft/events/destroy.ts +++ b/framework/src/modules/nft/events/destroy.ts @@ -20,7 +20,7 @@ export interface DestroyEventData { nftID: Buffer; } -export const createEventSchema = { +export const destroyEventSchema = { $id: '/nft/events/destroy', type: 'object', required: ['address', 'nftID', 'result'], @@ -44,7 +44,7 @@ export const createEventSchema = { }; export class DestroyEvent extends BaseEvent { - public schema = createEventSchema; + public schema = destroyEventSchema; public log(ctx: EventQueuer, data: DestroyEventData): void { this.add(ctx, { ...data, result: NftEventResult.RESULT_SUCCESSFUL }, [ diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index d1f67382df2..9a3ebd5f2f2 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -325,7 +325,6 @@ export class NFTMethod extends BaseMethod { this.events.get(CreateEvent).log(methodContext, { address, nftID, - collectionID, }); } diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 0ee3ee94dc5..965369661de 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -587,7 +587,6 @@ describe('NFTMethod', () => { checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { address, nftID: expectedKey, - collectionID, }); }); @@ -619,7 +618,6 @@ describe('NFTMethod', () => { checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { address, nftID: expectedKey, - collectionID, }); }); }); From 
4a3fd2a12f45b2b43a8f6cdd0e407a0466dc3483 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 21 Sep 2023 12:06:29 +0100 Subject: [PATCH 134/170] Update removeSupportAllNFTsFromCollection method of NFT module (#9003) Update check --- framework/src/modules/nft/method.ts | 2 +- framework/test/unit/modules/nft/method.spec.ts | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 9a3ebd5f2f2..48731fdc3b5 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -839,7 +839,7 @@ export class NFTMethod extends BaseMethod { collectionID: Buffer, ): Promise { if (chainID.equals(this._config.ownChainID)) { - return; + throw new Error('Invalid operation. Support for native NFTs cannot be removed'); } const supportedNFTsStore = this.stores.get(SupportedNFTsStore); diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 965369661de..991d64ebf1b 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1469,9 +1469,7 @@ describe('NFTMethod', () => { config.ownChainID, utils.getRandomBytes(LENGTH_CHAIN_ID), ), - ).resolves.toBeUndefined(); - - expect(methodContext.eventQueue.getEvents()).toHaveLength(0); + ).rejects.toThrow('Invalid operation. 
Support for native NFTs cannot be removed'); }); it('should throw if all NFTs are supported', async () => { From cf5c582e3aa03e6b3e28c399cba1948a06fa772c Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 21 Sep 2023 13:33:49 +0200 Subject: [PATCH 135/170] Updates CcmTransferEvent & CrossChainTransferCommand (#9011) Updates CcmTransferEvent with sending and receiving chain and CrossChainTransferCommand.execute to charge fee if CMM status is not OK Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> --- .../modules/nft/cc_commands/cc_transfer.ts | 7 ++++- .../src/modules/nft/events/ccm_transfer.ts | 27 ++++++++++++++++--- .../nft/cc_comands/cc_transfer.spec.ts | 12 ++++++++- 3 files changed, 41 insertions(+), 5 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index 74c06e9efbd..3213c30fbba 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -131,13 +131,16 @@ export class CrossChainTransferCommand extends BaseCCCommand { senderAddress, recipientAddress, nftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: ccm.sendingChainID, }, NftEventResult.RESULT_NFT_NOT_SUPPORTED, ); throw new Error('Non-supported NFT'); } + this._feeMethod.payFee(getMethodContext(), BigInt(FEE_CREATE_NFT)); + if (status === CCM_STATUS_CODE_OK) { - this._feeMethod.payFee(getMethodContext(), BigInt(FEE_CREATE_NFT)); await nftStore.save(getMethodContext(), nftID, { owner: recipientAddress, attributesArray: receivedAttributes as NFTAttributes[], @@ -157,6 +160,8 @@ export class CrossChainTransferCommand extends BaseCCCommand { senderAddress, recipientAddress, nftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: ccm.sendingChainID, }); } } diff --git a/framework/src/modules/nft/events/ccm_transfer.ts b/framework/src/modules/nft/events/ccm_transfer.ts index 
990f267885b..be4e36df185 100644 --- a/framework/src/modules/nft/events/ccm_transfer.ts +++ b/framework/src/modules/nft/events/ccm_transfer.ts @@ -13,18 +13,27 @@ */ import { BaseEvent, EventQueuer } from '../../base_event'; -import { LENGTH_NFT_ID, NftEventResult } from '../constants'; +import { LENGTH_CHAIN_ID, LENGTH_NFT_ID, NftEventResult } from '../constants'; export interface CCMTransferEventData { senderAddress: Buffer; recipientAddress: Buffer; nftID: Buffer; + receivingChainID: Buffer; + sendingChainID: Buffer; } export const ccmTransferEventSchema = { $id: '/nft/events/ccmTransfer', type: 'object', - required: ['senderAddress', 'recipientAddress', 'nftID', 'result'], + required: [ + 'senderAddress', + 'recipientAddress', + 'nftID', + 'receivingChainID', + 'sendingChainID', + 'result', + ], properties: { senderAddress: { dataType: 'bytes', @@ -42,9 +51,21 @@ export const ccmTransferEventSchema = { maxLength: LENGTH_NFT_ID, fieldNumber: 3, }, + receivingChainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 4, + }, + sendingChainID: { + dataType: 'bytes', + minLength: LENGTH_CHAIN_ID, + maxLength: LENGTH_CHAIN_ID, + fieldNumber: 5, + }, result: { dataType: 'uint32', - fieldNumber: 4, + fieldNumber: 6, }, }, }; diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index a8b20e3144d..ecfb6bc4e8d 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -415,6 +415,8 @@ describe('CrossChain Transfer Command', () => { senderAddress, recipientAddress, nftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: ccm.sendingChainID, }); }); @@ -465,6 +467,8 @@ describe('CrossChain Transfer Command', () => { senderAddress, recipientAddress: senderAddress, nftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: 
ccm.sendingChainID, }); }); @@ -514,6 +518,8 @@ describe('CrossChain Transfer Command', () => { senderAddress, recipientAddress, nftID: newNftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: ccm.sendingChainID, }, NftEventResult.RESULT_NFT_NOT_SUPPORTED, ); @@ -560,6 +566,8 @@ describe('CrossChain Transfer Command', () => { senderAddress, recipientAddress, nftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: ccm.sendingChainID, }); }); @@ -610,7 +618,7 @@ describe('CrossChain Transfer Command', () => { methodContext, userStore.getKey(senderAddress, nftID), ); - expect(feeMethod.payFee).not.toHaveBeenCalled(); + expect(feeMethod.payFee).toHaveBeenCalledWith(methodContext, BigInt(FEE_CREATE_NFT)); expect(nftStoreData.owner).toStrictEqual(senderAddress); expect(nftStoreData.attributesArray).toEqual(attributesArray); expect(userAccountExistsForRecipient).toBe(false); @@ -619,6 +627,8 @@ describe('CrossChain Transfer Command', () => { senderAddress, recipientAddress: senderAddress, nftID, + receivingChainID: ccm.receivingChainID, + sendingChainID: ccm.sendingChainID, }); }); }); From 22213a76f79e470d6c75f734120b277bb344e378 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Thu, 21 Sep 2023 13:58:49 +0200 Subject: [PATCH 136/170] Update the removeSupportAllNFTs method (#9017) --- framework/src/modules/nft/method.ts | 2 ++ framework/test/unit/modules/nft/method.spec.ts | 14 ++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 48731fdc3b5..c1443aabb6a 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -711,6 +711,8 @@ export class NFTMethod extends BaseMethod { await supportedNFTsStore.del(methodContext, key); } + await supportedNFTsStore.del(methodContext, ALL_SUPPORTED_NFTS_KEY); + this.events.get(AllNFTsSupportRemovedEvent).log(methodContext); } diff --git a/framework/test/unit/modules/nft/method.spec.ts 
b/framework/test/unit/modules/nft/method.spec.ts index 991d64ebf1b..6f4e07a1409 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1189,6 +1189,20 @@ describe('NFTMethod', () => { checkEventResult(methodContext.eventQueue, 1, AllNFTsSupportRemovedEvent, 0, {}, null); }); + + it('should remove all existing entries even if the ALL_SUPPORTED_NFTS_KEY entry exists', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], + }); + + await expect(method.removeSupportAllNFTs(methodContext)).resolves.toBeUndefined(); + await expect( + supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY), + ).resolves.toBeFalse(); + + checkEventResult(methodContext.eventQueue, 1, AllNFTsSupportRemovedEvent, 0, {}, null); + expect(methodContext.eventQueue.getEvents()).toHaveLength(1); + }); }); describe('supportAllNFTsFromChain', () => { From a251bb369e61a54105c3099138768c814af65713 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Thu, 21 Sep 2023 16:08:04 +0200 Subject: [PATCH 137/170] Update unit test of removeSupportAllNFTs method (#9018) --- framework/test/unit/modules/nft/method.spec.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 6f4e07a1409..054c601ab57 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1178,12 +1178,15 @@ describe('NFTMethod', () => { describe('removeSupportAllNFTs', () => { it('should remove all existing entries and log AllNFTsSupportRemovedEvent', async () => { const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); - - await supportedNFTsStore.save(methodContext, utils.getRandomBytes(LENGTH_CHAIN_ID), { + await supportedNFTsStore.save(methodContext, chainID, { supportedCollectionIDArray: [], }); + await 
expect(supportedNFTsStore.has(methodContext, chainID)).resolves.toBeTrue(); await expect(method.removeSupportAllNFTs(methodContext)).resolves.toBeUndefined(); + await expect( + supportedNFTsStore.has(methodContext, ALL_SUPPORTED_NFTS_KEY), + ).resolves.toBeFalse(); await expect(supportedNFTsStore.has(methodContext, chainID)).resolves.toBeFalse(); From 5c689a9beeb0143bf2a4d330bf213f0a901c75d0 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 28 Sep 2023 14:09:23 +0100 Subject: [PATCH 138/170] Implement getNFT function of NFT module (#9034) * Implement and use method getft * Added and updated unit tests * Add check per feedback * Use json type per feedback * Update check with has per feedback --- .../modules/nft/cc_commands/cc_transfer.ts | 14 +- .../src/modules/nft/commands/transfer.ts | 27 +-- .../nft/commands/transfer_cross_chain.ts | 30 +-- framework/src/modules/nft/endpoint.ts | 15 +- framework/src/modules/nft/method.ts | 210 +++++++----------- framework/src/modules/nft/types.ts | 9 + .../nft/cc_comands/cc_transfer.spec.ts | 83 ++++++- .../modules/nft/commands/transfer.spec.ts | 12 +- .../nft/commands/transfer_cross_chain.spec.ts | 16 +- .../test/unit/modules/nft/endpoint.spec.ts | 2 +- .../test/unit/modules/nft/method.spec.ts | 151 ++++--------- 11 files changed, 277 insertions(+), 292 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index 3213c30fbba..d4a28e89c6f 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -26,7 +26,7 @@ import { import { InternalMethod } from '../internal_method'; import { BaseCCCommand } from '../../interoperability/base_cc_command'; import { CrossChainMessageContext } from '../../interoperability/types'; -import { MAX_RESERVED_ERROR_STATUS } from '../../interoperability/constants'; +import { CCMStatusCode, MAX_RESERVED_ERROR_STATUS } 
from '../../interoperability/constants'; import { FeeMethod } from '../types'; import { EscrowStore } from '../stores/escrow'; import { CcmTransferEvent } from '../events/ccm_transfer'; @@ -75,12 +75,20 @@ export class CrossChainTransferCommand extends BaseCCCommand { throw new Error('Non-existent entry in the NFT substore'); } - const owner = await this._method.getNFTOwner(getMethodContext(), nftID); - if (!owner.equals(sendingChainID)) { + const nft = await nftStore.get(getMethodContext(), nftID); + if (!nft.owner.equals(sendingChainID)) { throw new Error('NFT has not been properly escrowed'); } } + if ( + !nftChainID.equals(ownChainID) && + (ccm.status === CCMStatusCode.MODULE_NOT_SUPPORTED || + ccm.status === CCMStatusCode.CROSS_CHAIN_COMMAND_NOT_SUPPORTED) + ) { + throw new Error('Module or cross-chain command not supported'); + } + if (!nftChainID.equals(ownChainID) && nftExists) { throw new Error('NFT substore entry already exists'); } diff --git a/framework/src/modules/nft/commands/transfer.ts b/framework/src/modules/nft/commands/transfer.ts index 3da6043b42f..8588035a0dd 100644 --- a/framework/src/modules/nft/commands/transfer.ts +++ b/framework/src/modules/nft/commands/transfer.ts @@ -20,9 +20,7 @@ import { } from '../../../state_machine'; import { BaseCommand } from '../../base_command'; import { transferParamsSchema } from '../schemas'; -import { NFTStore } from '../stores/nft'; import { NFTMethod } from '../method'; -import { LENGTH_CHAIN_ID, NFT_NOT_LOCKED } from '../constants'; import { InternalMethod } from '../internal_method'; export interface Params { @@ -43,31 +41,24 @@ export class TransferCommand extends BaseCommand { public async verify(context: CommandVerifyContext): Promise { const { params } = context; + const methodContext = context.getMethodContext(); - const nftStore = this.stores.get(NFTStore); - - const nftExists = await nftStore.has(context, params.nftID); - - if (!nftExists) { - throw new Error('NFT substore entry does not exist'); 
+ let nft; + try { + nft = await this._method.getNFT(methodContext, params.nftID); + } catch (error) { + throw new Error('NFT does not exist'); } - const owner = await this._method.getNFTOwner(context.getMethodContext(), params.nftID); - - if (owner.length === LENGTH_CHAIN_ID) { + if (this._method.isNFTEscrowed(nft)) { throw new Error('NFT is escrowed to another chain'); } - if (!owner.equals(context.transaction.senderAddress)) { + if (!nft.owner.equals(context.transaction.senderAddress)) { throw new Error('Transfer not initiated by the NFT owner'); } - const lockingModule = await this._method.getLockingModule( - context.getMethodContext(), - params.nftID, - ); - - if (lockingModule !== NFT_NOT_LOCKED) { + if (this._method.isNFTLocked(nft)) { throw new Error('Locked NFTs cannot be transferred'); } diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index 9fa0cd4f20d..9fef1ab8f19 100644 --- a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -13,9 +13,7 @@ */ import { crossChainTransferParamsSchema } from '../schemas'; -import { NFTStore } from '../stores/nft'; import { NFTMethod } from '../method'; -import { LENGTH_CHAIN_ID, NFT_NOT_LOCKED } from '../constants'; import { InteroperabilityMethod, TokenMethod } from '../types'; import { BaseCommand } from '../../base_command'; import { @@ -57,21 +55,20 @@ export class TransferCrossChainCommand extends BaseCommand { public async verify(context: CommandVerifyContext): Promise { const { params } = context; - - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(context.getMethodContext(), params.nftID); + const methodContext = context.getMethodContext(); if (params.receivingChainID.equals(context.chainID)) { throw new Error('Receiving chain cannot be the sending chain'); } - if (!nftExists) { - throw new Error('NFT substore entry does 
not exist'); + let nft; + try { + nft = await this._nftMethod.getNFT(methodContext, params.nftID); + } catch (error) { + throw new Error('NFT does not exist'); } - const owner = await this._nftMethod.getNFTOwner(context.getMethodContext(), params.nftID); - - if (owner.length === LENGTH_CHAIN_ID) { + if (this._nftMethod.isNFTEscrowed(nft)) { throw new Error('NFT is escrowed to another chain'); } @@ -82,25 +79,20 @@ export class TransferCrossChainCommand extends BaseCommand { } const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( - context.getMethodContext(), + methodContext, params.receivingChainID, ); - if (!owner.equals(context.transaction.senderAddress)) { + if (!nft.owner.equals(context.transaction.senderAddress)) { throw new Error('Transfer not initiated by the NFT owner'); } - const lockingModule = await this._nftMethod.getLockingModule( - context.getMethodContext(), - params.nftID, - ); - - if (lockingModule !== NFT_NOT_LOCKED) { + if (this._nftMethod.isNFTLocked(nft)) { throw new Error('Locked NFTs cannot be transferred'); } const availableBalance = await this._tokenMethod.getAvailableBalance( - context.getMethodContext(), + methodContext, context.transaction.senderAddress, messageFeeTokenID, ); diff --git a/framework/src/modules/nft/endpoint.ts b/framework/src/modules/nft/endpoint.ts index 6c71df19002..6a4c18b0939 100644 --- a/framework/src/modules/nft/endpoint.ts +++ b/framework/src/modules/nft/endpoint.ts @@ -28,7 +28,7 @@ import { import { NFTStore } from './stores/nft'; import { ALL_SUPPORTED_NFTS_KEY, LENGTH_ADDRESS, LENGTH_NFT_ID } from './constants'; import { UserStore } from './stores/user'; -import { NFT } from './types'; +import { NFTJSON } from './types'; import { SupportedNFTsStore } from './stores/supported_nfts'; import { NFTMethod } from './method'; @@ -41,7 +41,7 @@ export class NFTEndpoint extends BaseEndpoint { public async getNFTs( context: ModuleEndpointContext, - ): Promise<{ nfts: JSONObject & { id: string 
}>[] }> { + ): Promise<{ nfts: JSONObject & { id: string }>[] }> { validator.validate<{ address: string }>(getNFTsRequestSchema, context.params); const nftStore = this.stores.get(NFTStore); @@ -97,7 +97,7 @@ export class NFTEndpoint extends BaseEndpoint { return { hasNFT: nftData.owner.equals(owner) }; } - public async getNFT(context: ModuleEndpointContext): Promise> { + public async getNFT(context: ModuleEndpointContext): Promise> { const { params } = context; validator.validate<{ id: string }>(getNFTRequestSchema, params); @@ -106,7 +106,7 @@ export class NFTEndpoint extends BaseEndpoint { const nftExists = await nftStore.has(context.getImmutableMethodContext(), nftID); if (!nftExists) { - throw new Error('NFT does not exist'); + throw new Error('NFT substore entry does not exist'); } const userStore = this.stores.get(UserStore); @@ -118,6 +118,13 @@ export class NFTEndpoint extends BaseEndpoint { })); if (nftData.owner.length === LENGTH_ADDRESS) { + const userExists = await userStore.has( + context.getImmutableMethodContext(), + userStore.getKey(nftData.owner, nftID), + ); + if (!userExists) { + throw new Error('User substore entry does not exist'); + } const userData = await userStore.get( context.getImmutableMethodContext(), userStore.getKey(nftData.owner, nftID), diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index c1443aabb6a..1ce1885561d 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -15,7 +15,7 @@ import { validator } from '@liskhq/lisk-validator'; import { codec } from '@liskhq/lisk-codec'; import { BaseMethod } from '../base_method'; -import { FeeMethod, InteroperabilityMethod, ModuleConfig, TokenMethod } from './types'; +import { FeeMethod, InteroperabilityMethod, ModuleConfig, NFT, TokenMethod } from './types'; import { NFTAttributes, NFTStore, NFTStoreData, nftStoreSchema } from './stores/nft'; import { ImmutableMethodContext, MethodContext } from 
'../../state_machine'; import { @@ -79,9 +79,20 @@ export class NFTMethod extends BaseMethod { return nftID.subarray(0, LENGTH_CHAIN_ID); } - public async getNFTOwner(methodContext: ImmutableMethodContext, nftID: Buffer): Promise { - const nftStore = this.stores.get(NFTStore); + public isNFTEscrowed(nft: NFT): boolean { + return nft.owner.length !== LENGTH_ADDRESS; + } + + public isNFTLocked(nft: NFT): boolean { + if (!nft.lockingModule) { + return false; + } + return nft.lockingModule !== NFT_NOT_LOCKED; + } + + public async getNFT(methodContext: ImmutableMethodContext, nftID: Buffer): Promise { + const nftStore = this.stores.get(NFTStore); const nftExists = await nftStore.has(methodContext, nftID); if (!nftExists) { @@ -89,24 +100,19 @@ export class NFTMethod extends BaseMethod { } const data = await nftStore.get(methodContext, nftID); + const { owner } = data; - return data.owner; - } - - public async getLockingModule( - methodContext: ImmutableMethodContext, - nftID: Buffer, - ): Promise { - const owner = await this.getNFTOwner(methodContext, nftID); - - if (owner.length === LENGTH_CHAIN_ID) { - throw new Error('NFT is escrowed to another chain'); + if (owner.length === LENGTH_ADDRESS) { + const userStore = this.stores.get(UserStore); + const userExists = await userStore.has(methodContext, userStore.getKey(owner, nftID)); + if (!userExists) { + throw new Error('User substore entry does not exist'); + } + const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); + return { ...data, lockingModule: userData.lockingModule }; } - const userStore = this.stores.get(UserStore); - const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); - - return userData.lockingModule; + return data; } public async destroy( @@ -114,11 +120,10 @@ export class NFTMethod extends BaseMethod { address: Buffer, nftID: Buffer, ): Promise { - const nftStore = this.stores.get(NFTStore); - - const nftExists = await 
nftStore.has(methodContext, nftID); - - if (!nftExists) { + let nft; + try { + nft = await this.getNFT(methodContext, nftID); + } catch (error) { this.events.get(DestroyEvent).error( methodContext, { @@ -128,42 +133,36 @@ export class NFTMethod extends BaseMethod { NftEventResult.RESULT_NFT_DOES_NOT_EXIST, ); - throw new Error('NFT substore entry does not exist'); + throw new Error('NFT does not exist'); } - const owner = await this.getNFTOwner(methodContext, nftID); - - if (owner.length === LENGTH_CHAIN_ID) { + if (!nft.owner.equals(address)) { this.events.get(DestroyEvent).error( methodContext, { address, nftID, }, - NftEventResult.RESULT_NFT_ESCROWED, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, ); - throw new Error('NFT is escrowed to another chain'); + throw new Error('Not initiated by the NFT owner'); } - if (!owner.equals(address)) { + if (this.isNFTEscrowed(nft)) { this.events.get(DestroyEvent).error( methodContext, { address, nftID, }, - NftEventResult.RESULT_INITIATED_BY_NONOWNER, + NftEventResult.RESULT_NFT_ESCROWED, ); - throw new Error('Not initiated by the NFT owner'); + throw new Error('NFT is escrowed to another chain'); } - const userStore = this.stores.get(UserStore); - const userKey = userStore.getKey(owner, nftID); - const { lockingModule } = await userStore.get(methodContext, userKey); - - if (lockingModule !== NFT_NOT_LOCKED) { + if (this.isNFTLocked(nft)) { this.events.get(DestroyEvent).error( methodContext, { @@ -176,9 +175,10 @@ export class NFTMethod extends BaseMethod { throw new Error('Locked NFTs cannot be destroyed'); } + const nftStore = this.stores.get(NFTStore); + const userStore = this.stores.get(UserStore); await nftStore.del(methodContext, nftID); - - await userStore.del(methodContext, userKey); + await userStore.del(methodContext, userStore.getKey(nft.owner, nftID)); this.events.get(DestroyEvent).log(methodContext, { address, @@ -227,42 +227,6 @@ export class NFTMethod extends BaseMethod { return false; } - public async 
getAttributesArray( - methodContext: MethodContext, - nftID: Buffer, - ): Promise { - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(methodContext, nftID); - if (!nftExists) { - throw new Error('NFT substore entry does not exist'); - } - - const storeData = await nftStore.get(methodContext, nftID); - return storeData.attributesArray; - } - - public async getAttributes( - methodContext: MethodContext, - module: string, - nftID: Buffer, - ): Promise { - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(methodContext, nftID); - if (!nftExists) { - throw new Error('NFT substore entry does not exist'); - } - - const storeData = await nftStore.get(methodContext, nftID); - - for (const nftAttributes of storeData.attributesArray) { - if (nftAttributes.module === module) { - return nftAttributes.attributes; - } - } - - throw new Error('Specific module did not set any attributes.'); - } - public async getNextAvailableIndex( methodContext: MethodContext, collectionID: Buffer, @@ -333,11 +297,10 @@ export class NFTMethod extends BaseMethod { throw new Error('Cannot be locked by NFT module'); } - const nftStore = this.stores.get(NFTStore); - - const nftExists = await nftStore.has(methodContext, nftID); - - if (!nftExists) { + let nft; + try { + nft = await this.getNFT(methodContext, nftID); + } catch (error) { this.events.get(LockEvent).error( methodContext, { @@ -347,12 +310,10 @@ export class NFTMethod extends BaseMethod { NftEventResult.RESULT_NFT_DOES_NOT_EXIST, ); - throw new Error('NFT substore entry does not exist'); + throw new Error('NFT does not exist'); } - const owner = await this.getNFTOwner(methodContext, nftID); - - if (owner.length === LENGTH_CHAIN_ID) { + if (this.isNFTEscrowed(nft)) { this.events.get(LockEvent).error( methodContext, { @@ -365,11 +326,7 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT is escrowed to another chain'); } - const userStore = 
this.stores.get(UserStore); - const userKey = userStore.getKey(owner, nftID); - const userData = await userStore.get(methodContext, userKey); - - if (userData.lockingModule !== NFT_NOT_LOCKED) { + if (this.isNFTLocked(nft)) { this.events.get(LockEvent).error( methodContext, { @@ -382,9 +339,10 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT is already locked'); } - userData.lockingModule = module; - - await userStore.set(methodContext, userKey, userData); + const userStore = this.stores.get(UserStore); + await userStore.set(methodContext, userStore.getKey(nft.owner, nftID), { + lockingModule: module, + }); this.events.get(LockEvent).log(methodContext, { module, @@ -393,11 +351,10 @@ export class NFTMethod extends BaseMethod { } public async unlock(methodContext: MethodContext, module: string, nftID: Buffer): Promise { - const nftStore = this.stores.get(NFTStore); - - const nftExists = await nftStore.has(methodContext, nftID); - - if (!nftExists) { + let nft; + try { + nft = await this.getNFT(methodContext, nftID); + } catch (error) { this.events.get(LockEvent).error( methodContext, { @@ -407,20 +364,14 @@ export class NFTMethod extends BaseMethod { NftEventResult.RESULT_NFT_DOES_NOT_EXIST, ); - throw new Error('NFT substore entry does not exist'); + throw new Error('NFT does not exist'); } - const nftData = await nftStore.get(methodContext, nftID); - - if (nftData.owner.length === LENGTH_CHAIN_ID) { + if (this.isNFTEscrowed(nft)) { throw new Error('NFT is escrowed to another chain'); } - const userStore = this.stores.get(UserStore); - const userKey = userStore.getKey(nftData.owner, nftID); - const userData = await userStore.get(methodContext, userKey); - - if (userData.lockingModule === NFT_NOT_LOCKED) { + if (!this.isNFTLocked(nft)) { this.events.get(LockEvent).error( methodContext, { @@ -433,7 +384,7 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT is not locked'); } - if (userData.lockingModule !== module) { + if 
(nft.lockingModule !== module) { this.events.get(LockEvent).error( methodContext, { @@ -446,9 +397,10 @@ export class NFTMethod extends BaseMethod { throw new Error('Unlocking NFT via module that did not lock it'); } - userData.lockingModule = NFT_NOT_LOCKED; - - await userStore.set(methodContext, userKey, userData); + const userStore = this.stores.get(UserStore); + await userStore.set(methodContext, userStore.getKey(nft.owner, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); this.events.get(LockEvent).log(methodContext, { module, @@ -462,9 +414,10 @@ export class NFTMethod extends BaseMethod { recipientAddress: Buffer, nftID: Buffer, ): Promise { - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(methodContext, nftID); - if (!nftExists) { + let nft; + try { + nft = await this.getNFT(methodContext, nftID); + } catch (error) { this.events.get(TransferEvent).error( methodContext, { @@ -474,11 +427,11 @@ export class NFTMethod extends BaseMethod { }, NftEventResult.RESULT_NFT_DOES_NOT_EXIST, ); - throw new Error('NFT substore entry does not exist'); + + throw new Error('NFT does not exist'); } - const owner = await this.getNFTOwner(methodContext, nftID); - if (owner.length === LENGTH_CHAIN_ID) { + if (this.isNFTEscrowed(nft)) { this.events.get(TransferEvent).error( methodContext, { @@ -491,7 +444,7 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT is escrowed to another chain'); } - if (!owner.equals(senderAddress)) { + if (!nft.owner.equals(senderAddress)) { this.events.get(TransferEvent).error( methodContext, { @@ -504,9 +457,7 @@ export class NFTMethod extends BaseMethod { throw new Error('Transfer not initiated by the NFT owner'); } - const userStore = this.stores.get(UserStore); - const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); - if (userData.lockingModule !== NFT_NOT_LOCKED) { + if (this.isNFTLocked(nft)) { this.events.get(TransferEvent).error( methodContext, { @@ -563,9 
+514,10 @@ export class NFTMethod extends BaseMethod { throw new Error('Data field is too long'); } - const nftStore = this.stores.get(NFTStore); - const nftExists = await nftStore.has(methodContext, nftID); - if (!nftExists) { + let nft; + try { + nft = await this.getNFT(methodContext, nftID); + } catch (error) { this.events.get(TransferCrossChainEvent).error( methodContext, { @@ -577,11 +529,11 @@ export class NFTMethod extends BaseMethod { }, NftEventResult.RESULT_NFT_DOES_NOT_EXIST, ); - throw new Error('NFT substore entry does not exist'); + + throw new Error('NFT does not exist'); } - const owner = await this.getNFTOwner(methodContext, nftID); - if (owner.length === LENGTH_CHAIN_ID) { + if (this.isNFTEscrowed(nft)) { this.events.get(TransferCrossChainEvent).error( methodContext, { @@ -612,7 +564,7 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT must be native either to the sending chain or the receiving chain'); } - if (!owner.equals(senderAddress)) { + if (!nft.owner.equals(senderAddress)) { this.events.get(TransferCrossChainEvent).error( methodContext, { @@ -627,9 +579,7 @@ export class NFTMethod extends BaseMethod { throw new Error('Transfer not initiated by the NFT owner'); } - const userStore = this.stores.get(UserStore); - const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); - if (userData.lockingModule !== NFT_NOT_LOCKED) { + if (this.isNFTLocked(nft)) { this.events.get(TransferCrossChainEvent).error( methodContext, { diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 64ccefa1a54..9a2e8bfc37e 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -13,6 +13,7 @@ */ import { ImmutableMethodContext, MethodContext } from '../../state_machine'; +import { JSONObject } from '../../types'; import { CCMsg } from '../interoperability'; export interface ModuleConfig { @@ -58,6 +59,14 @@ export interface NFTAttributes { } export 
interface NFT { + owner: Buffer; + attributesArray: NFTAttributes[]; + lockingModule?: string; +} + +export type NFTJSON = JSONObject; + +export interface NFTOutputEndpoint { owner: string; attributesArray: NFTAttributes[]; lockingModule?: string; diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index ecfb6bc4e8d..78d5ab0037d 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -41,6 +41,7 @@ import { CcmTransferEvent } from '../../../../../src/modules/nft/events/ccm_tran import { EscrowStore } from '../../../../../src/modules/nft/stores/escrow'; import { UserStore } from '../../../../../src/modules/nft/stores/user'; import { SupportedNFTsStore } from '../../../../../src/modules/nft/stores/supported_nfts'; +import { CCMStatusCode } from '../../../../../src/modules/interoperability/constants'; describe('CrossChain Transfer Command', () => { const module = new NFTModule(); @@ -272,7 +273,17 @@ describe('CrossChain Transfer Command', () => { await expect(command.verify(context)).rejects.toThrow('NFT has not been properly escrowed'); }); - it('should not throw if nft chain id is not equal to own chain id and no entry exists in nft substore for the nft id', async () => { + it('throw if nft chain id is not equal to own chain id and ccm status code is CCMStatusCode.MODULE_NOT_SUPPORTED', async () => { + const newCcm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCMStatusCode.MODULE_NOT_SUPPORTED, + params, + }; const newConfig = { ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), escrowAccountInitializationFee: BigInt(50000000), @@ -282,7 +293,7 @@ describe('CrossChain Transfer Command', () => { internalMethod.addDependencies(method, interopMethod); 
internalMethod.init(newConfig); context = { - ccm, + ccm: newCcm, transaction: defaultTransaction, header: defaultHeader, stateStore, @@ -295,7 +306,47 @@ describe('CrossChain Transfer Command', () => { }; await nftStore.del(methodContext, nftID); - await expect(command.verify(context)).resolves.toBeUndefined(); + await expect(command.verify(context)).rejects.toThrow( + 'Module or cross-chain command not supported', + ); + }); + + it('throw if nft chain id is not equal to own chain id and ccm status code is CCMStatusCode.CROSS_CHAIN_COMMAND_NOT_SUPPORTED', async () => { + const newCcm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: CCMStatusCode.CROSS_CHAIN_COMMAND_NOT_SUPPORTED, + params, + }; + const newConfig = { + ownChainID: utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + context = { + ccm: newCcm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID: newConfig.ownChainID, + }; + await nftStore.del(methodContext, nftID); + + await expect(command.verify(context)).rejects.toThrow( + 'Module or cross-chain command not supported', + ); }); it('throw if nft chain id is not equal to own chain id and entry already exists in nft substore for the nft id', async () => { @@ -322,6 +373,32 @@ describe('CrossChain Transfer Command', () => { await expect(command.verify(context)).rejects.toThrow('NFT substore entry already exists'); }); + + it('should not throw if nft chain id is not equal to own chain id and no entry exists in nft substore for the nft id', async () => { + const newConfig = { + ownChainID: 
utils.getRandomBytes(LENGTH_CHAIN_ID), + escrowAccountInitializationFee: BigInt(50000000), + userAccountInitializationFee: BigInt(50000000), + }; + method.init(newConfig); + internalMethod.addDependencies(method, interopMethod); + internalMethod.init(newConfig); + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID: newConfig.ownChainID, + }; + await nftStore.del(methodContext, nftID); + + await expect(command.verify(context)).resolves.toBeUndefined(); + }); }); describe('execute', () => { diff --git a/framework/test/unit/modules/nft/commands/transfer.spec.ts b/framework/test/unit/modules/nft/commands/transfer.spec.ts index b9889fa9f06..010b45c5b14 100644 --- a/framework/test/unit/modules/nft/commands/transfer.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer.spec.ts @@ -106,7 +106,7 @@ describe('Transfer command', () => { await expect( command.verify(nftIDNotExistingContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow('NFT substore entry does not exist'); + ).rejects.toThrow('NFT does not exist'); }); it('should fail if NFT is escrowed to another chain', async () => { @@ -128,11 +128,19 @@ describe('Transfer command', () => { const nftIncorrectOwnerContext = createTransactionContextWithOverridingParams({ nftID, }); + const newOwner = utils.getRandomBytes(LENGTH_ADDRESS); await nftStore.save(createStoreGetter(nftIncorrectOwnerContext.stateStore), nftID, { - owner: utils.getRandomBytes(LENGTH_ADDRESS), + owner: newOwner, attributesArray: [], }); + await userStore.set( + createStoreGetter(nftIncorrectOwnerContext.stateStore), + userStore.getKey(newOwner, nftID), + { + lockingModule: 'token', + }, + ); await expect( command.verify(nftIncorrectOwnerContext.createCommandVerifyContext(transferParamsSchema)), diff --git 
a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts index 83eeb065f67..a863839d5a5 100644 --- a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts @@ -222,7 +222,7 @@ describe('TransferCrossChainComand', () => { await expect( command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('NFT substore entry does not exist'); + ).rejects.toThrow('NFT does not exist'); }); it('should fail if NFT is escrowed', async () => { @@ -236,7 +236,7 @@ describe('TransferCrossChainComand', () => { }); it('should fail if NFT is not native to either the sending or receiving chain', async () => { - const nftID = utils.getRandomBytes(LENGTH_ADDRESS); + const nftID = utils.getRandomBytes(LENGTH_NFT_ID); const context = createTransactionContextWithOverridingParams({ nftID, @@ -247,9 +247,13 @@ describe('TransferCrossChainComand', () => { attributesArray: [], }); + await userStore.set(methodContext, userStore.getKey(owner, nftID), { + lockingModule: NFT_NOT_LOCKED, + }); + await expect( command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow(''); + ).rejects.toThrow('NFT must be native to either the sending or the receiving chain'); }); it('should fail if the owner of the NFT is not the sender', async () => { @@ -258,8 +262,12 @@ describe('TransferCrossChainComand', () => { }); const nft = await nftStore.get(methodContext, existingNFT.nftID); - nft.owner = utils.getRandomBytes(LENGTH_ADDRESS); + const newOwner = utils.getRandomBytes(LENGTH_ADDRESS); + nft.owner = newOwner; await nftStore.save(methodContext, existingNFT.nftID, nft); + await userStore.set(methodContext, userStore.getKey(newOwner, existingNFT.nftID), { + lockingModule: NFT_NOT_LOCKED, + }); await expect( 
command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), diff --git a/framework/test/unit/modules/nft/endpoint.spec.ts b/framework/test/unit/modules/nft/endpoint.spec.ts index 9515ac1abf5..a9dca0e4e71 100644 --- a/framework/test/unit/modules/nft/endpoint.spec.ts +++ b/framework/test/unit/modules/nft/endpoint.spec.ts @@ -319,7 +319,7 @@ describe('NFTEndpoint', () => { }, }); - await expect(endpoint.getNFT(context)).rejects.toThrow('NFT does not exist'); + await expect(endpoint.getNFT(context)).rejects.toThrow('NFT substore entry does not exist'); }); it('should return NFT details', async () => { diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 054c601ab57..16cde615f6a 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -104,7 +104,6 @@ describe('NFTMethod', () => { utils.getRandomBytes(LENGTH_CHAIN_ID), firstIndex, ]); - let owner: Buffer; const checkEventResult = ( eventQueue: EventQueue, @@ -137,7 +136,6 @@ describe('NFTMethod', () => { method.init(config); internalMethod.addDependencies(method, interopMethod); internalMethod.init(config); - owner = utils.getRandomBytes(LENGTH_ADDRESS); methodContext = createMethodContext({ stateStore: new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()), @@ -215,54 +213,52 @@ describe('NFTMethod', () => { }); }); - describe('getNFTOwner', () => { - it('should fail if NFT does not exist', async () => { - await expect(method.getNFTOwner(methodContext, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', - ); + describe('isNFTEscrowed', () => { + it('should return true if nft owner is a chain', () => { + expect(method.isNFTEscrowed({ ...escrowedNFT, attributesArray: [] })).toBeTrue(); }); - it('should return the owner if NFT exists', async () => { - await nftStore.save(methodContext, nftID, { - owner, - attributesArray: [], - }); - - await 
expect(method.getNFTOwner(methodContext, nftID)).resolves.toEqual(owner); + it('should return false if nft owner is not a chain', () => { + expect(method.isNFTEscrowed({ ...existingNFT, attributesArray: [] })).toBeFalse(); }); }); - describe('getLockingModule', () => { + describe('getNFT', () => { it('should fail if NFT does not exist', async () => { - await expect(method.getLockingModule(methodContext, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', - ); + await expect( + method.getNFT(methodContext, utils.getRandomBytes(LENGTH_NFT_ID)), + ).rejects.toThrow('NFT substore entry does not exist'); }); - it('should fail if NFT is escrowed', async () => { - owner = utils.getRandomBytes(LENGTH_CHAIN_ID); - - await nftStore.save(methodContext, nftID, { - owner, - attributesArray: [], - }); - - await expect(method.getLockingModule(methodContext, nftID)).rejects.toThrow( - 'NFT is escrowed to another chain', + it('should fail if NFT exist but the corresponding entry in the user store does not exist', async () => { + await userStore.del(methodContext, userStore.getKey(existingNFT.owner, existingNFT.nftID)); + await expect(method.getNFT(methodContext, existingNFT.nftID)).rejects.toThrow( + 'User substore entry does not exist', ); }); - it('should return the lockingModule for the owner of the NFT', async () => { - await nftStore.save(methodContext, nftID, { - owner, + it('should return NFT details if NFT and corresponding user store entry exist', async () => { + await expect(method.getNFT(methodContext, existingNFT.nftID)).resolves.toStrictEqual({ + owner: existingNFT.owner, attributesArray: [], + lockingModule: NFT_NOT_LOCKED, }); + }); + }); - await userStore.set(methodContext, userStore.getKey(owner, nftID), { - lockingModule, - }); + describe('isNFTLocked', () => { + it('should return true if nft is locked', () => { + expect(method.isNFTLocked({ ...lockedExistingNFT, attributesArray: [] })).toBeTrue(); + }); + + it('should return false if nft does not have 
locking module property', () => { + expect(method.isNFTLocked({ ...existingNFT, attributesArray: [] })).toBeFalse(); + }); - await expect(method.getLockingModule(methodContext, nftID)).resolves.toEqual(lockingModule); + it('should return false if nft is locked by module NFT_NOT_LOCKED', () => { + expect( + method.isNFTLocked({ ...existingNFT, lockingModule: NFT_NOT_LOCKED, attributesArray: [] }), + ).toBeFalse(); }); }); @@ -271,7 +267,7 @@ describe('NFTMethod', () => { const address = utils.getRandomBytes(LENGTH_ADDRESS); await expect(method.destroy(methodContext, address, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', + 'NFT does not exist', ); checkEventResult( @@ -427,63 +423,6 @@ describe('NFTMethod', () => { }); }); - describe('getAttributesArray', () => { - const expectedAttributesArray = [ - { module: 'customMod1', attributes: Buffer.alloc(5) }, - { module: 'customMod2', attributes: Buffer.alloc(2) }, - ]; - - it('should throw if entry does not exist in the nft substore for the nft id', async () => { - await expect(method.getAttributesArray(methodContext, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', - ); - }); - - it('should return attributes array if entry exists in the nft substore for the nft id', async () => { - await nftStore.save(methodContext, nftID, { - owner: utils.getRandomBytes(LENGTH_CHAIN_ID), - attributesArray: expectedAttributesArray, - }); - const returnedAttributesArray = await method.getAttributesArray(methodContext, nftID); - expect(returnedAttributesArray).toStrictEqual(expectedAttributesArray); - }); - }); - - describe('getAttributes', () => { - const module1 = 'customMod1'; - const module2 = 'customMod2'; - const module3 = 'customMod3'; - const expectedAttributesArray = [ - { module: module1, attributes: Buffer.alloc(5) }, - { module: module2, attributes: Buffer.alloc(2) }, - ]; - - beforeEach(async () => { - await nftStore.save(methodContext, nftID, { - owner: utils.getRandomBytes(LENGTH_CHAIN_ID), 
- attributesArray: expectedAttributesArray, - }); - }); - - it('should throw if entry does not exist in the nft substore for the nft id', async () => { - await nftStore.del(methodContext, nftID); - await expect(method.getAttributes(methodContext, module1, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', - ); - }); - - it('should return attributes if entry exists in the nft substore for the nft id and attributes exists for the requested module', async () => { - const returnedAttributes = await method.getAttributes(methodContext, module1, nftID); - expect(returnedAttributes).toStrictEqual(expectedAttributesArray[0].attributes); - }); - - it('should throw if entry exists in the nft substore for the nft id but no attributes exists for the requested module', async () => { - await expect(method.getAttributes(methodContext, module3, nftID)).rejects.toThrow( - 'Specific module did not set any attributes.', - ); - }); - }); - describe('getNextAvailableIndex', () => { const attributesArray = [ { module: 'customMod1', attributes: Buffer.alloc(5) }, @@ -631,7 +570,7 @@ describe('NFTMethod', () => { it('should throw and log LockEvent if NFT does not exist', async () => { await expect(method.lock(methodContext, lockingModule, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', + 'NFT does not exist', ); checkEventResult( @@ -713,7 +652,7 @@ describe('NFTMethod', () => { describe('unlock', () => { it('should throw and log LockEvent if NFT does not exist', async () => { await expect(method.unlock(methodContext, module.name, nftID)).rejects.toThrow( - 'NFT substore entry does not exist', + 'NFT does not exist', ); checkEventResult( @@ -804,7 +743,7 @@ describe('NFTMethod', () => { it('should throw and emit error transfer event if nft does not exist', async () => { await expect( method.transfer(methodContext, senderAddress, recipientAddress, nftID), - ).rejects.toThrow('NFT substore entry does not exist'); + ).rejects.toThrow('NFT does not exist'); 
checkEventResult( methodContext.eventQueue, 1, @@ -948,7 +887,7 @@ describe('NFTMethod', () => { data, includeAttributes, ), - ).rejects.toThrow('NFT substore entry does not exist'); + ).rejects.toThrow('NFT does not exist'); checkEventResult( methodContext.eventQueue, 1, @@ -1856,12 +1795,9 @@ describe('NFTMethod', () => { }, NftEventResult.RESULT_SUCCESSFUL, ); - const storedAttributes = await method.getAttributes( - methodContext, - module.name, - existingNFT.nftID, - ); - expect(storedAttributes).toStrictEqual(attributes); + const storedNFT = await method.getNFT(methodContext, existingNFT.nftID); + const storedAttributes = storedNFT.attributesArray.find(a => a.module === module.name); + expect(storedAttributes?.attributes).toStrictEqual(attributes); }); it('should update attributes if NFT exists and an entry already exists for the given module', async () => { @@ -1894,12 +1830,11 @@ describe('NFTMethod', () => { }, NftEventResult.RESULT_SUCCESSFUL, ); - const storedAttributes = await method.getAttributes( - methodContext, - attributesArray1[0].module, - existingNFT.nftID, + const storedNFT = await method.getNFT(methodContext, existingNFT.nftID); + const storedAttributes = storedNFT.attributesArray.find( + a => a.module === attributesArray1[0].module, ); - expect(storedAttributes).toStrictEqual(newAttributes); + expect(storedAttributes?.attributes).toStrictEqual(newAttributes); }); }); }); From 481445051ba2df228e617cfdb2e363ba1c67a523 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Thu, 28 Sep 2023 22:52:02 +0200 Subject: [PATCH 139/170] :arrow_up: Bump version --- commander/package.json | 24 ++++++------ .../templates/init/package-template.json | 8 ++-- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 10 ++--- elements/lisk-bft/package.json | 12 +++--- elements/lisk-chain/package.json | 14 +++---- elements/lisk-client/package.json | 18 ++++----- elements/lisk-codec/package.json | 6 +-- elements/lisk-cryptography/package.json | 2 
+- elements/lisk-elements/package.json | 28 +++++++------- elements/lisk-genesis/package.json | 12 +++--- elements/lisk-p2p/package.json | 8 ++-- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +-- elements/lisk-transactions/package.json | 8 ++-- elements/lisk-tree/package.json | 6 +-- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../package.json | 10 ++--- .../lisk-framework-faucet-plugin/package.json | 16 ++++---- .../lisk-framework-forger-plugin/package.json | 20 +++++----- .../package.json | 14 +++---- .../package.json | 14 +++---- .../package.json | 18 ++++----- framework/package.json | 28 +++++++------- protocol-specs/package.json | 8 ++-- sdk/package.json | 38 +++++++++---------- yarn.lock | 24 ++++++------ 28 files changed, 181 insertions(+), 181 deletions(-) diff --git a/commander/package.json b/commander/package.json index b744a736d05..f17605478f5 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "5.2.0-rc.0", + "version": "5.2.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -91,17 +91,17 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-client": "^5.3.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-api-client": "^5.2.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-client": "^5.3.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-genesis": "^0.3.0-rc.0", - "@liskhq/lisk-passphrase": "^3.2.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-genesis": "^0.3.0", + "@liskhq/lisk-passphrase": "^3.2.0", + 
"@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "@oclif/command": "1.8.16", "@oclif/config": "1.18.3", "@oclif/errors": "1.3.5", @@ -114,7 +114,7 @@ "cli-table3": "0.6.0", "fs-extra": "9.1.0", "inquirer": "8.0.0", - "lisk-framework": "^0.10.0-rc.0", + "lisk-framework": "^0.10.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.3.5", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 65151d58e15..8a474aae3ac 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -98,15 +98,15 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0-rc.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.2.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.2.0", "@oclif/command": "1.8.16", "@oclif/plugin-autocomplete": "1.2.0", "@oclif/plugin-help": "5.1.12", "fs-extra": "9.1.0", "inquirer": "7.3.2", - "lisk-commander": "^5.2.0-rc.0", - "lisk-sdk": "^5.3.0-rc.0", + "lisk-commander": "^5.2.0", + "lisk-sdk": "^5.3.0", "tar": "6.0.2", "tslib": "1.13.0", "axios": "0.21.1" diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index b2ace81c253..c2dba0ab990 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -29,7 +29,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "lisk-sdk": "^5.3.0-rc.0" + 
"lisk-sdk": "^5.3.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 1a348113f9d..9a5f184ca7d 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "5.2.0-rc.0", + "version": "5.2.0", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-transactions": "^5.3.0", "isomorphic-ws": "4.0.1", "pm2-axon": "4.0.1", "pm2-axon-rpc": "0.7.1", "ws": "7.5.7" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.4.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", "@types/node": "18.15.3", diff --git a/elements/lisk-bft/package.json b/elements/lisk-bft/package.json index c448bbe9db2..943ecb13028 100644 --- a/elements/lisk-bft/package.json +++ b/elements/lisk-bft/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-bft", - "version": "0.4.0-rc.0", + "version": "0.4.0", "description": "Byzantine fault tolerance implementation according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", 
+ "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "@types/node": "18.15.3", "debug": "4.3.4" }, diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index 6245a257d1c..98833356cf7 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.4.0-rc.0", + "version": "0.4.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,16 +36,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-tree": "^0.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-tree": "^0.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^3.2.0-rc.0", + "@liskhq/lisk-passphrase": "^3.2.0", "@types/debug": "4.1.7", "@types/faker": "4.1.10", "@types/jest": "26.0.21", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 3831b539188..c900092fdd1 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "5.3.0-rc.0", + "version": "5.3.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -55,14 +55,14 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - 
"@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-passphrase": "^3.2.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-tree": "^0.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-api-client": "^5.2.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-passphrase": "^3.2.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-tree": "^0.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index c1e5b9eb7c2..3bf5594db6e 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0" + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 3b9f0572302..8c72b213c06 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "3.3.0-rc.0", + "version": "3.3.0", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git 
a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 0f4f314cb97..76b37137476 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "5.3.0-rc.0", + "version": "5.3.0", "description": "Libraries to support building blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,19 +36,19 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-bft": "^0.4.0-rc.0", - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-genesis": "^0.3.0-rc.0", - "@liskhq/lisk-p2p": "^0.8.0-rc.0", - "@liskhq/lisk-passphrase": "^3.2.0-rc.0", - "@liskhq/lisk-transaction-pool": "^0.6.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-tree": "^0.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0" + "@liskhq/lisk-api-client": "^5.2.0", + "@liskhq/lisk-bft": "^0.4.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-genesis": "^0.3.0", + "@liskhq/lisk-p2p": "^0.8.0", + "@liskhq/lisk-passphrase": "^3.2.0", + "@liskhq/lisk-transaction-pool": "^0.6.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-tree": "^0.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-genesis/package.json b/elements/lisk-genesis/package.json index 6519e637ad5..be67a532432 100644 --- a/elements/lisk-genesis/package.json +++ b/elements/lisk-genesis/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-genesis", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "Library containing 
genesis block creation functions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,11 +36,11 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "lodash.clonedeep": "4.5.0" }, "devDependencies": { diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 37a0bf2c69c..7f5ec9a695d 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.8.0-rc.0", + "version": "0.8.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,9 +42,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-validator": "^0.7.0", "lodash.shuffle": "4.2.0", "semver": "7.3.5", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 0fa41a7752d..6994fbd7807 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "3.2.0-rc.0", + "version": "3.2.0", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve 
GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 2f267434245..6569379fe83 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.6.0-rc.0", + "version": "0.6.0", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,8 +37,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-utils": "^0.3.0", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 98f55660e09..7be8e5bd80d 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "5.3.0-rc.0", + "version": "5.3.0", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,9 +36,9 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0" + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-validator": "^0.7.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 575885bb549..80875258fa1 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": 
"@liskhq/lisk-tree", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0" + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-utils": "^0.3.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index 9027aae3dc3..e8c50467b87 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index 8a60275e67f..4cd784ff626 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.7.0-rc.0", + "version": "0.7.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,7 +37,7 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0", "ajv": "8.1.0", "ajv-formats": "2.0.2", "debug": "4.3.4", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index bd9dd54fb88..1559c6cca7a 100644 --- 
a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.2.0-rc.0", + "version": "0.2.0", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,12 +40,12 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-client": "^5.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", + "@liskhq/lisk-client": "^5.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-utils": "^0.3.0", "express": "4.17.3", "json-format-highlight": "1.0.4", - "lisk-framework": "^0.10.0-rc.0", + "lisk-framework": "^0.10.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index f72c6c33bf8..548debe8978 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.2.0-rc.0", + "version": "0.2.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,15 +41,15 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-client": "^5.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-api-client": "^5.2.0", + 
"@liskhq/lisk-client": "^5.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "axios": "1.3.2", "express": "4.17.3", - "lisk-framework": "^0.10.0-rc.0", + "lisk-framework": "^0.10.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 398305254f6..0bf78daa816 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "A plugin for lisk-framework that monitors configured delegates forging activity and voters information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,13 +38,13 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "axios": "1.3.2", "cors": "2.8.5", "debug": "4.3.4", @@ -52,11 +52,11 @@ "express-rate-limit": "5.1.3", "fs-extra": "9.1.0", "ip": "1.1.5", - "lisk-framework": "^0.10.0-rc.0" + "lisk-framework": "^0.10.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-genesis": "^0.3.0-rc.0", + "@liskhq/lisk-api-client": "^5.2.0", + "@liskhq/lisk-genesis": "^0.3.0", 
"@types/cors": "2.8.6", "@types/debug": "4.1.7", "@types/express": "4.17.6", diff --git a/framework-plugins/lisk-framework-http-api-plugin/package.json b/framework-plugins/lisk-framework-http-api-plugin/package.json index 122d5f0a703..0ac5e741b0a 100644 --- a/framework-plugins/lisk-framework-http-api-plugin/package.json +++ b/framework-plugins/lisk-framework-http-api-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-http-api-plugin", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "A plugin for lisk-framework that provides basic HTTP API endpoints to get running node information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,18 +37,18 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.10.0-rc.0" + "lisk-framework": "^0.10.0" }, "devDependencies": { - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-transactions": "^5.3.0", "@types/cors": "2.8.6", "@types/express": "4.17.6", "@types/express-rate-limit": "5.0.0", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index e315df05678..2f37e9620b1 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "A plugin for lisk-framework that provides network statistics of the 
running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -37,16 +37,16 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "cors": "2.8.5", "express": "4.17.3", "express-rate-limit": "5.1.3", "ip": "1.1.5", - "lisk-framework": "^0.10.0-rc.0" + "lisk-framework": "^0.10.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 0be5a556fa4..2ebcfd2afbd 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.3.0-rc.0", + "version": "0.3.0", "description": "A plugin for lisk-framework that provides automatic detection of delegate misbehavior and sends a reportDelegateMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,17 +38,17 @@ "prepublishOnly": "npm run lint && npm test && npm run build && npm run build:check" }, "dependencies": { - "@liskhq/lisk-bft": "^0.4.0-rc.0", - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-bft": "^0.4.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", "@liskhq/lisk-db": "^0.3.6", - 
"@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "debug": "4.3.4", "fs-extra": "9.1.0", - "lisk-framework": "^0.10.0-rc.0" + "lisk-framework": "^0.10.0" }, "devDependencies": { "@types/cors": "2.8.6", diff --git a/framework/package.json b/framework/package.json index 61dfbc81fab..218dc4867cf 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.10.0-rc.0", + "version": "0.10.0", "description": "Framework to build blockchain applications according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,19 +40,19 @@ "test:functional": "jest --config=./test/functional/jest.config.js --runInBand" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-bft": "^0.4.0-rc.0", - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-api-client": "^5.2.0", + "@liskhq/lisk-bft": "^0.4.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-genesis": "^0.3.0-rc.0", - "@liskhq/lisk-p2p": "^0.8.0-rc.0", - "@liskhq/lisk-transaction-pool": "^0.6.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-tree": "^0.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", + "@liskhq/lisk-genesis": "^0.3.0", + "@liskhq/lisk-p2p": "^0.8.0", + "@liskhq/lisk-transaction-pool": "^0.6.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-tree": "^0.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.5", @@ -64,7 +64,7 @@ "ws": "7.5.7" }, 
"devDependencies": { - "@liskhq/lisk-passphrase": "^3.2.0-rc.0", + "@liskhq/lisk-passphrase": "^3.2.0", "@types/bunyan": "1.8.6", "@types/jest": "26.0.21", "@types/jest-when": "2.7.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index b5017858cf7..56a97ca662d 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.3.0-rc.0", - "@liskhq/lisk-cryptography": "3.3.0-rc.0", - "@liskhq/lisk-passphrase": "3.2.0-rc.0", - "@liskhq/lisk-validator": "0.7.0-rc.0", + "@liskhq/lisk-codec": "0.3.0", + "@liskhq/lisk-cryptography": "3.3.0", + "@liskhq/lisk-passphrase": "3.2.0", + "@liskhq/lisk-validator": "0.7.0", "protobufjs": "6.9.0" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index c73c5f81f68..b6af2368dc5 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "5.3.0-rc.0", + "version": "5.3.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,25 +29,25 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^5.2.0-rc.0", - "@liskhq/lisk-bft": "^0.4.0-rc.0", - "@liskhq/lisk-chain": "^0.4.0-rc.0", - "@liskhq/lisk-codec": "^0.3.0-rc.0", - "@liskhq/lisk-cryptography": "^3.3.0-rc.0", + "@liskhq/lisk-api-client": "^5.2.0", + "@liskhq/lisk-bft": "^0.4.0", + "@liskhq/lisk-chain": "^0.4.0", + "@liskhq/lisk-codec": "^0.3.0", + "@liskhq/lisk-cryptography": "^3.3.0", "@liskhq/lisk-db": "^0.3.6", - "@liskhq/lisk-framework-forger-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-http-api-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-genesis": "^0.3.0-rc.0", - "@liskhq/lisk-p2p": "^0.8.0-rc.0", - "@liskhq/lisk-passphrase": "^3.2.0-rc.0", - 
"@liskhq/lisk-transaction-pool": "^0.6.0-rc.0", - "@liskhq/lisk-transactions": "^5.3.0-rc.0", - "@liskhq/lisk-tree": "^0.3.0-rc.0", - "@liskhq/lisk-utils": "^0.3.0-rc.0", - "@liskhq/lisk-validator": "^0.7.0-rc.0", - "lisk-framework": "^0.10.0-rc.0" + "@liskhq/lisk-framework-forger-plugin": "^0.3.0", + "@liskhq/lisk-framework-http-api-plugin": "^0.3.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.3.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.3.0", + "@liskhq/lisk-genesis": "^0.3.0", + "@liskhq/lisk-p2p": "^0.8.0", + "@liskhq/lisk-passphrase": "^3.2.0", + "@liskhq/lisk-transaction-pool": "^0.6.0", + "@liskhq/lisk-transactions": "^5.3.0", + "@liskhq/lisk-tree": "^0.3.0", + "@liskhq/lisk-utils": "^0.3.0", + "@liskhq/lisk-validator": "^0.7.0", + "lisk-framework": "^0.10.0" }, "devDependencies": { "eslint": "7.22.0", diff --git a/yarn.lock b/yarn.lock index 1ccd44c92de..cd292b061cb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16831,32 +16831,32 @@ semver@7.0.0: resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== -semver@7.3.4, semver@7.x, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2: +semver@7.3.4: version "7.3.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== dependencies: lru-cache "^6.0.0" -semver@7.3.5, semver@^7.1.1, semver@^7.3.4, semver@^7.3.5: +semver@7.3.5: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== dependencies: lru-cache "^6.0.0" +semver@7.x, semver@^7.0.0, semver@^7.1.1, semver@^7.1.3, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, 
semver@^7.3.5, semver@^7.3.7: + version "7.5.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" + semver@^6.0.0, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.0.0, semver@^7.3.7: - version "7.3.8" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" - integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== - dependencies: - lru-cache "^6.0.0" - send@0.17.1: version "0.17.1" resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" @@ -19324,9 +19324,9 @@ widest-line@^3.1.0: string-width "^4.0.0" word-wrap@^1.2.3, word-wrap@~1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + version "1.2.5" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" + integrity "sha1-0sRcbdT7zmIaZvE2y+Mor9BBCzQ= sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==" wordwrap@^1.0.0: version "1.0.0" From 79b8644b0cbaa6efffa31b40987186ebb3ce48a8 Mon Sep 17 00:00:00 2001 From: sitetester Date: Mon, 2 Oct 2023 18:40:20 +0300 Subject: [PATCH 140/170] Add/update cross chain tests (#9007) --- .../base_cross_chain_update_command.spec.ts | 13 +++++++++++++ ...ubmit_mainchain_cross_chain_update.spec.ts | 19 ++++++++++++++++++- 
...ubmit_sidechain_cross_chain_update.spec.ts | 18 +++++------------- 3 files changed, 36 insertions(+), 14 deletions(-) diff --git a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts index 769a49c9a9b..328baebba70 100644 --- a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts +++ b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts @@ -321,6 +321,19 @@ describe('BaseCrossChainUpdateCommand', () => { .set(stateStore, params.sendingChainID, chainAccount); }); + it('should reject when ccu params validation fails', async () => { + const nonBufferSendingChainID = 2; + verifyContext = { + ...verifyContext, + params: { ...params, sendingChainID: nonBufferSendingChainID } as any, + }; + + // 2nd param `isMainchain` could be false + await expect(command['verifyCommon'](verifyContext, false)).rejects.toThrow( + `Property '.sendingChainID' should pass "dataType" keyword validation`, + ); + }); + it('should call validator.validate with crossChainUpdateTransactionParams schema', async () => { jest.spyOn(validator, 'validate'); diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts index 2e5d92d3480..9bf2a422351 100644 --- a/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts @@ -413,7 +413,7 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { }); }); - it('should verify verifyCommon is called', async () => { + it('should check if verifyCommon is called', async () => { jest.spyOn(mainchainCCUUpdateCommand, 'verifyCommon' as any); await 
expect(mainchainCCUUpdateCommand.verify(verifyContext)).resolves.toEqual({ @@ -423,6 +423,23 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { expect(mainchainCCUUpdateCommand['verifyCommon']).toHaveBeenCalled(); }); + it('should call isLive with 3 params', async () => { + jest.spyOn(mainchainCCUUpdateCommand['internalMethod'], 'isLive'); + + await expect( + mainchainCCUUpdateCommand.verify({ + ...verifyContext, + params: { ...params } as any, + }), + ).resolves.toEqual({ status: VerifyStatus.OK }); + + expect(mainchainCCUUpdateCommand['internalMethod'].isLive).toHaveBeenCalledWith( + verifyContext, + verifyContext.params.sendingChainID, + verifyContext.header.timestamp, + ); + }); + it(`should not verify liveness condition when sendingChainAccount.status == ${ChainStatus.REGISTERED} and inboxUpdate is empty`, async () => { await expect( mainchainCCUUpdateCommand.verify({ diff --git a/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts b/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts index decb0fbe118..e2dcf9db178 100644 --- a/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts @@ -303,7 +303,7 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { jest.spyOn(sidechainCCUUpdateCommand['internalMethod'], 'isLive').mockResolvedValue(true); }); - it('should verify verifyCommon is called', async () => { + it('should check if verifyCommon is called', async () => { jest.spyOn(sidechainCCUUpdateCommand, 'verifyCommon' as any); await expect(sidechainCCUUpdateCommand.verify(verifyContext)).resolves.toEqual({ @@ -313,15 +313,6 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { expect(sidechainCCUUpdateCommand['verifyCommon']).toHaveBeenCalled(); }); - it('should 
reject when ccu params validation fails', async () => { - await expect( - sidechainCCUUpdateCommand.verify({ - ...verifyContext, - params: { ...params, sendingChainID: 2 } as any, - }), - ).rejects.toThrow('.sendingChainID'); - }); - it('should call isLive with only 2 params', async () => { jest.spyOn(sidechainCCUUpdateCommand['internalMethod'], 'isLive'); @@ -332,15 +323,16 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { }), ).resolves.toEqual({ status: VerifyStatus.OK }); - expect(sidechainCCUUpdateCommand['internalMethod'].isLive).toHaveBeenCalledWith( + expect(sidechainCCUUpdateCommand['internalMethod'].isLive).not.toHaveBeenCalledWith( verifyContext, verifyContext.params.sendingChainID, + verifyContext.header.timestamp, ); - expect(sidechainCCUUpdateCommand['internalMethod'].isLive).not.toHaveBeenCalledWith( + // should be tested later, otherwise, it can pass even if above fails + expect(sidechainCCUUpdateCommand['internalMethod'].isLive).toHaveBeenCalledWith( verifyContext, verifyContext.params.sendingChainID, - verifyContext.header.timestamp, ); }); }); From fe906529d86ec1c9c94012095a72d6a615a8331b Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Wed, 4 Oct 2023 08:48:06 +0200 Subject: [PATCH 141/170] Add missing unit test (#9051) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ Add missing unit test --- elements/lisk-chain/test/unit/transactions.spec.ts | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/elements/lisk-chain/test/unit/transactions.spec.ts b/elements/lisk-chain/test/unit/transactions.spec.ts index 005dfb78974..be123d0aa0c 100644 --- a/elements/lisk-chain/test/unit/transactions.spec.ts +++ b/elements/lisk-chain/test/unit/transactions.spec.ts @@ -33,6 +33,20 @@ describe('blocks/transactions', () => { expect(() => transaction.validate()).not.toThrow(); }); + it('should not throw an error if params length is less than MAX_PARAMS_SIZE', () => { + transaction = new 
Transaction({ + module: 'token', + command: 'transfer', + fee: BigInt(613000), + // 126 is the size of other properties + params: utils.getRandomBytes(MAX_PARAMS_SIZE - 1), + nonce: BigInt(2), + senderPublicKey: utils.getRandomBytes(32), + signatures: [utils.getRandomBytes(64)], + }); + expect(() => transaction.validate()).not.toThrow(); + }); + it('should throw when module name is invalid', () => { transaction = new Transaction({ module: 'token_mod', From 2d69d976d505138081c5c56df92cfc5b9766080c Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Wed, 4 Oct 2023 09:14:50 +0200 Subject: [PATCH 142/170] Resolves TODO comments (#9050) * :label: Updates TokenMethod typedef for BaseInteroperabilityMethod * :fire: UserStore.accountExist for Token module * Removes comment from utils.ts in Interoperability module * Removes comment from lisk-chain constants * :label: Removes casting of NetworkEndpoint to EndpointArgs in Consensus.init * Removes comment to reuse checkEventResult utility; Issue #9047 targetting resuability of checkEventResult * Removes comment; as the title message works expected --- elements/lisk-chain/src/constants.ts | 1 - framework/src/engine/consensus/consensus.ts | 4 ++-- .../base_interoperability_method.ts | 21 ++----------------- .../src/modules/interoperability/utils.ts | 1 - framework/src/modules/token/stores/user.ts | 13 ++---------- .../interoperability/internal_method.spec.ts | 1 - .../pos/commands/register_validator.spec.ts | 1 - 7 files changed, 6 insertions(+), 36 deletions(-) diff --git a/elements/lisk-chain/src/constants.ts b/elements/lisk-chain/src/constants.ts index b8e792b30f7..fbd33b11848 100644 --- a/elements/lisk-chain/src/constants.ts +++ b/elements/lisk-chain/src/constants.ts @@ -32,7 +32,6 @@ export const GENESIS_BLOCK_TRANSACTION_ROOT = EMPTY_HASH; export const TAG_BLOCK_HEADER = utils.createMessageTag('BH'); export const TAG_TRANSACTION = utils.createMessageTag('TX'); -// TODO: Actual size TBD 
export const MAX_ASSET_DATA_SIZE_BYTES = 18; export const SIGNATURE_LENGTH_BYTES = 64; diff --git a/framework/src/engine/consensus/consensus.ts b/framework/src/engine/consensus/consensus.ts index 87726032971..41ac71952e8 100644 --- a/framework/src/engine/consensus/consensus.ts +++ b/framework/src/engine/consensus/consensus.ts @@ -35,7 +35,7 @@ import { ApplyPenaltyError } from '../../errors'; import { AbortError, ApplyPenaltyAndRestartError, RestartError } from './synchronizer/errors'; import { BlockExecutor } from './synchronizer/type'; import { Network } from '../network'; -import { NetworkEndpoint, EndpointArgs } from './network_endpoint'; +import { NetworkEndpoint } from './network_endpoint'; import { LegacyNetworkEndpoint } from '../legacy/network_endpoint'; import { EventPostBlockData, postBlockEventSchema } from './schema'; import { @@ -158,7 +158,7 @@ export class Consensus { network: this._network, db: this._db, commitPool: this._commitPool, - } as EndpointArgs); // TODO: Remove casting in issue where commitPool is added here + }); this._legacyEndpoint = new LegacyNetworkEndpoint({ logger: this._logger, network: this._network, diff --git a/framework/src/modules/interoperability/base_interoperability_method.ts b/framework/src/modules/interoperability/base_interoperability_method.ts index baedbec6b50..ac60c77d502 100644 --- a/framework/src/modules/interoperability/base_interoperability_method.ts +++ b/framework/src/modules/interoperability/base_interoperability_method.ts @@ -37,14 +37,7 @@ import { getMainchainID } from './utils'; export abstract class BaseInteroperabilityMethod< T extends BaseInteroperabilityInternalMethod, > extends BaseMethod { - protected _tokenMethod!: TokenMethod & { - payMessageFee: ( - context: MethodContext, - payFromAddress: Buffer, - fee: bigint, - receivingChainID: Buffer, - ) => Promise; - }; + protected _tokenMethod!: TokenMethod; public constructor( stores: NamedRegistry, @@ -55,17 +48,7 @@ export abstract class 
BaseInteroperabilityMethod< super(stores, events); } - public addDependencies( - tokenMethod: TokenMethod & { - // TODO: Remove this after token module update - payMessageFee: ( - context: MethodContext, - payFromAddress: Buffer, - fee: bigint, - receivingChainID: Buffer, - ) => Promise; - }, - ) { + public addDependencies(tokenMethod: TokenMethod) { this._tokenMethod = tokenMethod; } diff --git a/framework/src/modules/interoperability/utils.ts b/framework/src/modules/interoperability/utils.ts index cc1278d68af..478fba37db1 100644 --- a/framework/src/modules/interoperability/utils.ts +++ b/framework/src/modules/interoperability/utils.ts @@ -275,7 +275,6 @@ export const getMainchainID = (chainID: Buffer): Buffer => { return Buffer.concat([networkID, Buffer.alloc(CHAIN_ID_LENGTH - 1, 0)]); }; -// TODO: Update to use Token method after merging development export const getTokenIDLSK = (chainID: Buffer): Buffer => { const networkID = chainID.subarray(0, 1); // 3 bytes for remaining chainID bytes diff --git a/framework/src/modules/token/stores/user.ts b/framework/src/modules/token/stores/user.ts index bc84a7960de..b1e2e37e888 100644 --- a/framework/src/modules/token/stores/user.ts +++ b/framework/src/modules/token/stores/user.ts @@ -12,8 +12,8 @@ * Removal or modification of this copyright notice is prohibited. 
*/ import { NotFoundError } from '@liskhq/lisk-db'; -import { BaseStore, ImmutableStoreGetter, StoreGetter } from '../../base_store'; -import { MAX_MODULE_NAME_LENGTH, MIN_MODULE_NAME_LENGTH, TOKEN_ID_LENGTH } from '../constants'; +import { BaseStore, StoreGetter } from '../../base_store'; +import { MAX_MODULE_NAME_LENGTH, MIN_MODULE_NAME_LENGTH } from '../constants'; import { TokenID } from '../types'; export interface UserStoreData { @@ -53,15 +53,6 @@ export const userStoreSchema = { export class UserStore extends BaseStore { public schema = userStoreSchema; - // TODO: Remove this function when updating the methods - public async accountExist(context: ImmutableStoreGetter, address: Buffer): Promise { - const allUserData = await this.iterate(context, { - gte: Buffer.concat([address, Buffer.alloc(TOKEN_ID_LENGTH, 0)]), - lte: Buffer.concat([address, Buffer.alloc(TOKEN_ID_LENGTH, 255)]), - }); - return allUserData.length !== 0; - } - public async createDefaultAccount( context: StoreGetter, address: Buffer, diff --git a/framework/test/unit/modules/interoperability/internal_method.spec.ts b/framework/test/unit/modules/interoperability/internal_method.spec.ts index 6899c5b0f6b..a7acb99820c 100644 --- a/framework/test/unit/modules/interoperability/internal_method.spec.ts +++ b/framework/test/unit/modules/interoperability/internal_method.spec.ts @@ -589,7 +589,6 @@ describe('Base interoperability internal method', () => { }, ]; - // TODO: I have no idea why `$title` is not working, fix this it.each(testCases)('$title', async ({ changedValues }) => { // Assign const isValueChanged = await interopMod.stores diff --git a/framework/test/unit/modules/pos/commands/register_validator.spec.ts b/framework/test/unit/modules/pos/commands/register_validator.spec.ts index 4fc8df99a59..f5bd045833c 100644 --- a/framework/test/unit/modules/pos/commands/register_validator.spec.ts +++ b/framework/test/unit/modules/pos/commands/register_validator.spec.ts @@ -86,7 +86,6 @@ 
describe('Validator registration command', () => { 'hex', ); - // TODO: move this function to utils and import from all other tests using it const checkEventResult = ( eventQueue: EventQueue, EventClass: any, From d9e8a89fede5af6726e00b1960cdeaf77fdfcfce Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Thu, 5 Oct 2023 00:46:11 +0200 Subject: [PATCH 143/170] Updates type parameter for StateStore.getStore (#9046) :label: Updates type parameter for StateStore.getStore --- .../lisk-chain/src/state_store/state_store.ts | 13 +--- elements/lisk-chain/test/unit/chain.spec.ts | 2 +- .../test/unit/state_store/state_store.spec.ts | 61 ++++++++++--------- framework/src/engine/bft/constants.ts | 4 +- .../src/state_machine/generator_context.ts | 2 +- 5 files changed, 39 insertions(+), 43 deletions(-) diff --git a/elements/lisk-chain/src/state_store/state_store.ts b/elements/lisk-chain/src/state_store/state_store.ts index 140a22f0cb9..49fdab8e377 100644 --- a/elements/lisk-chain/src/state_store/state_store.ts +++ b/elements/lisk-chain/src/state_store/state_store.ts @@ -47,19 +47,10 @@ export class StateStore { this._latestSnapshotId = -1; } - // TODO: Remove accepting number for subStorePrefix - public getStore(storePrefix: Buffer, subStorePrefix: Buffer | number): StateStore { - let storePrefixBuffer: Buffer; - if (typeof subStorePrefix === 'number') { - storePrefixBuffer = Buffer.alloc(2); - storePrefixBuffer.writeUInt16BE(subStorePrefix, 0); - } else { - storePrefixBuffer = subStorePrefix; - } - + public getStore(storePrefix: Buffer, subStorePrefix: Buffer): StateStore { const subStore = new StateStore( this._db, - Buffer.concat([DB_KEY_STATE_STORE, storePrefix, storePrefixBuffer]), + Buffer.concat([DB_KEY_STATE_STORE, storePrefix, subStorePrefix]), this._cache, ); diff --git a/elements/lisk-chain/test/unit/chain.spec.ts b/elements/lisk-chain/test/unit/chain.spec.ts index 70c6a2754b1..4eea93c6c60 100644 --- 
a/elements/lisk-chain/test/unit/chain.spec.ts +++ b/elements/lisk-chain/test/unit/chain.spec.ts @@ -201,7 +201,7 @@ describe('chain', () => { beforeEach(async () => { stateStore = new StateStore(db); jest.spyOn(stateStore, 'finalize'); - const subStore = stateStore.getStore(utils.intToBuffer(2, 4), 0); + const subStore = stateStore.getStore(utils.intToBuffer(2, 4), Buffer.from([0, 0])); await subStore.set(utils.getRandomBytes(20), utils.getRandomBytes(100)); batch = new Batch(); jest.spyOn(batch, 'set'); diff --git a/elements/lisk-chain/test/unit/state_store/state_store.spec.ts b/elements/lisk-chain/test/unit/state_store/state_store.spec.ts index 27052098dba..9ee742617dc 100644 --- a/elements/lisk-chain/test/unit/state_store/state_store.spec.ts +++ b/elements/lisk-chain/test/unit/state_store/state_store.spec.ts @@ -31,7 +31,15 @@ const sampleSchema = { describe('state store', () => { let moduleID = utils.intToBuffer(2, 4); + const storePrefix = 0; + const storePrefixBuffer = Buffer.alloc(2); + storePrefixBuffer.writeUInt16BE(storePrefix, 0); + + const anotherStorePrefix = 1; + const anotherStorePrefixBuffer = Buffer.alloc(2); + anotherStorePrefixBuffer.writeUInt16BE(anotherStorePrefix, 0); + const existingKey = utils.getRandomBytes(20); const existingKey2 = utils.getRandomBytes(20); const existingValue = utils.getRandomBytes(64); @@ -43,8 +51,6 @@ describe('state store', () => { beforeEach(async () => { db = new InMemoryDatabase(); stateStore = new StateStore(db); - const storePrefixBuffer = Buffer.alloc(2); - storePrefixBuffer.writeUInt16BE(storePrefix, 0); await db.set( Buffer.concat([stateStore['_prefix'], moduleID, storePrefixBuffer, existingKey]), existingValue, @@ -59,17 +65,17 @@ describe('state store', () => { it('should keep the same cache as the original state store', async () => { const address = utils.getRandomBytes(20); const value = utils.getRandomBytes(64); - const subStore = stateStore.getStore(utils.intToBuffer(2, 4), 0); + const subStore = 
stateStore.getStore(utils.intToBuffer(2, 4), storePrefixBuffer); await subStore.set(address, value); // create different store from the state store - const newSubStore = stateStore.getStore(utils.intToBuffer(2, 4), 0); + const newSubStore = stateStore.getStore(utils.intToBuffer(2, 4), storePrefixBuffer); const valueFromNewStore = await newSubStore.get(address); expect(valueFromNewStore).toEqual(value); }); it('should append the prefix', () => { - const subStore = stateStore.getStore(utils.intToBuffer(2, 4), 0); + const subStore = stateStore.getStore(utils.intToBuffer(2, 4), storePrefixBuffer); // db prefix(1) + moduleID(4) + storePrefix(2) expect(subStore['_prefix']).toHaveLength(1 + 4 + 2); }); @@ -77,7 +83,7 @@ describe('state store', () => { describe('get', () => { it('should get from the cache if the key already exist', async () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); const newKey = utils.getRandomBytes(20); await subStore.set(newKey, utils.getRandomBytes(10)); jest.spyOn(db, 'get'); @@ -89,7 +95,7 @@ describe('state store', () => { it('should get from the database if the key does not exist', async () => { jest.spyOn(db, 'get'); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); const value = await subStore.get(existingKey); @@ -98,7 +104,7 @@ describe('state store', () => { }); it('should return copied value', async () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); const value = await subStore.get(existingKey); value[0] = 233; @@ -109,7 +115,7 @@ describe('state store', () => { }); it('should throw not found error if deleted in the key', async () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await 
subStore.del(existingKey); await expect(subStore.get(existingKey)).rejects.toThrow(NotFoundError); @@ -120,7 +126,7 @@ describe('state store', () => { it('should return decoded value', async () => { const address = utils.getRandomBytes(20); const encodedValue = codec.encode(sampleSchema, { address }); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.set(address, encodedValue); const value = await subStore.getWithSchema>(address, sampleSchema); @@ -133,7 +139,7 @@ describe('state store', () => { it('should update the cached value if it exist in the cache', async () => { const address = utils.getRandomBytes(20); const value = utils.getRandomBytes(50); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.set(address, value); const updatingValue = await subStore.get(address); @@ -150,7 +156,7 @@ describe('state store', () => { jest.spyOn(db, 'get'); const address = utils.getRandomBytes(20); const value = utils.getRandomBytes(50); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.set(address, value); const updatingValue = await subStore.get(address); @@ -164,7 +170,7 @@ describe('state store', () => { it('should set encoded value', async () => { const address = utils.getRandomBytes(20); const encodedValue = codec.encode(sampleSchema, { address }); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.setWithSchema(address, { address }, sampleSchema); const value = await subStore.get(address); @@ -178,7 +184,7 @@ describe('state store', () => { const value = utils.getRandomBytes(50); it('should mark as deleted if it exists in the cache', async () => { - const subStore = 
stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.set(address, value); await subStore.del(address); @@ -188,7 +194,7 @@ describe('state store', () => { it('should cache the original value and mark as deleted if it does not in the cache', async () => { jest.spyOn(db, 'get'); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.del(existingKey); expect(db.get).toHaveReturnedTimes(1); @@ -198,7 +204,7 @@ describe('state store', () => { describe('iterate', () => { it('should return all the key-values with the prefix', async () => { - const subStore = stateStore.getStore(moduleID, 1); + const subStore = stateStore.getStore(moduleID, anotherStorePrefixBuffer); await subStore.set(Buffer.from([0]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([1]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([2]), utils.getRandomBytes(40)); @@ -214,9 +220,9 @@ describe('state store', () => { }); it('should return all the key-values with the prefix in reverse order', async () => { - const existingStore = stateStore.getStore(moduleID, storePrefix); + const existingStore = stateStore.getStore(moduleID, storePrefixBuffer); await existingStore.set(Buffer.from([0]), utils.getRandomBytes(40)); - const subStore = stateStore.getStore(moduleID, 1); + const subStore = stateStore.getStore(moduleID, anotherStorePrefixBuffer); await subStore.set(Buffer.from([0]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([1]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([2]), utils.getRandomBytes(40)); @@ -234,7 +240,7 @@ describe('state store', () => { }); it('should not return the deleted values', async () => { - const subStore = stateStore.getStore(moduleID, 1); + const subStore = stateStore.getStore(moduleID, anotherStorePrefixBuffer); await subStore.set(Buffer.from([0]), 
utils.getRandomBytes(40)); await subStore.set(Buffer.from([1]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([2]), utils.getRandomBytes(40)); @@ -252,7 +258,7 @@ describe('state store', () => { it('should return the updated values in the cache', async () => { const expectedValue = Buffer.from('random'); - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.set(Buffer.from([0]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([1]), utils.getRandomBytes(40)); await subStore.set(Buffer.from([2]), utils.getRandomBytes(40)); @@ -273,7 +279,7 @@ describe('state store', () => { it('should return decoded value', async () => { const address = utils.getRandomBytes(20); const encodedValue = codec.encode(sampleSchema, { address }); - const subStore = stateStore.getStore(moduleID, 1); + const subStore = stateStore.getStore(moduleID, anotherStorePrefixBuffer); await subStore.set(Buffer.from([0]), encodedValue); await subStore.set(Buffer.from([1]), encodedValue); await subStore.set(Buffer.from([2]), encodedValue); @@ -294,7 +300,7 @@ describe('state store', () => { describe('snapshot', () => { it('should not change the snapshot data when other operation is triggered', async () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); subStore.createSnapshot(); const expected = utils.getRandomBytes(64); await subStore.set(Buffer.from([0]), expected); @@ -305,7 +311,7 @@ describe('state store', () => { }); it('should restore to snapshot value when the restore is called', async () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); const id = subStore.createSnapshot(); await subStore.set(Buffer.from([0]), utils.getRandomBytes(64)); await subStore.del(existingKey); @@ -318,7 +324,7 @@ describe('state store', 
() => { }); it('should throw an error when restoring with an invalid snapshot ID', () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); expect(() => subStore.restoreSnapshot(100)).toThrow( 'Invalid snapshot ID. Cannot revert to an older snapshot.', @@ -326,7 +332,7 @@ describe('state store', () => { }); it('should throw an error when restoring without taking a snapshot first', () => { - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); expect(() => subStore.restoreSnapshot(0)).toThrow( 'Invalid snapshot ID. Cannot revert to an older snapshot.', @@ -348,7 +354,6 @@ describe('state store', () => { const getKey = (mID: number, prefix: number) => { moduleID = Buffer.alloc(4); moduleID.writeInt32BE(mID, 0); - const storePrefixBuffer = Buffer.alloc(2); storePrefixBuffer.writeUInt16BE(prefix, 0); return Buffer.concat([moduleID, storePrefixBuffer]); }; @@ -358,10 +363,10 @@ describe('state store', () => { beforeEach(async () => { data = [getRandomData(), getRandomData(), getRandomData()]; - const subStore = stateStore.getStore(moduleID, storePrefix); + const subStore = stateStore.getStore(moduleID, storePrefixBuffer); await subStore.set(existingKey, utils.getRandomBytes(40)); await subStore.del(existingKey2); - const anotherStore = stateStore.getStore(moduleID, 1); + const anotherStore = stateStore.getStore(moduleID, anotherStorePrefixBuffer); for (const sample of data) { await anotherStore.set(sample.key, sample.value); } diff --git a/framework/src/engine/bft/constants.ts b/framework/src/engine/bft/constants.ts index 90fddc54f2b..9c1f563fcf5 100644 --- a/framework/src/engine/bft/constants.ts +++ b/framework/src/engine/bft/constants.ts @@ -19,8 +19,8 @@ export const MODULE_STORE_PREFIX_BFT = Buffer.from([0, 0, 0, 0]); export const ED25519_PUBLIC_KEY_LENGTH = 32; export const BLS_PUBLIC_KEY_LENGTH = 48; export 
const EMPTY_BLS_KEY = Buffer.alloc(BLS_PUBLIC_KEY_LENGTH, 0); -export const STORE_PREFIX_BFT_PARAMETERS = 0x0000; -export const STORE_PREFIX_BFT_VOTES = 0x8000; +export const STORE_PREFIX_BFT_PARAMETERS = Buffer.from([0x00, 0x00]); +export const STORE_PREFIX_BFT_VOTES = Buffer.from([0x80, 0x00]); export const EMPTY_KEY = Buffer.alloc(0); export const MAX_UINT32 = 2 ** 32 - 1; diff --git a/framework/src/state_machine/generator_context.ts b/framework/src/state_machine/generator_context.ts index 8f0fa12fb22..b1cfc103191 100644 --- a/framework/src/state_machine/generator_context.ts +++ b/framework/src/state_machine/generator_context.ts @@ -68,7 +68,7 @@ export class GenerationContext { this._stateStore.getStore(moduleID, storePrefix), stateStore: this._stateStore, getOffchainStore: (moduleID: Buffer, subStorePrefix: Buffer) => - this._generatorStore.getStore(moduleID, subStorePrefix.readUInt16BE(0)), + this._generatorStore.getStore(moduleID, subStorePrefix), header: this._header, assets: this._assets, chainID: this._chainID, From 5b2792f6cfb2898da2432fe9693b3d437655cd6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Sat, 7 Oct 2023 11:05:37 +0200 Subject: [PATCH 144/170] Refactor NFT transfer verification (#9005) * Extract common NFT transfer verification functionality to internal method * Move all NFT transfer verification to internal methods * Remove dependency on Interop method * Improve error handling * Change Error class * NFT transfers now only log TransferVerifyError errors * Add new error instance and check for db related errors * Use unlock event * Update order of checks * Remove unnecessary branch --------- Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> --- .../src/modules/nft/commands/transfer.ts | 38 +- .../nft/commands/transfer_cross_chain.ts | 71 +--- framework/src/modules/nft/error.ts | 15 + framework/src/modules/nft/events/unlock.ts | 5 + framework/src/modules/nft/internal_method.ts | 175 ++++++++- 
framework/src/modules/nft/method.ts | 346 ++++++------------ framework/src/modules/nft/module.ts | 11 +- framework/src/modules/nft/types.ts | 3 + .../nft/cc_comands/cc_transfer.spec.ts | 18 +- .../modules/nft/commands/transfer.spec.ts | 54 ++- .../nft/commands/transfer_cross_chain.spec.ts | 97 +++-- .../unit/modules/nft/internal_method.spec.ts | 27 +- .../test/unit/modules/nft/method.spec.ts | 95 +++-- 13 files changed, 504 insertions(+), 451 deletions(-) create mode 100644 framework/src/modules/nft/error.ts diff --git a/framework/src/modules/nft/commands/transfer.ts b/framework/src/modules/nft/commands/transfer.ts index 8588035a0dd..bd5ae39d2c8 100644 --- a/framework/src/modules/nft/commands/transfer.ts +++ b/framework/src/modules/nft/commands/transfer.ts @@ -20,10 +20,9 @@ import { } from '../../../state_machine'; import { BaseCommand } from '../../base_command'; import { transferParamsSchema } from '../schemas'; -import { NFTMethod } from '../method'; import { InternalMethod } from '../internal_method'; -export interface Params { +export interface TransferParams { nftID: Buffer; recipientAddress: Buffer; data: string; @@ -31,35 +30,26 @@ export interface Params { export class TransferCommand extends BaseCommand { public schema = transferParamsSchema; - private _method!: NFTMethod; private _internalMethod!: InternalMethod; - public init(args: { method: NFTMethod; internalMethod: InternalMethod }) { - this._method = args.method; + public init(args: { internalMethod: InternalMethod }) { this._internalMethod = args.internalMethod; } - public async verify(context: CommandVerifyContext): Promise { + public async verify(context: CommandVerifyContext): Promise { const { params } = context; - const methodContext = context.getMethodContext(); - let nft; try { - nft = await this._method.getNFT(methodContext, params.nftID); + await this._internalMethod.verifyTransfer( + context.getMethodContext(), + context.transaction.senderAddress, + params.nftID, + ); } catch (error) { - 
throw new Error('NFT does not exist'); - } - - if (this._method.isNFTEscrowed(nft)) { - throw new Error('NFT is escrowed to another chain'); - } - - if (!nft.owner.equals(context.transaction.senderAddress)) { - throw new Error('Transfer not initiated by the NFT owner'); - } - - if (this._method.isNFTLocked(nft)) { - throw new Error('Locked NFTs cannot be transferred'); + return { + status: VerifyStatus.FAIL, + error: error as Error, + }; } return { @@ -67,10 +57,10 @@ export class TransferCommand extends BaseCommand { }; } - public async execute(context: CommandExecuteContext): Promise { + public async execute(context: CommandExecuteContext): Promise { const { params } = context; - await this._internalMethod.transferInternal( + await this._internalMethod.transfer( context.getMethodContext(), params.recipientAddress, params.nftID, diff --git a/framework/src/modules/nft/commands/transfer_cross_chain.ts b/framework/src/modules/nft/commands/transfer_cross_chain.ts index 9fef1ab8f19..062919f3830 100644 --- a/framework/src/modules/nft/commands/transfer_cross_chain.ts +++ b/framework/src/modules/nft/commands/transfer_cross_chain.ts @@ -24,7 +24,7 @@ import { } from '../../../state_machine'; import { InternalMethod } from '../internal_method'; -export interface Params { +export interface TransferCrossChainParams { nftID: Buffer; receivingChainID: Buffer; recipientAddress: Buffer; @@ -36,9 +36,6 @@ export interface Params { export class TransferCrossChainCommand extends BaseCommand { public schema = crossChainTransferParamsSchema; - private _nftMethod!: NFTMethod; - private _tokenMethod!: TokenMethod; - private _interoperabilityMethod!: InteroperabilityMethod; private _internalMethod!: InternalMethod; public init(args: { @@ -47,58 +44,30 @@ export class TransferCrossChainCommand extends BaseCommand { interoperabilityMethod: InteroperabilityMethod; internalMethod: InternalMethod; }): void { - this._nftMethod = args.nftMethod; - this._tokenMethod = args.tokenMethod; - 
this._interoperabilityMethod = args.interoperabilityMethod; this._internalMethod = args.internalMethod; } - public async verify(context: CommandVerifyContext): Promise { + public async verify( + context: CommandVerifyContext, + ): Promise { const { params } = context; - const methodContext = context.getMethodContext(); + const { senderAddress } = context.transaction; - if (params.receivingChainID.equals(context.chainID)) { - throw new Error('Receiving chain cannot be the sending chain'); - } - - let nft; try { - nft = await this._nftMethod.getNFT(methodContext, params.nftID); + await this._internalMethod.verifyTransferCrossChain( + context.getMethodContext(), + senderAddress, + params.nftID, + context.chainID, + params.receivingChainID, + params.messageFee, + params.data, + ); } catch (error) { - throw new Error('NFT does not exist'); - } - - if (this._nftMethod.isNFTEscrowed(nft)) { - throw new Error('NFT is escrowed to another chain'); - } - - const nftChainID = this._nftMethod.getChainID(params.nftID); - - if (!nftChainID.equals(context.chainID) && !nftChainID.equals(params.receivingChainID)) { - throw new Error('NFT must be native to either the sending or the receiving chain'); - } - - const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( - methodContext, - params.receivingChainID, - ); - - if (!nft.owner.equals(context.transaction.senderAddress)) { - throw new Error('Transfer not initiated by the NFT owner'); - } - - if (this._nftMethod.isNFTLocked(nft)) { - throw new Error('Locked NFTs cannot be transferred'); - } - - const availableBalance = await this._tokenMethod.getAvailableBalance( - methodContext, - context.transaction.senderAddress, - messageFeeTokenID, - ); - - if (availableBalance < params.messageFee) { - throw new Error('Insufficient balance for the message fee'); + return { + status: VerifyStatus.FAIL, + error: error as Error, + }; } return { @@ -106,10 +75,10 @@ export class TransferCrossChainCommand extends 
BaseCommand { }; } - public async execute(context: CommandExecuteContext): Promise { + public async execute(context: CommandExecuteContext): Promise { const { params } = context; - await this._internalMethod.transferCrossChainInternal( + await this._internalMethod.transferCrossChain( context.getMethodContext(), context.transaction.senderAddress, params.recipientAddress, diff --git a/framework/src/modules/nft/error.ts b/framework/src/modules/nft/error.ts new file mode 100644 index 00000000000..4a99e52adb5 --- /dev/null +++ b/framework/src/modules/nft/error.ts @@ -0,0 +1,15 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ + +export class NotFoundError extends Error {} diff --git a/framework/src/modules/nft/events/unlock.ts b/framework/src/modules/nft/events/unlock.ts index 63629e7b5e9..05e3647b331 100644 --- a/framework/src/modules/nft/events/unlock.ts +++ b/framework/src/modules/nft/events/unlock.ts @@ -17,6 +17,7 @@ import { LENGTH_NFT_ID, MAX_LENGTH_MODULE_NAME, MIN_LENGTH_MODULE_NAME, + NftErrorEventResult, NftEventResult, } from '../constants'; @@ -58,4 +59,8 @@ export class UnlockEvent extends BaseEvent nftChainID.equals(allowedChainID))) { + throw new TransferVerifyError( + 'NFT must be native to either the sending or the receiving chain', + NftEventResult.RESULT_NFT_NOT_NATIVE, + ); + } + + if (receivingChainID.equals(sendingChainID)) { + throw new TransferVerifyError( + 'Receiving chain cannot be the sending chain', + NftEventResult.INVALID_RECEIVING_CHAIN, + ); + } + + if (data.length > MAX_LENGTH_DATA) { + throw new TransferVerifyError('Data field is too long', NftEventResult.RESULT_DATA_TOO_LONG); + } + + if (this._nftMethod.isNFTEscrowed(nft)) { + throw new TransferVerifyError( + 'NFT is escrowed to another chain', + NftEventResult.RESULT_NFT_ESCROWED, + ); + } + + if (!nft.owner.equals(senderAddress)) { + throw new TransferVerifyError( + 'Transfer not initiated by the NFT owner', + NftEventResult.RESULT_INITIATED_BY_NONOWNER, + ); + } + + if (this._nftMethod.isNFTLocked(nft)) { + throw new TransferVerifyError( + 'Locked NFTs cannot be transferred', + NftEventResult.RESULT_NFT_LOCKED, + ); + } + + const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( + immutableMethodContext, + receivingChainID, + ); + + const availableBalance = await this._tokenMethod.getAvailableBalance( + immutableMethodContext, + senderAddress, + messageFeeTokenID, + ); + if (availableBalance < messageFee) { + throw new TransferVerifyError( + 'Insufficient balance for the message fee', + NftEventResult.RESULT_INSUFFICIENT_BALANCE, + ); + } + } + + public async 
transfer( methodContext: MethodContext, recipientAddress: Buffer, nftID: Buffer, @@ -90,12 +235,10 @@ export class InternalMethod extends BaseMethod { const nftStore = this.stores.get(NFTStore); const userStore = this.stores.get(UserStore); - const data = await nftStore.get(methodContext, nftID); - const senderAddress = data.owner; - - data.owner = recipientAddress; - - await nftStore.set(methodContext, nftID, data); + const nft = await nftStore.get(methodContext, nftID); + const senderAddress = nft.owner; + nft.owner = recipientAddress; + await nftStore.set(methodContext, nftID, nft); await userStore.del(methodContext, userStore.getKey(senderAddress, nftID)); await this.createUserEntry(methodContext, recipientAddress, nftID); @@ -107,7 +250,7 @@ export class InternalMethod extends BaseMethod { }); } - public async transferCrossChainInternal( + public async transferCrossChain( methodContext: MethodContext, senderAddress: Buffer, recipientAddress: Buffer, @@ -118,7 +261,7 @@ export class InternalMethod extends BaseMethod { includeAttributes: boolean, timestamp?: number, ): Promise { - const chainID = this._method.getChainID(nftID); + const chainID = this._nftMethod.getChainID(nftID); const nftStore = this.stores.get(NFTStore); const nft = await nftStore.get(methodContext, nftID); @@ -142,7 +285,7 @@ export class InternalMethod extends BaseMethod { } if (chainID.equals(receivingChainID)) { - await this._method.destroy(methodContext, senderAddress, nftID); + await this._nftMethod.destroy(methodContext, senderAddress, nftID); } let attributesArray: { module: string; attributes: Buffer }[] = []; diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 1ce1885561d..8417751276e 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -15,7 +15,7 @@ import { validator } from '@liskhq/lisk-validator'; import { codec } from '@liskhq/lisk-codec'; import { BaseMethod } from '../base_method'; -import { 
FeeMethod, InteroperabilityMethod, ModuleConfig, NFT, TokenMethod } from './types'; +import { FeeMethod, ModuleConfig, NFT } from './types'; import { NFTAttributes, NFTStore, NFTStoreData, nftStoreSchema } from './stores/nft'; import { ImmutableMethodContext, MethodContext } from '../../state_machine'; import { @@ -26,7 +26,6 @@ import { LENGTH_COLLECTION_ID, LENGTH_INDEX, LENGTH_NFT_ID, - MAX_LENGTH_DATA, NFT_NOT_LOCKED, NftEventResult, } from './constants'; @@ -36,7 +35,7 @@ import { SupportedNFTsStore } from './stores/supported_nfts'; import { CreateEvent } from './events/create'; import { LockEvent } from './events/lock'; import { TransferEvent } from './events/transfer'; -import { InternalMethod } from './internal_method'; +import { InternalMethod, TransferVerifyError } from './internal_method'; import { TransferCrossChainEvent } from './events/transfer_cross_chain'; import { AllNFTsSupportedEvent } from './events/all_nfts_supported'; import { AllNFTsSupportRemovedEvent } from './events/all_nfts_support_removed'; @@ -47,28 +46,21 @@ import { AllNFTsFromChainSupportRemovedEvent } from './events/all_nfts_from_chai import { RecoverEvent } from './events/recover'; import { EscrowStore } from './stores/escrow'; import { SetAttributesEvent } from './events/set_attributes'; +import { NotFoundError } from './error'; +import { UnlockEvent } from './events/unlock'; export class NFTMethod extends BaseMethod { private _config!: ModuleConfig; - private _interoperabilityMethod!: InteroperabilityMethod; private _internalMethod!: InternalMethod; private _feeMethod!: FeeMethod; - private _tokenMethod!: TokenMethod; public init(config: ModuleConfig): void { this._config = config; } - public addDependencies( - interoperabilityMethod: InteroperabilityMethod, - internalMethod: InternalMethod, - feeMethod: FeeMethod, - tokenMethod: TokenMethod, - ) { - this._interoperabilityMethod = interoperabilityMethod; + public addDependencies(internalMethod: InternalMethod, feeMethod: 
FeeMethod) { this._internalMethod = internalMethod; this._feeMethod = feeMethod; - this._tokenMethod = tokenMethod; } public getChainID(nftID: Buffer): Buffer { @@ -96,7 +88,7 @@ export class NFTMethod extends BaseMethod { const nftExists = await nftStore.has(methodContext, nftID); if (!nftExists) { - throw new Error('NFT substore entry does not exist'); + throw new NotFoundError('NFT substore entry does not exist'); } const data = await nftStore.get(methodContext, nftID); @@ -106,7 +98,7 @@ export class NFTMethod extends BaseMethod { const userStore = this.stores.get(UserStore); const userExists = await userStore.has(methodContext, userStore.getKey(owner, nftID)); if (!userExists) { - throw new Error('User substore entry does not exist'); + throw new NotFoundError('User substore entry does not exist'); } const userData = await userStore.get(methodContext, userStore.getKey(owner, nftID)); return { ...data, lockingModule: userData.lockingModule }; @@ -124,42 +116,45 @@ export class NFTMethod extends BaseMethod { try { nft = await this.getNFT(methodContext, nftID); } catch (error) { - this.events.get(DestroyEvent).error( - methodContext, - { - address, - nftID, - }, - NftEventResult.RESULT_NFT_DOES_NOT_EXIST, - ); + if (error instanceof NotFoundError) { + this.events.get(DestroyEvent).error( + methodContext, + { + address, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); - throw new Error('NFT does not exist'); + throw new Error('NFT does not exist'); + } + throw error; } - if (!nft.owner.equals(address)) { + if (this.isNFTEscrowed(nft)) { this.events.get(DestroyEvent).error( methodContext, { address, nftID, }, - NftEventResult.RESULT_INITIATED_BY_NONOWNER, + NftEventResult.RESULT_NFT_ESCROWED, ); - throw new Error('Not initiated by the NFT owner'); + throw new Error('NFT is escrowed to another chain'); } - if (this.isNFTEscrowed(nft)) { + if (!nft.owner.equals(address)) { this.events.get(DestroyEvent).error( methodContext, { address, nftID, }, - 
NftEventResult.RESULT_NFT_ESCROWED, + NftEventResult.RESULT_INITIATED_BY_NONOWNER, ); - throw new Error('NFT is escrowed to another chain'); + throw new Error('Not initiated by the NFT owner'); } if (this.isNFTLocked(nft)) { @@ -301,16 +296,19 @@ export class NFTMethod extends BaseMethod { try { nft = await this.getNFT(methodContext, nftID); } catch (error) { - this.events.get(LockEvent).error( - methodContext, - { - module, - nftID, - }, - NftEventResult.RESULT_NFT_DOES_NOT_EXIST, - ); + if (error instanceof NotFoundError) { + this.events.get(LockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); - throw new Error('NFT does not exist'); + throw new Error('NFT does not exist'); + } + throw error; } if (this.isNFTEscrowed(nft)) { @@ -355,16 +353,19 @@ export class NFTMethod extends BaseMethod { try { nft = await this.getNFT(methodContext, nftID); } catch (error) { - this.events.get(LockEvent).error( - methodContext, - { - module, - nftID, - }, - NftEventResult.RESULT_NFT_DOES_NOT_EXIST, - ); + if (error instanceof NotFoundError) { + this.events.get(UnlockEvent).error( + methodContext, + { + module, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); - throw new Error('NFT does not exist'); + throw new Error('NFT does not exist'); + } + throw error; } if (this.isNFTEscrowed(nft)) { @@ -372,7 +373,7 @@ export class NFTMethod extends BaseMethod { } if (!this.isNFTLocked(nft)) { - this.events.get(LockEvent).error( + this.events.get(UnlockEvent).error( methodContext, { module, @@ -385,7 +386,7 @@ export class NFTMethod extends BaseMethod { } if (nft.lockingModule !== module) { - this.events.get(LockEvent).error( + this.events.get(UnlockEvent).error( methodContext, { module, @@ -402,7 +403,7 @@ export class NFTMethod extends BaseMethod { lockingModule: NFT_NOT_LOCKED, }); - this.events.get(LockEvent).log(methodContext, { + this.events.get(UnlockEvent).log(methodContext, { module, nftID, }); @@ -414,63 
+415,25 @@ export class NFTMethod extends BaseMethod { recipientAddress: Buffer, nftID: Buffer, ): Promise { - let nft; try { - nft = await this.getNFT(methodContext, nftID); + await this._internalMethod.verifyTransfer(methodContext, senderAddress, nftID); } catch (error) { - this.events.get(TransferEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - nftID, - }, - NftEventResult.RESULT_NFT_DOES_NOT_EXIST, - ); - - throw new Error('NFT does not exist'); - } - - if (this.isNFTEscrowed(nft)) { - this.events.get(TransferEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - nftID, - }, - NftEventResult.RESULT_NFT_ESCROWED, - ); - throw new Error('NFT is escrowed to another chain'); - } + if (error instanceof TransferVerifyError) { + this.events.get(TransferEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + nftID, + }, + error.code, + ); + } - if (!nft.owner.equals(senderAddress)) { - this.events.get(TransferEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - nftID, - }, - NftEventResult.RESULT_INITIATED_BY_NONOWNER, - ); - throw new Error('Transfer not initiated by the NFT owner'); + throw error; } - if (this.isNFTLocked(nft)) { - this.events.get(TransferEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - nftID, - }, - NftEventResult.RESULT_NFT_LOCKED, - ); - throw new Error('Locked NFTs cannot be transferred'); - } - - await this._internalMethod.transferInternal(methodContext, recipientAddress, nftID); + await this._internalMethod.transfer(methodContext, recipientAddress, nftID); } public async transferCrossChain( @@ -483,142 +446,37 @@ export class NFTMethod extends BaseMethod { data: string, includeAttributes: boolean, ): Promise { - const ownChainID = this._internalMethod.getOwnChainID(); - if (receivingChainID.equals(ownChainID)) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, 
- nftID, - includeAttributes, - }, - NftEventResult.INVALID_RECEIVING_CHAIN, - ); - throw new Error('Receiving chain cannot be the sending chain'); - } - - if (data.length > MAX_LENGTH_DATA) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - NftEventResult.RESULT_DATA_TOO_LONG, - ); - throw new Error('Data field is too long'); - } - - let nft; try { - nft = await this.getNFT(methodContext, nftID); - } catch (error) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - NftEventResult.RESULT_NFT_DOES_NOT_EXIST, - ); + await this._internalMethod.verifyTransfer(methodContext, senderAddress, nftID); - throw new Error('NFT does not exist'); - } - - if (this.isNFTEscrowed(nft)) { - this.events.get(TransferCrossChainEvent).error( + await this._internalMethod.verifyTransferCrossChain( methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - NftEventResult.RESULT_NFT_ESCROWED, + senderAddress, + nftID, + this._internalMethod.getOwnChainID(), + receivingChainID, + messageFee, + data, ); - throw new Error('NFT is escrowed to another chain'); - } - - const nftChainID = this.getChainID(nftID); - if (![ownChainID, receivingChainID].some(allowedChainID => nftChainID.equals(allowedChainID))) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - NftEventResult.RESULT_NFT_NOT_NATIVE, - ); - throw new Error('NFT must be native either to the sending chain or the receiving chain'); - } - - if (!nft.owner.equals(senderAddress)) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - 
NftEventResult.RESULT_INITIATED_BY_NONOWNER, - ); - throw new Error('Transfer not initiated by the NFT owner'); - } - - if (this.isNFTLocked(nft)) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - NftEventResult.RESULT_NFT_LOCKED, - ); - throw new Error('Locked NFTs cannot be transferred'); - } + } catch (error) { + if (error instanceof TransferVerifyError) { + this.events.get(TransferCrossChainEvent).error( + methodContext, + { + senderAddress, + recipientAddress, + receivingChainID, + nftID, + includeAttributes, + }, + error.code, + ); + } - const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( - methodContext, - receivingChainID, - ); - const availableBalance = await this._tokenMethod.getAvailableBalance( - methodContext, - senderAddress, - messageFeeTokenID, - ); - if (availableBalance < messageFee) { - this.events.get(TransferCrossChainEvent).error( - methodContext, - { - senderAddress, - recipientAddress, - receivingChainID, - nftID, - includeAttributes, - }, - NftEventResult.RESULT_INSUFFICIENT_BALANCE, - ); - throw new Error('Insufficient balance for the message fee'); + throw error; } - await this._internalMethod.transferCrossChainInternal( + await this._internalMethod.transferCrossChain( methodContext, senderAddress, recipientAddress, @@ -886,8 +744,8 @@ export class NFTMethod extends BaseMethod { throw new Error('Recovery called by a foreign chain'); } - const nftData = await nftStore.get(methodContext, nftID); - if (!nftData.owner.equals(terminatedChainID)) { + const nft = await nftStore.get(methodContext, nftID); + if (!nft.owner.equals(terminatedChainID)) { this.events.get(RecoverEvent).error( methodContext, { @@ -914,17 +772,17 @@ export class NFTMethod extends BaseMethod { } const escrowStore = this.stores.get(EscrowStore); - nftData.owner = storeValueOwner; - const storedAttributes = 
nftData.attributesArray; + nft.owner = storeValueOwner; + const storedAttributes = nft.attributesArray; // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const receivedAttributes = decodedValue!.attributesArray; - nftData.attributesArray = this._internalMethod.getNewAttributes( + nft.attributesArray = this._internalMethod.getNewAttributes( nftID, storedAttributes, receivedAttributes, ); - await nftStore.save(methodContext, nftID, nftData); - await this._internalMethod.createUserEntry(methodContext, nftData.owner, nftID); + await nftStore.save(methodContext, nftID, nft); + await this._internalMethod.createUserEntry(methodContext, nft.owner, nftID); await escrowStore.del(methodContext, escrowStore.getKey(terminatedChainID, nftID)); this.events.get(RecoverEvent).log(methodContext, { @@ -953,14 +811,14 @@ export class NFTMethod extends BaseMethod { throw new Error('NFT substore entry does not exist'); } - const nftData = await nftStore.get(methodContext, nftID); - const index = nftData.attributesArray.findIndex(attr => attr.module === module); + const nft = await nftStore.get(methodContext, nftID); + const index = nft.attributesArray.findIndex(attr => attr.module === module); if (index > -1) { - nftData.attributesArray[index] = { module, attributes }; + nft.attributesArray[index] = { module, attributes }; } else { - nftData.attributesArray.push({ module, attributes }); + nft.attributesArray.push({ module, attributes }); } - await nftStore.save(methodContext, nftID, nftData); + await nftStore.save(methodContext, nftID, nft); this.events.get(SetAttributesEvent).log(methodContext, { nftID, diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 92c876a4dfb..9fc86536261 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -134,13 +134,8 @@ export class NFTModule extends BaseInteroperableModule { this._interoperabilityMethod = interoperabilityMethod; this._feeMethod = feeMethod; 
this._tokenMethod = tokenMethod; - this.method.addDependencies( - interoperabilityMethod, - this._internalMethod, - feeMethod, - tokenMethod, - ); - this._internalMethod.addDependencies(this.method, this._interoperabilityMethod); + this.method.addDependencies(this._internalMethod, feeMethod); + this._internalMethod.addDependencies(this.method, this._interoperabilityMethod, tokenMethod); this.crossChainMethod.addDependencies(interoperabilityMethod); this.endpoint.addDependencies(this.method); } @@ -206,7 +201,7 @@ export class NFTModule extends BaseInteroperableModule { nftMethod: this.method, tokenMethod: this._tokenMethod, }); - this._transferCommand.init({ method: this.method, internalMethod: this._internalMethod }); + this._transferCommand.init({ internalMethod: this._internalMethod }); } public async initGenesisState(context: GenesisBlockExecuteContext): Promise { diff --git a/framework/src/modules/nft/types.ts b/framework/src/modules/nft/types.ts index 9a2e8bfc37e..080089ccc0a 100644 --- a/framework/src/modules/nft/types.ts +++ b/framework/src/modules/nft/types.ts @@ -51,6 +51,9 @@ export interface TokenMethod { export interface NFTMethod { getChainID(nftID: Buffer): Buffer; destroy(methodContext: MethodContext, address: Buffer, nftID: Buffer): Promise; + getNFT(methodContext: ImmutableMethodContext, nftID: Buffer): Promise; + isNFTEscrowed(nft: NFT): boolean; + isNFTLocked(nft: NFT): boolean; } export interface NFTAttributes { diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 78d5ab0037d..9767f2b1897 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -132,9 +132,9 @@ describe('CrossChain Transfer Command', () => { beforeEach(async () => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - method.addDependencies(interopMethod, internalMethod, 
feeMethod, tokenMethod); + method.addDependencies(internalMethod, feeMethod); method.init(config); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(config); command = new CrossChainTransferCommand(module.stores, module.events); command.init({ method, internalMethod, feeMethod }); @@ -218,7 +218,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(newConfig); params = codec.encode(crossChainNFTTransferMessageParamsSchema, { nftID: Buffer.alloc(LENGTH_NFT_ID, 1), @@ -290,7 +290,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(newConfig); context = { ccm: newCcm, @@ -328,7 +328,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(newConfig); context = { ccm: newCcm, @@ -356,7 +356,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(newConfig); context = { ccm, @@ -381,7 +381,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, 
interopMethod, tokenMethod); internalMethod.init(newConfig); context = { ccm, @@ -609,7 +609,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(newConfig); context = { ccm, @@ -655,7 +655,7 @@ describe('CrossChain Transfer Command', () => { userAccountInitializationFee: BigInt(50000000), }; method.init(newConfig); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(newConfig); ccm = { crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, diff --git a/framework/test/unit/modules/nft/commands/transfer.spec.ts b/framework/test/unit/modules/nft/commands/transfer.spec.ts index 010b45c5b14..12ea04e93e8 100644 --- a/framework/test/unit/modules/nft/commands/transfer.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer.spec.ts @@ -16,7 +16,7 @@ import { Transaction } from '@liskhq/lisk-chain'; import { codec } from '@liskhq/lisk-codec'; import { utils, address } from '@liskhq/lisk-cryptography'; import { NFTModule } from '../../../../../src/modules/nft/module'; -import { TransferCommand, Params } from '../../../../../src/modules/nft/commands/transfer'; +import { TransferCommand, TransferParams } from '../../../../../src/modules/nft/commands/transfer'; import { createTransactionContext } from '../../../../../src/testing'; import { transferParamsSchema } from '../../../../../src/modules/nft/schemas'; import { @@ -27,20 +27,24 @@ import { } from '../../../../../src/modules/nft/constants'; import { NFTAttributes, NFTStore } from '../../../../../src/modules/nft/stores/nft'; import { createStoreGetter } from '../../../../../src/testing/utils'; -import { VerifyStatus } from '../../../../../src'; +import { NFTMethod, VerifyStatus } from '../../../../../src'; 
import { InternalMethod } from '../../../../../src/modules/nft/internal_method'; -import { NFTMethod } from '../../../../../src/modules/nft/method'; import { UserStore } from '../../../../../src/modules/nft/stores/user'; import { EventQueue } from '../../../../../src/state_machine'; import { TransferEvent } from '../../../../../src/modules/nft/events/transfer'; +import { InteroperabilityMethod, TokenMethod } from '../../../../../src/modules/nft/types'; describe('Transfer command', () => { const module = new NFTModule(); - const method = new NFTMethod(module.stores, module.events); + const nftMethod = new NFTMethod(module.stores, module.events); + let interoperabilityMethod!: InteroperabilityMethod; + let tokenMethod!: TokenMethod; const internalMethod = new InternalMethod(module.stores, module.events); + internalMethod.addDependencies(nftMethod, interoperabilityMethod, tokenMethod); + let command: TransferCommand; - const validParams: Params = { + const validParams: TransferParams = { nftID: Buffer.alloc(LENGTH_NFT_ID, 1), recipientAddress: utils.getRandomBytes(20), data: '', @@ -75,7 +79,7 @@ describe('Transfer command', () => { command: 'transfer', fee: BigInt(5000000), nonce: BigInt(0), - senderPublicKey: utils.getRandomBytes(32), + senderPublicKey, params: codec.encode(transferParamsSchema, { ...validParams, ...params, @@ -95,7 +99,7 @@ describe('Transfer command', () => { beforeEach(() => { command = new TransferCommand(module.stores, module.events); - command.init({ method, internalMethod }); + command.init({ internalMethod }); }); describe('verify', () => { @@ -104,9 +108,11 @@ describe('Transfer command', () => { nftID: Buffer.alloc(LENGTH_NFT_ID, 0), }); - await expect( - command.verify(nftIDNotExistingContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow('NFT does not exist'); + const nftIDNotExistingVerification = await command.verify( + nftIDNotExistingContext.createCommandVerifyContext(transferParamsSchema), + ); + 
expect(nftIDNotExistingVerification.status).toBe(VerifyStatus.FAIL); + expect(nftIDNotExistingVerification.error?.message).toBe('NFT does not exist'); }); it('should fail if NFT is escrowed to another chain', async () => { @@ -114,14 +120,16 @@ describe('Transfer command', () => { nftID, }); - await nftStore.set(createStoreGetter(nftEscrowedContext.stateStore), nftID, { + await nftStore.save(createStoreGetter(nftEscrowedContext.stateStore), nftID, { owner: chainID, attributesArray: [], }); - await expect( - command.verify(nftEscrowedContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow('NFT is escrowed to another chain'); + const nftEscrowedVerification = await command.verify( + nftEscrowedContext.createCommandVerifyContext(transferParamsSchema), + ); + expect(nftEscrowedVerification.status).toBe(VerifyStatus.FAIL); + expect(nftEscrowedVerification.error?.message).toBe('NFT is escrowed to another chain'); }); it('should fail if owner of the NFT is not the sender', async () => { @@ -142,9 +150,13 @@ describe('Transfer command', () => { }, ); - await expect( - command.verify(nftIncorrectOwnerContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow('Transfer not initiated by the NFT owner'); + const nftIncorrectOwnerVerification = await command.verify( + nftIncorrectOwnerContext.createCommandVerifyContext(transferParamsSchema), + ); + expect(nftIncorrectOwnerVerification.status).toBe(VerifyStatus.FAIL); + expect(nftIncorrectOwnerVerification.error?.message).toBe( + 'Transfer not initiated by the NFT owner', + ); }); it('should fail if NFT exists and is locked by its owner', async () => { @@ -166,9 +178,11 @@ describe('Transfer command', () => { }, ); - await expect( - command.verify(lockedNFTContext.createCommandVerifyContext(transferParamsSchema)), - ).rejects.toThrow('Locked NFTs cannot be transferred'); + const lockedNFTVerification = await command.verify( + 
lockedNFTContext.createCommandVerifyContext(transferParamsSchema), + ); + expect(lockedNFTVerification.status).toBe(VerifyStatus.FAIL); + expect(lockedNFTVerification.error?.message).toBe('Locked NFTs cannot be transferred'); }); it('should verify if unlocked NFT exists and its owner is performing the transfer', async () => { diff --git a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts index a863839d5a5..1256d05a18e 100644 --- a/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts +++ b/framework/test/unit/modules/nft/commands/transfer_cross_chain.spec.ts @@ -18,7 +18,7 @@ import { address, utils } from '@liskhq/lisk-cryptography'; import { NFTModule } from '../../../../../src/modules/nft/module'; import { TransferCrossChainCommand, - Params, + TransferCrossChainParams, } from '../../../../../src/modules/nft/commands/transfer_cross_chain'; import { crossChainTransferParamsSchema } from '../../../../../src/modules/nft/schemas'; import { @@ -76,7 +76,7 @@ describe('TransferCrossChainComand', () => { let lockedExistingNFT: { nftID: any; owner: any }; let escrowedNFT: { nftID: any; owner: any }; - const validParams: Params = { + const validParams: TransferCrossChainParams = { nftID: Buffer.alloc(LENGTH_NFT_ID), receivingChainID, recipientAddress: utils.getRandomBytes(LENGTH_ADDRESS), @@ -146,7 +146,7 @@ describe('TransferCrossChainComand', () => { ownChainID, }); - internalMethod.addDependencies(nftMethod, interoperabilityMethod); + internalMethod.addDependencies(nftMethod, interoperabilityMethod, tokenMethod); command.init({ nftMethod, tokenMethod, interoperabilityMethod, internalMethod }); @@ -206,41 +206,46 @@ describe('TransferCrossChainComand', () => { it('should fail if receiving chain id is same as the own chain id', async () => { const receivingChainIDContext = createTransactionContextWithOverridingParams({ receivingChainID: ownChainID, + nftID: 
existingNFT.nftID, }); - - await expect( - command.verify( - receivingChainIDContext.createCommandVerifyContext(crossChainTransferParamsSchema), - ), - ).rejects.toThrow('Receiving chain cannot be the sending chain'); + const receivingChainIDVerification = await command.verify( + receivingChainIDContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ); + expect(receivingChainIDVerification.status).toBe(VerifyStatus.FAIL); + expect(receivingChainIDVerification.error?.message).toBe( + 'Receiving chain cannot be the sending chain', + ); }); it('should fail if NFT does not exist', async () => { - const context = createTransactionContextWithOverridingParams({ + const nftIDNotExistingContext = createTransactionContextWithOverridingParams({ nftID: utils.getRandomBytes(LENGTH_NFT_ID), }); - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('NFT does not exist'); + const nftIDNotExistingVerification = await command.verify( + nftIDNotExistingContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ); + expect(nftIDNotExistingVerification.status).toBe(VerifyStatus.FAIL); + expect(nftIDNotExistingVerification.error?.message).toBe('NFT does not exist'); }); it('should fail if NFT is escrowed', async () => { - const context = createTransactionContextWithOverridingParams({ + const nftEscrowedContext = createTransactionContextWithOverridingParams({ nftID: escrowedNFT.nftID, + receivingChainID: escrowedNFT.nftID.subarray(0, LENGTH_CHAIN_ID), }); - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('NFT is escrowed to another chain'); + const nftEscrowedVerification = await command.verify( + nftEscrowedContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ); + expect(nftEscrowedVerification.status).toBe(VerifyStatus.FAIL); + expect(nftEscrowedVerification.error?.message).toBe('NFT is escrowed to another 
chain'); }); - it('should fail if NFT is not native to either the sending or receiving chain', async () => { + it('should fail if NFT is not native neither to the sending nor to the receiving chain', async () => { const nftID = utils.getRandomBytes(LENGTH_NFT_ID); - const context = createTransactionContextWithOverridingParams({ - nftID, - }); + const context = createTransactionContextWithOverridingParams({ nftID }); await nftStore.save(methodContext, nftID, { owner, @@ -251,13 +256,17 @@ describe('TransferCrossChainComand', () => { lockingModule: NFT_NOT_LOCKED, }); - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('NFT must be native to either the sending or the receiving chain'); + const receivingChainIDVerification = await command.verify( + context.createCommandVerifyContext(crossChainTransferParamsSchema), + ); + expect(receivingChainIDVerification.status).toBe(VerifyStatus.FAIL); + expect(receivingChainIDVerification.error?.message).toBe( + 'NFT must be native to either the sending or the receiving chain', + ); }); it('should fail if the owner of the NFT is not the sender', async () => { - const context = createTransactionContextWithOverridingParams({ + const ownerNotSenderContext = createTransactionContextWithOverridingParams({ nftID: existingNFT.nftID, }); @@ -269,34 +278,46 @@ describe('TransferCrossChainComand', () => { lockingModule: NFT_NOT_LOCKED, }); - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('Transfer not initiated by the NFT owner'); + const receivingChainIDVerification = await command.verify( + ownerNotSenderContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ); + expect(receivingChainIDVerification.status).toBe(VerifyStatus.FAIL); + expect(receivingChainIDVerification.error?.message).toBe( + 'Transfer not initiated by the NFT owner', + ); }); it('should fail if NFT is locked', async 
() => { - const context = createTransactionContextWithOverridingParams({ + const nftLockedContext = createTransactionContextWithOverridingParams({ nftID: lockedExistingNFT.nftID, }); - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('Locked NFTs cannot be transferred'); + const receivingChainIDVerification = await command.verify( + nftLockedContext.createCommandVerifyContext(crossChainTransferParamsSchema), + ); + expect(receivingChainIDVerification.status).toBe(VerifyStatus.FAIL); + expect(receivingChainIDVerification.error?.message).toBe('Locked NFTs cannot be transferred'); }); it('should fail if senders has insufficient balance of value messageFee and token messageFeeTokenID', async () => { - const context = createTransactionContextWithOverridingParams({ + const insufficientMessageFeeBalanceContext = createTransactionContextWithOverridingParams({ messageFeeTokenID, messageFee: availableBalance + BigInt(1), nftID: existingNFT.nftID, }); - await expect( - command.verify(context.createCommandVerifyContext(crossChainTransferParamsSchema)), - ).rejects.toThrow('Insufficient balance for the message fee'); + const receivingChainIDVerification = await command.verify( + insufficientMessageFeeBalanceContext.createCommandVerifyContext( + crossChainTransferParamsSchema, + ), + ); + expect(receivingChainIDVerification.status).toBe(VerifyStatus.FAIL); + expect(receivingChainIDVerification.error?.message).toBe( + 'Insufficient balance for the message fee', + ); }); - it('should verify if NFT is native', async () => { + it('should pass verification when NFT is native', async () => { const context = createTransactionContextWithOverridingParams({ nftID: existingNFT.nftID, }); @@ -306,7 +327,7 @@ describe('TransferCrossChainComand', () => { ).resolves.toEqual({ status: VerifyStatus.OK }); }); - it('should verify if NFT is native to receiving chain', async () => { + it('should pass verification when NFT is 
native to receiving chain', async () => { const nftID = Buffer.concat([ receivingChainID, utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index b9a843e008e..983daf71eca 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -34,7 +34,7 @@ import { TransferEvent, TransferEventData } from '../../../../src/modules/nft/ev import { UserStore } from '../../../../src/modules/nft/stores/user'; import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; import { NFTMethod } from '../../../../src/modules/nft/method'; -import { InteroperabilityMethod } from '../../../../src/modules/nft/types'; +import { InteroperabilityMethod, TokenMethod } from '../../../../src/modules/nft/types'; import { TransferCrossChainEvent, TransferCrossChainEventData, @@ -47,7 +47,8 @@ describe('InternalMethod', () => { const internalMethod = new InternalMethod(module.stores, module.events); const method = new NFTMethod(module.stores, module.events); let interoperabilityMethod!: InteroperabilityMethod; - internalMethod.addDependencies(method, interoperabilityMethod); + let tokenMethod!: TokenMethod; + internalMethod.addDependencies(method, interoperabilityMethod, tokenMethod); const ownChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); internalMethod.init({ ownChainID }); @@ -159,7 +160,7 @@ describe('InternalMethod', () => { }); }); - describe('transferInternal', () => { + describe('transfer', () => { it('should transfer NFT from sender to recipient and emit Transfer event', async () => { await module.stores.get(NFTStore).save(methodContext, nftID, { owner: senderAddress, @@ -170,7 +171,7 @@ describe('InternalMethod', () => { lockingModule: NFT_NOT_LOCKED, }); - await internalMethod.transferInternal(methodContext, recipientAddress, nftID); + await 
internalMethod.transfer(methodContext, recipientAddress, nftID); await expect(module.stores.get(NFTStore).get(methodContext, nftID)).resolves.toEqual({ owner: recipientAddress, @@ -195,13 +196,13 @@ describe('InternalMethod', () => { }); it('should fail if NFT does not exist', async () => { - await expect( - internalMethod.transferInternal(methodContext, recipientAddress, nftID), - ).rejects.toThrow('does not exist'); + await expect(internalMethod.transfer(methodContext, recipientAddress, nftID)).rejects.toThrow( + 'does not exist', + ); }); }); - describe('transferCrossChainInternal', () => { + describe('transferCrossChain', () => { let receivingChainID: Buffer; const messageFee = BigInt(1000); const data = ''; @@ -218,7 +219,7 @@ describe('InternalMethod', () => { .mockResolvedValue(Promise.resolve(utils.getRandomBytes(LENGTH_TOKEN_ID))), }; - internalMethod.addDependencies(method, interoperabilityMethod); + internalMethod.addDependencies(method, interoperabilityMethod, tokenMethod); }); describe('if attributes are not included ccm contains empty attributes', () => { @@ -246,7 +247,7 @@ describe('InternalMethod', () => { }); await expect( - internalMethod.transferCrossChainInternal( + internalMethod.transferCrossChain( methodContext, senderAddress, recipientAddress, @@ -324,7 +325,7 @@ describe('InternalMethod', () => { }); await expect( - internalMethod.transferCrossChainInternal( + internalMethod.transferCrossChain( methodContext, senderAddress, recipientAddress, @@ -403,7 +404,7 @@ describe('InternalMethod', () => { }); await expect( - internalMethod.transferCrossChainInternal( + internalMethod.transferCrossChain( methodContext, senderAddress, recipientAddress, @@ -488,7 +489,7 @@ describe('InternalMethod', () => { }); await expect( - internalMethod.transferCrossChainInternal( + internalMethod.transferCrossChain( methodContext, senderAddress, recipientAddress, diff --git a/framework/test/unit/modules/nft/method.spec.ts 
b/framework/test/unit/modules/nft/method.spec.ts index 16cde615f6a..0a1d8f7671d 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -68,6 +68,7 @@ import { SetAttributesEventData, } from '../../../../src/modules/nft/events/set_attributes'; import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; +import { UnlockEvent, UnlockEventData } from '../../../../src/modules/nft/events/unlock'; describe('NFTMethod', () => { const module = new NFTModule(); @@ -86,8 +87,8 @@ describe('NFTMethod', () => { }; const config = { ownChainID: Buffer.alloc(LENGTH_CHAIN_ID, 1), - escrowAccountInitializationFee: BigInt(50000000), - userAccountInitializationFee: BigInt(50000000), + escrowAccountInitializationFee: BigInt(50_000_000), + userAccountInitializationFee: BigInt(50_000_000), }; let methodContext!: MethodContext; @@ -113,12 +114,13 @@ describe('NFTMethod', () => { expectedResult: EventDataType, result: any = 0, ) => { - expect(eventQueue.getEvents()).toHaveLength(length); - expect(eventQueue.getEvents()[index].toObject().name).toEqual(new EventClass('nft').name); + const events = eventQueue.getEvents(); + expect(events).toHaveLength(length); + expect(events[index].toObject().name).toEqual(new EventClass('nft').name); const eventData = codec.decode>( new EventClass('nft').schema, - eventQueue.getEvents()[index].toObject().data, + events[index].toObject().data, ); if (result !== null) { @@ -132,9 +134,9 @@ describe('NFTMethod', () => { let escrowedNFT: { nftID: any; owner: any }; beforeEach(async () => { - method.addDependencies(interopMethod, internalMethod, feeMethod, tokenMethod); + method.addDependencies(internalMethod, feeMethod); method.init(config); - internalMethod.addDependencies(method, interopMethod); + internalMethod.addDependencies(method, interopMethod, tokenMethod); internalMethod.init(config); methodContext = createMethodContext({ @@ -486,7 +488,7 @@ describe('NFTMethod', () => { const 
address = utils.getRandomBytes(LENGTH_ADDRESS); beforeEach(() => { - method.addDependencies(interopMethod, internalMethod, feeMethod, tokenMethod); + method.addDependencies(internalMethod, feeMethod); jest.spyOn(feeMethod, 'payFee'); }); @@ -523,6 +525,7 @@ describe('NFTMethod', () => { expect(nftStoreData.owner).toStrictEqual(address); expect(nftStoreData.attributesArray).toEqual(attributesArray2); expect(userStoreData.lockingModule).toEqual(NFT_NOT_LOCKED); + checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { address, nftID: expectedKey, @@ -554,6 +557,7 @@ describe('NFTMethod', () => { expect(nftStoreData.owner).toStrictEqual(address); expect(nftStoreData.attributesArray).toEqual(attributesArray2); expect(userStoreData.lockingModule).toEqual(NFT_NOT_LOCKED); + checkEventResult(methodContext.eventQueue, 1, CreateEvent, 0, { address, nftID: expectedKey, @@ -655,10 +659,10 @@ describe('NFTMethod', () => { 'NFT does not exist', ); - checkEventResult( + checkEventResult( methodContext.eventQueue, 1, - LockEvent, + UnlockEvent, 0, { module: module.name, @@ -679,10 +683,10 @@ describe('NFTMethod', () => { 'NFT is not locked', ); - checkEventResult( + checkEventResult( methodContext.eventQueue, 1, - LockEvent, + UnlockEvent, 0, { module: module.name, @@ -692,15 +696,15 @@ describe('NFTMethod', () => { ); }); - it('should throw and log LockEvent if unlocking module is not the locking module', async () => { + it('should throw and log UnlockEvent if unlocking module is not the locking module', async () => { await expect( method.unlock(methodContext, module.name, lockedExistingNFT.nftID), ).rejects.toThrow('Unlocking NFT via module that did not lock it'); - checkEventResult( + checkEventResult( methodContext.eventQueue, 1, - LockEvent, + UnlockEvent, 0, { module: module.name, @@ -710,15 +714,15 @@ describe('NFTMethod', () => { ); }); - it('should unlock and log LockEvent', async () => { + it('should unlock and log UnlockEvent', async () => { await expect( 
method.unlock(methodContext, lockedExistingNFT.lockingModule, lockedExistingNFT.nftID), ).resolves.toBeUndefined(); - checkEventResult( + checkEventResult( methodContext.eventQueue, 1, - LockEvent, + UnlockEvent, 0, { module: lockedExistingNFT.lockingModule, @@ -762,6 +766,7 @@ describe('NFTMethod', () => { await expect( method.transfer(methodContext, senderAddress, recipientAddress, escrowedNFT.nftID), ).rejects.toThrow('NFT is escrowed to another chain'); + checkEventResult( methodContext.eventQueue, 1, @@ -780,6 +785,7 @@ describe('NFTMethod', () => { await expect( method.transfer(methodContext, senderAddress, recipientAddress, existingNFT.nftID), ).rejects.toThrow('Transfer not initiated by the NFT owner'); + checkEventResult( methodContext.eventQueue, 1, @@ -803,6 +809,7 @@ describe('NFTMethod', () => { lockedExistingNFT.nftID, ), ).rejects.toThrow('Locked NFTs cannot be transferred'); + checkEventResult( methodContext.eventQueue, 1, @@ -818,12 +825,12 @@ describe('NFTMethod', () => { }); it('should resolve if all params are valid', async () => { - jest.spyOn(internalMethod, 'transferInternal'); + jest.spyOn(internalMethod, 'transfer'); await expect( method.transfer(methodContext, existingNFT.owner, recipientAddress, existingNFT.nftID), ).resolves.toBeUndefined(); - expect(internalMethod['transferInternal']).toHaveBeenCalledWith( + expect(internalMethod['transfer']).toHaveBeenCalledWith( methodContext, recipientAddress, existingNFT.nftID, @@ -844,7 +851,9 @@ describe('NFTMethod', () => { }); it('should throw and emit error transfer cross chain event if receiving chain id is same as the own chain id', async () => { - receivingChainID = config.ownChainID; + config.ownChainID = receivingChainID; + method.init(config); + internalMethod.init(config); await expect( method.transferCrossChain( methodContext, @@ -857,6 +866,7 @@ describe('NFTMethod', () => { includeAttributes, ), ).rejects.toThrow('Receiving chain cannot be the sending chain'); + checkEventResult( 
methodContext.eventQueue, 1, @@ -874,6 +884,9 @@ describe('NFTMethod', () => { }); it('should throw and emit error transfer cross chain event if nft does not exist', async () => { + config.ownChainID = Buffer.alloc(LENGTH_CHAIN_ID, 1); + method.init(config); + internalMethod.init(config); const nonExistingNFTID = utils.getRandomBytes(LENGTH_NFT_ID); receivingChainID = nonExistingNFTID.subarray(0, LENGTH_CHAIN_ID); await expect( @@ -918,6 +931,7 @@ describe('NFTMethod', () => { includeAttributes, ), ).rejects.toThrow('NFT is escrowed to another chain'); + checkEventResult( methodContext.eventQueue, 1, @@ -935,28 +949,41 @@ describe('NFTMethod', () => { }); it('should throw and emit error transfer cross chain event if nft chain id is equal to neither own chain id or receiving chain id', async () => { + const randomAddress = utils.getRandomBytes(LENGTH_ADDRESS); + const randomNftID = utils.getRandomBytes(LENGTH_NFT_ID); + + await nftStore.save(methodContext, randomNftID, { + owner: randomAddress, + attributesArray: [], + }); + + await userStore.set(methodContext, userStore.getKey(randomAddress, randomNftID), { + lockingModule: NFT_NOT_LOCKED, + }); + await expect( method.transferCrossChain( methodContext, - lockedExistingNFT.owner, + randomAddress, recipientAddress, - lockedExistingNFT.nftID, + randomNftID, receivingChainID, messageFee, data, includeAttributes, ), - ).rejects.toThrow('NFT must be native either to the sending chain or the receiving chain'); + ).rejects.toThrow('NFT must be native to either the sending or the receiving chain'); + checkEventResult( methodContext.eventQueue, 1, TransferCrossChainEvent, 0, { - senderAddress: lockedExistingNFT.owner, + senderAddress: randomAddress, recipientAddress, receivingChainID, - nftID: lockedExistingNFT.nftID, + nftID: randomNftID, includeAttributes, }, NftEventResult.RESULT_NFT_NOT_NATIVE, @@ -976,6 +1003,7 @@ describe('NFTMethod', () => { includeAttributes, ), ).rejects.toThrow('Transfer not initiated by the NFT 
owner'); + checkEventResult( methodContext.eventQueue, 1, @@ -1006,6 +1034,7 @@ describe('NFTMethod', () => { includeAttributes, ), ).rejects.toThrow('Locked NFTs cannot be transferred'); + checkEventResult( methodContext.eventQueue, 1, @@ -1039,6 +1068,7 @@ describe('NFTMethod', () => { includeAttributes, ), ).rejects.toThrow('Insufficient balance for the message fee'); + checkEventResult( methodContext.eventQueue, 1, @@ -1056,7 +1086,7 @@ describe('NFTMethod', () => { }); it('should resolve if all params are valid', async () => { - jest.spyOn(internalMethod, 'transferCrossChainInternal'); + jest.spyOn(internalMethod, 'transferCrossChain'); when(tokenMethod.getAvailableBalance) .calledWith(methodContext, existingNFT.owner, messageFeeTokenID) .mockResolvedValue(messageFee + BigInt(10)); @@ -1073,7 +1103,7 @@ describe('NFTMethod', () => { includeAttributes, ), ).resolves.toBeUndefined(); - expect(internalMethod['transferCrossChainInternal']).toHaveBeenCalledWith( + expect(internalMethod['transferCrossChain']).toHaveBeenCalledWith( methodContext, existingNFT.owner, recipientAddress, @@ -1565,6 +1595,7 @@ describe('NFTMethod', () => { await expect( method.recover(methodContext, terminatedChainID, Buffer.alloc(2, 2), storeKey, storeValue), ).rejects.toThrow('Invalid inputs'); + checkEventResult( methodContext.eventQueue, 1, @@ -1607,6 +1638,7 @@ describe('NFTMethod', () => { Buffer.from('asfas'), ), ).rejects.toThrow('Invalid inputs'); + checkEventResult( methodContext.eventQueue, 1, @@ -1649,6 +1681,7 @@ describe('NFTMethod', () => { await expect( method.recover(methodContext, terminatedChainID, substorePrefix, storeKey, storeValue), ).rejects.toThrow('Recovery called by a foreign chain'); + checkEventResult( methodContext.eventQueue, 1, @@ -1672,6 +1705,7 @@ describe('NFTMethod', () => { await expect( method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), ).rejects.toThrow('NFT was not escrowed to terminated chain'); + 
checkEventResult( methodContext.eventQueue, 1, @@ -1695,6 +1729,7 @@ describe('NFTMethod', () => { await expect( method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), ).rejects.toThrow('Invalid account information'); + checkEventResult( methodContext.eventQueue, 1, @@ -1730,6 +1765,7 @@ describe('NFTMethod', () => { newStoreValue, ), ).resolves.toBeUndefined(); + checkEventResult( methodContext.eventQueue, 1, @@ -1765,6 +1801,7 @@ describe('NFTMethod', () => { await expect( method.setAttributes(methodContext, module.name, nftID, attributes), ).rejects.toThrow('NFT substore entry does not exist'); + checkEventResult( methodContext.eventQueue, 1, @@ -1784,6 +1821,7 @@ describe('NFTMethod', () => { await expect( method.setAttributes(methodContext, module.name, existingNFT.nftID, attributes), ).resolves.toBeUndefined(); + checkEventResult( methodContext.eventQueue, 1, @@ -1819,6 +1857,7 @@ describe('NFTMethod', () => { newAttributes, ), ).resolves.toBeUndefined(); + checkEventResult( methodContext.eventQueue, 1, From 2eabd20c5cac74ffef95be52ef2173ccfcfb35aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boban=20Milo=C5=A1evi=C4=87?= Date: Mon, 9 Oct 2023 09:55:33 +0200 Subject: [PATCH 145/170] NFT recover method now checks if NFT exists (#9028) * NFT recover method now checks if NFT exists * Rename storeKey and storeValue variables into nftID and nft --------- Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> --- framework/src/modules/nft/method.ts | 41 +++++-- .../test/unit/modules/nft/method.spec.ts | 111 +++++++++++------- 2 files changed, 96 insertions(+), 56 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 8417751276e..6814453c4d4 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -700,15 +700,14 @@ export class NFTMethod extends BaseMethod { methodContext: MethodContext, terminatedChainID: Buffer, substorePrefix: 
Buffer, - storeKey: Buffer, - storeValue: Buffer, + nftID: Buffer, + nft: Buffer, ): Promise { const nftStore = this.stores.get(NFTStore); - const nftID = storeKey; let isValidInput = true; let decodedValue: NFTStoreData; try { - decodedValue = codec.decode(nftStoreSchema, storeValue); + decodedValue = codec.decode(nftStoreSchema, nft); validator.validate(nftStoreSchema, decodedValue); } catch (error) { isValidInput = false; @@ -716,7 +715,7 @@ export class NFTMethod extends BaseMethod { if ( !substorePrefix.equals(nftStore.subStorePrefix) || - storeKey.length !== LENGTH_NFT_ID || + nftID.length !== LENGTH_NFT_ID || !isValidInput ) { this.events.get(RecoverEvent).error( @@ -744,8 +743,26 @@ export class NFTMethod extends BaseMethod { throw new Error('Recovery called by a foreign chain'); } - const nft = await nftStore.get(methodContext, nftID); - if (!nft.owner.equals(terminatedChainID)) { + let nftData; + try { + nftData = await this.getNFT(methodContext, nftID); + } catch (error) { + if (error instanceof NotFoundError) { + this.events.get(RecoverEvent).error( + methodContext, + { + terminatedChainID, + nftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + + throw new Error('NFT substore entry does not exist'); + } + throw error; + } + + if (!nftData.owner.equals(terminatedChainID)) { this.events.get(RecoverEvent).error( methodContext, { @@ -772,17 +789,17 @@ export class NFTMethod extends BaseMethod { } const escrowStore = this.stores.get(EscrowStore); - nft.owner = storeValueOwner; - const storedAttributes = nft.attributesArray; + nftData.owner = storeValueOwner; + const storedAttributes = nftData.attributesArray; // eslint-disable-next-line @typescript-eslint/no-non-null-assertion const receivedAttributes = decodedValue!.attributesArray; - nft.attributesArray = this._internalMethod.getNewAttributes( + nftData.attributesArray = this._internalMethod.getNewAttributes( nftID, storedAttributes, receivedAttributes, ); - await nftStore.save(methodContext, 
nftID, nft); - await this._internalMethod.createUserEntry(methodContext, nft.owner, nftID); + await nftStore.save(methodContext, nftID, nftData); + await this._internalMethod.createUserEntry(methodContext, nftData.owner, nftID); await escrowStore.del(methodContext, escrowStore.getKey(terminatedChainID, nftID)); this.events.get(RecoverEvent).log(methodContext, { diff --git a/framework/test/unit/modules/nft/method.spec.ts b/framework/test/unit/modules/nft/method.spec.ts index 0a1d8f7671d..7fd740f9e83 100644 --- a/framework/test/unit/modules/nft/method.spec.ts +++ b/framework/test/unit/modules/nft/method.spec.ts @@ -1583,17 +1583,17 @@ describe('NFTMethod', () => { }); describe('recover', () => { - const terminatedChainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + const terminatedChainID = Buffer.alloc(LENGTH_CHAIN_ID, 8); const substorePrefix = Buffer.from('0000', 'hex'); - const storeKey = utils.getRandomBytes(LENGTH_NFT_ID); - const storeValue = codec.encode(nftStoreSchema, { + const newNftID = Buffer.alloc(LENGTH_NFT_ID, 1); + const nft = codec.encode(nftStoreSchema, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: [], }); it('should throw and emit error recover event if substore prefix is not valid', async () => { await expect( - method.recover(methodContext, terminatedChainID, Buffer.alloc(2, 2), storeKey, storeValue), + method.recover(methodContext, terminatedChainID, Buffer.alloc(2, 2), nftID, nft), ).rejects.toThrow('Invalid inputs'); checkEventResult( @@ -1603,17 +1603,17 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: storeKey, + nftID, }, NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, ); }); - it('should throw and emit error recover event if store key length is not valid', async () => { - const newStoreKey = utils.getRandomBytes(LENGTH_NFT_ID + 1); + it('should throw and emit error recover event if NFT ID length is not valid', async () => { + const invalidNftID = utils.getRandomBytes(LENGTH_NFT_ID + 1); await expect( - 
method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), + method.recover(methodContext, terminatedChainID, substorePrefix, invalidNftID, nft), ).rejects.toThrow('Invalid inputs'); checkEventResult( methodContext.eventQueue, @@ -1622,19 +1622,19 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: newStoreKey, + nftID: invalidNftID, }, NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, ); }); - it('should throw and emit error recover event if store value is not valid', async () => { + it('should throw and emit error recover event if NFT is not valid', async () => { await expect( method.recover( methodContext, terminatedChainID, substorePrefix, - storeKey, + nftID, Buffer.from('asfas'), ), ).rejects.toThrow('Invalid inputs'); @@ -1646,7 +1646,7 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: storeKey, + nftID, }, NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, ); @@ -1662,7 +1662,7 @@ describe('NFTMethod', () => { }); await expect( - method.recover(methodContext, terminatedChainID, substorePrefix, storeKey, newStoreValue), + method.recover(methodContext, terminatedChainID, substorePrefix, nftID, newStoreValue), ).rejects.toThrow('Invalid inputs'); checkEventResult( methodContext.eventQueue, @@ -1671,15 +1671,25 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: storeKey, + nftID, }, NftEventResult.RESULT_RECOVER_FAIL_INVALID_INPUTS, ); }); it('should throw and emit error recover event if nft chain id is not same as own chain id', async () => { + // ensure that random NFT is on a different chain than ownChainID + const randomNftID = Buffer.concat([ + Buffer.alloc(LENGTH_CHAIN_ID, 9), + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + await nftStore.save(methodContext, randomNftID, { + owner: utils.getRandomBytes(LENGTH_ADDRESS), + attributesArray: [], + }); + await expect( - method.recover(methodContext, terminatedChainID, substorePrefix, storeKey, storeValue), + 
method.recover(methodContext, terminatedChainID, substorePrefix, randomNftID, nft), ).rejects.toThrow('Recovery called by a foreign chain'); checkEventResult( @@ -1689,21 +1699,42 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: storeKey, + nftID: randomNftID, }, NftEventResult.RESULT_INITIATED_BY_NONNATIVE_CHAIN, ); }); + it('should throw and emit error recover event if nft does not exist', async () => { + const unknownNftID = Buffer.concat([ + config.ownChainID, + utils.getRandomBytes(LENGTH_NFT_ID - LENGTH_CHAIN_ID), + ]); + + await expect( + method.recover(methodContext, terminatedChainID, substorePrefix, unknownNftID, nft), + ).rejects.toThrow('NFT substore entry does not exist'); + checkEventResult( + methodContext.eventQueue, + 1, + RecoverEvent, + 0, + { + terminatedChainID, + nftID: unknownNftID, + }, + NftEventResult.RESULT_NFT_DOES_NOT_EXIST, + ); + }); + it('should throw and emit error recover event if nft is not escrowed to terminated chain', async () => { - const newStoreKey = Buffer.alloc(LENGTH_NFT_ID, 1); - await nftStore.save(methodContext, newStoreKey, { + await nftStore.save(methodContext, newNftID, { owner: utils.getRandomBytes(LENGTH_CHAIN_ID), attributesArray: [], }); await expect( - method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), + method.recover(methodContext, terminatedChainID, substorePrefix, newNftID, nft), ).rejects.toThrow('NFT was not escrowed to terminated chain'); checkEventResult( @@ -1713,21 +1744,20 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: newStoreKey, + nftID: newNftID, }, NftEventResult.RESULT_NFT_NOT_ESCROWED, ); }); - it('should throw and emit error recover event if store value owner length is invalid', async () => { - const newStoreKey = Buffer.alloc(LENGTH_NFT_ID, 1); - await nftStore.save(methodContext, newStoreKey, { + it('should throw and emit error recover event if NFT owner length is invalid', async () => { + await 
nftStore.save(methodContext, newNftID, { owner: terminatedChainID, attributesArray: [], }); await expect( - method.recover(methodContext, terminatedChainID, substorePrefix, newStoreKey, storeValue), + method.recover(methodContext, terminatedChainID, substorePrefix, newNftID, nft), ).rejects.toThrow('Invalid account information'); checkEventResult( @@ -1737,33 +1767,26 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: newStoreKey, + nftID: newNftID, }, NftEventResult.RESULT_INVALID_ACCOUNT, ); }); it('should set appropriate values to stores and resolve with emitting success recover event if params are valid', async () => { - const newStoreKey = Buffer.alloc(LENGTH_NFT_ID, 1); - const storeValueOwner = utils.getRandomBytes(LENGTH_ADDRESS); - const newStoreValue = codec.encode(nftStoreSchema, { - owner: storeValueOwner, + const nftOwner = utils.getRandomBytes(LENGTH_ADDRESS); + const newNft = codec.encode(nftStoreSchema, { + owner: nftOwner, attributesArray: [], }); - await nftStore.save(methodContext, newStoreKey, { + await nftStore.save(methodContext, newNftID, { owner: terminatedChainID, attributesArray: [], }); jest.spyOn(internalMethod, 'createUserEntry'); await expect( - method.recover( - methodContext, - terminatedChainID, - substorePrefix, - newStoreKey, - newStoreValue, - ), + method.recover(methodContext, terminatedChainID, substorePrefix, newNftID, newNft), ).resolves.toBeUndefined(); checkEventResult( @@ -1773,22 +1796,22 @@ describe('NFTMethod', () => { 0, { terminatedChainID, - nftID: newStoreKey, + nftID: newNftID, }, NftEventResult.RESULT_SUCCESSFUL, ); - const nftStoreData = await nftStore.get(methodContext, newStoreKey); + const retrievedNft = await nftStore.get(methodContext, newNftID); const escrowStore = module.stores.get(EscrowStore); const escrowAccountExists = await escrowStore.has( methodContext, - escrowStore.getKey(terminatedChainID, newStoreKey), + escrowStore.getKey(terminatedChainID, newNftID), ); - 
expect(nftStoreData.owner).toStrictEqual(storeValueOwner); - expect(nftStoreData.attributesArray).toEqual([]); + expect(retrievedNft.owner).toStrictEqual(nftOwner); + expect(retrievedNft.attributesArray).toEqual([]); expect(internalMethod['createUserEntry']).toHaveBeenCalledWith( methodContext, - storeValueOwner, - newStoreKey, + nftOwner, + newNftID, ); expect(escrowAccountExists).toBe(false); }); From 24bf9616fd15b02586b6ae4dd5793380038eb6ac Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Mon, 9 Oct 2023 15:59:27 +0200 Subject: [PATCH 146/170] Update nft module internal functions (#9014) * Update nft mod internal functions * Refactor the methods * Refactor to use internal methods * Update status check * Refactor redundant code and cleanup * :white_check_mark: Add unit tests * Update the unit tests * Update unit tests --- .../modules/nft/cc_commands/cc_transfer.ts | 41 +++++----- framework/src/modules/nft/internal_method.ts | 17 +++-- framework/src/modules/nft/method.ts | 29 +++---- framework/src/modules/nft/module.ts | 17 ++--- .../nft/cc_comands/cc_transfer.spec.ts | 75 +++++++++++++++++++ .../unit/modules/nft/internal_method.spec.ts | 48 ++++++++++-- 6 files changed, 165 insertions(+), 62 deletions(-) diff --git a/framework/src/modules/nft/cc_commands/cc_transfer.ts b/framework/src/modules/nft/cc_commands/cc_transfer.ts index d4a28e89c6f..ae4884b76fd 100644 --- a/framework/src/modules/nft/cc_commands/cc_transfer.ts +++ b/framework/src/modules/nft/cc_commands/cc_transfer.ts @@ -112,24 +112,26 @@ export class CrossChainTransferCommand extends BaseCCCommand { if (nftChainID.equals(ownChainID)) { const storeData = await nftStore.get(getMethodContext(), nftID); + if (status === CCM_STATUS_CODE_OK) { - storeData.owner = recipientAddress; const storedAttributes = storeData.attributesArray; storeData.attributesArray = this._internalMethod.getNewAttributes( nftID, storedAttributes, receivedAttributes, ); - await nftStore.save(getMethodContext(), nftID, storeData); 
- await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); - await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); } else { recipientAddress = senderAddress; - storeData.owner = recipientAddress; - await nftStore.save(getMethodContext(), nftID, storeData); - await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); - await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); } + + await this._internalMethod.createNFTEntry( + getMethodContext(), + recipientAddress, + nftID, + storeData.attributesArray, + ); + await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); + await escrowStore.del(getMethodContext(), escrowStore.getKey(sendingChainID, nftID)); } else { const isSupported = await this._method.isNFTSupported(getMethodContext(), nftID); if (!isSupported) { @@ -146,22 +148,21 @@ export class CrossChainTransferCommand extends BaseCCCommand { ); throw new Error('Non-supported NFT'); } + this._feeMethod.payFee(getMethodContext(), BigInt(FEE_CREATE_NFT)); - if (status === CCM_STATUS_CODE_OK) { - await nftStore.save(getMethodContext(), nftID, { - owner: recipientAddress, - attributesArray: receivedAttributes as NFTAttributes[], - }); - await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); - } else { + if (status !== CCM_STATUS_CODE_OK) { recipientAddress = senderAddress; - await nftStore.save(getMethodContext(), nftID, { - owner: recipientAddress, - attributesArray: receivedAttributes as NFTAttributes[], - }); - await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); } + + await this._internalMethod.createNFTEntry( + getMethodContext(), + recipientAddress, + nftID, + receivedAttributes as NFTAttributes[], + ); + + await this._internalMethod.createUserEntry(getMethodContext(), recipientAddress, nftID); } this.events.get(CcmTransferEvent).log(context, 
{ diff --git a/framework/src/modules/nft/internal_method.ts b/framework/src/modules/nft/internal_method.ts index 3ed927ff1d5..626ae05d62a 100644 --- a/framework/src/modules/nft/internal_method.ts +++ b/framework/src/modules/nft/internal_method.ts @@ -90,12 +90,8 @@ export class InternalMethod extends BaseMethod { nftID: Buffer, attributesArray: NFTAttributes[], ): Promise { - const moduleNames = []; - for (const item of attributesArray) { - moduleNames.push(item.module); - } - - if (new Set(moduleNames).size !== attributesArray.length) { + const hasDuplicates = this.hasDuplicateModuleNames(attributesArray); + if (hasDuplicates) { throw new Error('Invalid attributes array provided'); } @@ -106,6 +102,15 @@ export class InternalMethod extends BaseMethod { }); } + public hasDuplicateModuleNames(attributesArray: NFTAttributes[]): boolean { + const moduleNames = []; + for (const item of attributesArray) { + moduleNames.push(item.module); + } + + return new Set(moduleNames).size !== attributesArray.length; + } + public async verifyTransfer( immutableMethodContext: ImmutableMethodContext, senderAddress: Buffer, diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index 6814453c4d4..af78d9f0466 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -255,14 +255,6 @@ export class NFTMethod extends BaseMethod { collectionID: Buffer, attributesArray: NFTAttributes[], ): Promise { - const moduleNames = []; - for (const item of attributesArray) { - moduleNames.push(item.module); - } - if (new Set(moduleNames).size !== attributesArray.length) { - throw new Error('Invalid attributes array provided'); - } - const index = await this.getNextAvailableIndex(methodContext, collectionID); const indexBytes = Buffer.alloc(LENGTH_INDEX); indexBytes.writeBigInt64BE(index); @@ -270,16 +262,9 @@ export class NFTMethod extends BaseMethod { const nftID = Buffer.concat([this._config.ownChainID, collectionID, indexBytes]); 
this._feeMethod.payFee(methodContext, BigInt(FEE_CREATE_NFT)); - const nftStore = this.stores.get(NFTStore); - await nftStore.save(methodContext, nftID, { - owner: address, - attributesArray, - }); + await this._internalMethod.createNFTEntry(methodContext, address, nftID, attributesArray); - const userStore = this.stores.get(UserStore); - await userStore.set(methodContext, userStore.getKey(address, nftID), { - lockingModule: NFT_NOT_LOCKED, - }); + await this._internalMethod.createUserEntry(methodContext, address, nftID); this.events.get(CreateEvent).log(methodContext, { address, @@ -798,7 +783,12 @@ export class NFTMethod extends BaseMethod { storedAttributes, receivedAttributes, ); - await nftStore.save(methodContext, nftID, nftData); + await this._internalMethod.createNFTEntry( + methodContext, + nftData.owner, + nftID, + nftData.attributesArray, + ); await this._internalMethod.createUserEntry(methodContext, nftData.owner, nftID); await escrowStore.del(methodContext, escrowStore.getKey(terminatedChainID, nftID)); @@ -835,7 +825,8 @@ export class NFTMethod extends BaseMethod { } else { nft.attributesArray.push({ module, attributes }); } - await nftStore.save(methodContext, nftID, nft); + + await this._internalMethod.createNFTEntry(methodContext, nft.owner, nftID, nft.attributesArray); this.events.get(SetAttributesEvent).log(methodContext, { nftID, diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index 9fc86536261..a6eb5341716 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -67,7 +67,6 @@ import { LENGTH_ADDRESS, LENGTH_CHAIN_ID, MODULE_NAME_NFT, - NFT_NOT_LOCKED, } from './constants'; export class NFTModule extends BaseInteroperableModule { @@ -268,24 +267,20 @@ export class NFTModule extends BaseInteroperableModule { supportedChainsKeySet.add(supportedNFT.chainID); } - const nftStore = this.stores.get(NFTStore); - const escrowStore = this.stores.get(EscrowStore); - const 
userStore = this.stores.get(UserStore); - for (const nft of genesisStore.nftSubstore) { const { owner, nftID, attributesArray } = nft; - await nftStore.save(context, nftID, { + await this._internalMethod.createNFTEntry( + context.getMethodContext(), owner, + nftID, attributesArray, - }); + ); if (owner.length === LENGTH_CHAIN_ID) { - await escrowStore.set(context, escrowStore.getKey(owner, nftID), {}); + await this._internalMethod.createEscrowEntry(context.getMethodContext(), owner, nftID); } else { - await userStore.set(context, userStore.getKey(owner, nftID), { - lockingModule: NFT_NOT_LOCKED, - }); + await this._internalMethod.createUserEntry(context.getMethodContext(), owner, nftID); } } diff --git a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts index 9767f2b1897..c422e16e890 100644 --- a/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts +++ b/framework/test/unit/modules/nft/cc_comands/cc_transfer.spec.ts @@ -18,6 +18,7 @@ import { NFTModule } from '../../../../../src/modules/nft/module'; import { InMemoryPrefixedStateDB } from '../../../../../src/testing'; import { ALL_SUPPORTED_NFTS_KEY, + CCM_STATUS_CODE_OK, CROSS_CHAIN_COMMAND_NAME_TRANSFER, FEE_CREATE_NFT, LENGTH_CHAIN_ID, @@ -708,5 +709,79 @@ describe('CrossChain Transfer Command', () => { sendingChainID: ccm.sendingChainID, }); }); + + it('should throw if duplicate module attributes are found when a foreign NFT is received - status === CCM_STATUS_CODE_OK', async () => { + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray: [ + { module: 'module1', attributes: Buffer.alloc(5) }, + { module: 'module1', attributes: Buffer.alloc(5) }, + ], + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: 
CCM_STATUS_CODE_OK, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.execute(context)).rejects.toThrow('Invalid attributes array provided'); + }); + }); + + it('should throw if duplicate module attributes are found when a foreign NFT is bounced - status !== CCM_STATUS_CODE_OK', async () => { + params = codec.encode(crossChainNFTTransferMessageParamsSchema, { + nftID, + senderAddress, + recipientAddress, + attributesArray: [ + { module: 'module1', attributes: Buffer.alloc(5) }, + { module: 'module1', attributes: Buffer.alloc(5) }, + ], + data: '', + }); + ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_NAME_TRANSFER, + module: module.name, + nonce: BigInt(1), + sendingChainID, + receivingChainID, + fee: BigInt(30000), + status: 12345, + params, + }; + context = { + ccm, + transaction: defaultTransaction, + header: defaultHeader, + stateStore, + contextStore, + getMethodContext, + eventQueue: new EventQueue(0), + getStore, + logger: fakeLogger, + chainID, + }; + + await expect(command.execute(context)).rejects.toThrow('Invalid attributes array provided'); }); }); diff --git a/framework/test/unit/modules/nft/internal_method.spec.ts b/framework/test/unit/modules/nft/internal_method.spec.ts index 983daf71eca..89456d8d661 100644 --- a/framework/test/unit/modules/nft/internal_method.spec.ts +++ b/framework/test/unit/modules/nft/internal_method.spec.ts @@ -34,7 +34,11 @@ import { TransferEvent, TransferEventData } from '../../../../src/modules/nft/ev import { UserStore } from '../../../../src/modules/nft/stores/user'; import { EscrowStore } from '../../../../src/modules/nft/stores/escrow'; import { NFTMethod } from '../../../../src/modules/nft/method'; -import { InteroperabilityMethod, TokenMethod } from '../../../../src/modules/nft/types'; +import { + 
InteroperabilityMethod, + NFTAttributes, + TokenMethod, +} from '../../../../src/modules/nft/types'; import { TransferCrossChainEvent, TransferCrossChainEventData, @@ -103,15 +107,47 @@ describe('InternalMethod', () => { }); }); + describe('hasDuplicateModuleNames', () => { + it('should return false when the attributes array is empty', () => { + const attributesArray: NFTAttributes[] = []; + + expect(internalMethod.hasDuplicateModuleNames(attributesArray)).toBeFalse(); + }); + + it('should return false when all module names are unique', () => { + const attributesArray: NFTAttributes[] = [ + { module: 'module1', attributes: Buffer.from('attributes1') }, + { module: 'module2', attributes: Buffer.from('attributes2') }, + { module: 'module3', attributes: Buffer.from('attributes3') }, + ]; + + const result = internalMethod.hasDuplicateModuleNames(attributesArray); + + expect(result).toBeFalse(); + }); + + it('should return true when there are duplicate module names', () => { + const attributesArray: NFTAttributes[] = [ + { module: 'module1', attributes: Buffer.from('attributes1') }, + { module: 'module1', attributes: Buffer.from('attributes2') }, + { module: 'module3', attributes: Buffer.from('attributes3') }, + ]; + + const result = internalMethod.hasDuplicateModuleNames(attributesArray); + + expect(result).toBeTrue(); + }); + }); + describe('createNFTEntry', () => { it('should throw for duplicate module names in attributes array', async () => { const attributesArray = [ { - module: 'token', + module: 'module1', attributes: Buffer.alloc(8, 1), }, { - module: 'token', + module: 'module1', attributes: Buffer.alloc(8, 2), }, ]; @@ -124,16 +160,16 @@ describe('InternalMethod', () => { it('should create an entry in NFStore with attributes sorted by module if there is no duplicate module name', async () => { const unsortedAttributesArray = [ { - module: 'token', + module: 'module1', attributes: Buffer.alloc(8, 1), }, { - module: 'pos', + module: 'module2', attributes: 
Buffer.alloc(8, 1), }, ]; - const sortedAttributesArray = unsortedAttributesArray.sort((a, b) => + const sortedAttributesArray = [...unsortedAttributesArray].sort((a, b) => a.module.localeCompare(b.module, 'en'), ); From d75cf3a7c731d587fbb24062b043b70ee41327ec Mon Sep 17 00:00:00 2001 From: sitetester Date: Thu, 12 Oct 2023 12:40:02 +0300 Subject: [PATCH 147/170] Update execute common (#9054) * Update executeCommon to match latest LIP changes * Remove repeated assignment * Add relevant comments --------- Co-authored-by: !shan --- .../base_cross_chain_update_command.ts | 51 ++- .../submit_mainchain_cross_chain_update.ts | 4 +- .../submit_sidechain_cross_chain_update.ts | 4 +- .../base_cross_chain_update_command.spec.ts | 332 ++++++++++-------- ...ubmit_mainchain_cross_chain_update.spec.ts | 10 +- ...ubmit_sidechain_cross_chain_update.spec.ts | 10 +- 6 files changed, 236 insertions(+), 175 deletions(-) diff --git a/framework/src/modules/interoperability/base_cross_chain_update_command.ts b/framework/src/modules/interoperability/base_cross_chain_update_command.ts index 07e4724613d..9899ecc71c5 100644 --- a/framework/src/modules/interoperability/base_cross_chain_update_command.ts +++ b/framework/src/modules/interoperability/base_cross_chain_update_command.ts @@ -122,7 +122,7 @@ export abstract class BaseCrossChainUpdateCommand< } } - protected async executeCommon( + protected async beforeCrossChainMessagesExecution( context: CommandExecuteContext, isMainchain: boolean, ): Promise<[CCMsg[], boolean]> { @@ -193,21 +193,9 @@ export abstract class BaseCrossChainUpdateCommand< } try { - // The CCM must come from the sending chain. - if (isMainchain && !ccm.sendingChainID.equals(params.sendingChainID)) { - throw new Error('CCM is not from the sending chain.'); - } - // Sending and receiving chains must differ. 
- if (ccm.receivingChainID.equals(ccm.sendingChainID)) {
- throw new Error('Sending and receiving chains must differ.');
- }
- // The CCM must come be directed to the sidechain, unless it was bounced on the mainchain.
- if (!isMainchain && !context.chainID.equals(ccm.receivingChainID)) {
- throw new Error('CCM is not directed to the sidechain.');
- }
- if (isMainchain && ccm.status === CCMStatusCode.CHANNEL_UNAVAILABLE) {
- throw new Error('CCM status channel unavailable can only be set on the mainchain.');
- }
+ // Verify whether the CCM respects the routing rules,
+ // which differ on mainchain and sidechains.
+ this._verifyRoutingRules(context, isMainchain, ccm);
 ccms.push(ccm);
 } catch (error) {
 await this.internalMethod.terminateChainInternal(context, params.sendingChainID);
@@ -226,7 +214,36 @@ export abstract class BaseCrossChainUpdateCommand<
 return [ccms, true];
 }

- protected async afterExecuteCommon(
+ // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0053.md#verifyroutingrules
+ private _verifyRoutingRules(
+ context: CommandExecuteContext,
+ isMainchain: boolean,
+ ccm: CCMsg,
+ ) {
+ // Sending and receiving chains must differ.
+ if (ccm.receivingChainID.equals(ccm.sendingChainID)) {
+ throw new Error('Sending and receiving chains must differ.');
+ }
+
+ // Processing on the mainchain
+ if (isMainchain) {
+ // The CCM must come from the sending chain.
+ if (!ccm.sendingChainID.equals(context.params.sendingChainID)) {
+ throw new Error('CCM is not from the sending chain.');
+ }
+ if (ccm.status === CCMStatusCode.CHANNEL_UNAVAILABLE) {
+ throw new Error('CCM status channel unavailable can only be set on the mainchain.');
+ }
+ } else {
+ // The CCM must be directed to the sidechain, unless it was bounced on the mainchain.
+ // eslint-disable-next-line no-lonely-if + if (!context.chainID.equals(ccm.receivingChainID)) { + throw new Error('CCM is not directed to the sidechain.'); + } + } + } + + protected async afterCrossChainMessagesExecution( context: CommandExecuteContext, ) { const { params } = context; diff --git a/framework/src/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.ts b/framework/src/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.ts index fc5011d4d4c..7861a5e7f78 100644 --- a/framework/src/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.ts +++ b/framework/src/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.ts @@ -81,7 +81,7 @@ export class SubmitMainchainCrossChainUpdateCommand extends BaseCrossChainUpdate public async execute( context: CommandExecuteContext, ): Promise { - const [decodedCCMs, ok] = await this.executeCommon(context, true); + const [decodedCCMs, ok] = await this.beforeCrossChainMessagesExecution(context, true); if (!ok) { return; } @@ -119,7 +119,7 @@ export class SubmitMainchainCrossChainUpdateCommand extends BaseCrossChainUpdate context.contextStore.delete(CONTEXT_STORE_KEY_CCM_PROCESSING); } - await this.afterExecuteCommon(context); + await this.afterCrossChainMessagesExecution(context); } private async _beforeCrossChainMessageForwarding( diff --git a/framework/src/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.ts b/framework/src/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.ts index e9baddc6a8c..1ac9ff1d9bb 100644 --- a/framework/src/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.ts +++ b/framework/src/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.ts @@ -40,7 +40,7 @@ export class SubmitSidechainCrossChainUpdateCommand extends BaseCrossChainUpdate public async execute( context: 
CommandExecuteContext, ): Promise { - const [decodedCCMs, ok] = await this.executeCommon(context, false); + const [decodedCCMs, ok] = await this.beforeCrossChainMessagesExecution(context, false); if (!ok) { return; } @@ -72,6 +72,6 @@ export class SubmitSidechainCrossChainUpdateCommand extends BaseCrossChainUpdate context.contextStore.delete(CONTEXT_STORE_KEY_CCM_PROCESSING); } - await this.afterExecuteCommon(context); + await this.afterCrossChainMessagesExecution(context); } } diff --git a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts index 328baebba70..115cc738f60 100644 --- a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts +++ b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts @@ -22,6 +22,7 @@ import { Transaction, CommandVerifyContext, ChainAccount, + CCMsg, } from '../../../../src'; import { BaseCCCommand } from '../../../../src/modules/interoperability/base_cc_command'; import { BaseCrossChainUpdateCommand } from '../../../../src/modules/interoperability/base_cross_chain_update_command'; @@ -528,10 +529,10 @@ describe('BaseCrossChainUpdateCommand', () => { }); // CAUTION! 
- // tests should be written/executed as per `BaseCrossChainUpdateCommand::executeCommon`, + // tests should be written/executed as per `BaseCrossChainUpdateCommand::beforeCrossChainMessagesExecution`, // otherwise, they can fail due to some other check // also, we can simplify test cases by giving only one CCM to params.inboxUpdate.crossChainMessages array - describe('executeCommon', () => { + describe('beforeCrossChainMessagesExecution', () => { let executeContext: CommandExecuteContext; let stateStore: PrefixedStateReadWriter; @@ -561,7 +562,9 @@ describe('BaseCrossChainUpdateCommand', () => { }); it('should verify certificate signature', async () => { - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([ + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([ expect.toBeArrayOfSize(params.inboxUpdate.crossChainMessages.length), true, ]); @@ -569,7 +572,9 @@ describe('BaseCrossChainUpdateCommand', () => { }); it('should initialize user account for message fee token ID when inboxUpdate is not empty', async () => { - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([ + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([ expect.toBeArrayOfSize(params.inboxUpdate.crossChainMessages.length), true, ]); @@ -588,13 +593,17 @@ describe('BaseCrossChainUpdateCommand', () => { (command['internalMethod'].verifyPartnerChainOutboxRoot as jest.Mock).mockRejectedValue( new Error('invalid root'), ); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], false]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); expect(command['_interopsMethod'].getMessageFeeTokenID).not.toHaveBeenCalled(); }); it('should verifyPartnerChainOutboxRoot when inboxUpdate is not empty', async () => { - await 
expect(command['executeCommon'](executeContext, true)).resolves.toEqual([ + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([ expect.toBeArrayOfSize(params.inboxUpdate.crossChainMessages.length), true, ]); @@ -625,7 +634,9 @@ describe('BaseCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], true]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], true]); expect(command['_interopsMethod'].getMessageFeeTokenID).not.toHaveBeenCalled(); expect(command['_tokenMethod'].initializeUserAccount).not.toHaveBeenCalled(); }); @@ -658,7 +669,9 @@ describe('BaseCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], false]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( expect.anything(), params.sendingChainID, @@ -696,7 +709,9 @@ describe('BaseCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], false]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( expect.anything(), params.sendingChainID, @@ -714,7 +729,9 @@ describe('BaseCrossChainUpdateCommand', () => { ); }); - it('should terminate the chain and add an event when CCM sending chain and ccu sending chain is not the same', async () => { + it('should call _verifyRoutingRules', async () => { + jest.spyOn(command, '_verifyRoutingRules' as any); + const ccm = { crossChainCommand: 
CROSS_CHAIN_COMMAND_REGISTRATION, fee: BigInt(0), @@ -745,15 +762,23 @@ describe('BaseCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], false]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); + expect(command['_verifyRoutingRules']).toHaveBeenCalledTimes(1); + }); + + const routingRulesCommonExpects = ( + cmdExecuteContext: CommandExecuteContext, + ccm: CCMsg, + ) => { expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( expect.anything(), - params.sendingChainID, + cmdExecuteContext.params.sendingChainID, ); - expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, - executeContext.params.sendingChainID, + cmdExecuteContext, + cmdExecuteContext.params.sendingChainID, ccm.receivingChainID, { ccm, @@ -761,12 +786,10 @@ describe('BaseCrossChainUpdateCommand', () => { code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, }, ); - }); + }; - // Sending and receiving chains must differ. 
- it('should terminate the chain and add an event when receiving chain is the same as sending chain', async () => { - const sendingChainID = chainID; - const ccm = { + describe('_verifyRoutingRules', () => { + const routingRulesCCM = { crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, fee: BigInt(0), module: MODULE_NAME_INTEROPERABILITY, @@ -779,133 +802,143 @@ describe('BaseCrossChainUpdateCommand', () => { status: CCMStatusCode.OK, }; - executeContext = createTransactionContext({ - chainID: sendingChainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, - // this is needed to pass `!ccm.sendingChainID.equals(params.sendingChainID)` check (previous test) - sendingChainID, + // Sending and receiving chains must differ + it('should terminate the chain and add an event when receiving chain is the same as sending chain', async () => { + const ccm = routingRulesCCM; + const sendingChainID = chainID; + + executeContext = createTransactionContext({ + chainID: sendingChainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, + // this is needed to pass `!ccm.sendingChainID.equals(params.sendingChainID)` check (previous test) + sendingChainID, + }), }), - }), - }).createCommandExecuteContext(command.schema); + }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], false]); - expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( - expect.anything(), - sendingChainID, - ); - 
expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, - executeContext.params.sendingChainID, - ccm.receivingChainID, - { - ccm, - result: CCMProcessedResult.DISCARDED, - code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, - }, - ); - }); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); - it('should terminate the chain and add an event when CCM is not directed to the sidechain', async () => { - const ccm = { - crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, - fee: BigInt(0), - module: MODULE_NAME_INTEROPERABILITY, - nonce: BigInt(1), - params: utils.getRandomBytes(10), - // will fail for `!context.chainID.equals(ccm.receivingChainID)` - receivingChainID: Buffer.from([0, 0, 3, 0]), - sendingChainID: defaultSendingChainID, - status: CCMStatusCode.OK, - }; + routingRulesCommonExpects(executeContext, ccm); + }); - executeContext = createTransactionContext({ - chainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, + it('should terminate the chain and add an event when CCM sending chain and ccu sending chain is not the same', async () => { + const ccm = { + ...routingRulesCCM, + // this will fail for `!ccm.sendingChainID.equals(params.sendingChainID)` + // params.sendingChainID is `defaultSendingChainID` (line 158) + sendingChainID: Buffer.from([1, 2, 3, 4]), + }; + + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, + }), }), - }), - 
}).createCommandExecuteContext(command.schema); + }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, false)).resolves.toEqual([[], false]); - expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( - expect.anything(), - params.sendingChainID, - ); - expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, - executeContext.params.sendingChainID, - ccm.receivingChainID, - { - ccm, - result: CCMProcessedResult.DISCARDED, - code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, - }, - ); - }); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); - it('should reject with terminate the chain and add an event when ccm status is CCMStatusCode.CHANNEL_UNAVAILABLE and mainchain is true', async () => { - const ccm = { - crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, - fee: BigInt(0), - module: MODULE_NAME_INTEROPERABILITY, - nonce: BigInt(1), - params: utils.getRandomBytes(10), - // must be same as `context.chainID` to pass `!context.chainID.equals(ccm.receivingChainID)` - receivingChainID: chainID, - // must be same as defaultSendingChainID to pass `!ccm.sendingChainID.equals(params.sendingChainID)` - sendingChainID: defaultSendingChainID, - // will fail for `CCMStatusCode.CHANNEL_UNAVAILABLE` - status: CCMStatusCode.CHANNEL_UNAVAILABLE, - }; + routingRulesCommonExpects(executeContext, ccm); + }); - executeContext = createTransactionContext({ - chainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, + it('should reject with terminate the chain and add an event when ccm status is CCMStatusCode.CHANNEL_UNAVAILABLE and mainchain is true', async () => { + const ccm = { + 
...routingRulesCCM, + // must be same as `context.chainID` to pass `!context.chainID.equals(ccm.receivingChainID)` + receivingChainID: chainID, + // must be same as defaultSendingChainID to pass `!ccm.sendingChainID.equals(params.sendingChainID)` + sendingChainID: defaultSendingChainID, + // will fail for `CCMStatusCode.CHANNEL_UNAVAILABLE` + status: CCMStatusCode.CHANNEL_UNAVAILABLE, + }; + + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, + }), }), - }), - }).createCommandExecuteContext(command.schema); + }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, true)).resolves.toEqual([[], false]); - expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( - expect.anything(), - params.sendingChainID, - ); - expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, - executeContext.params.sendingChainID, - ccm.receivingChainID, - { - ccm, - result: CCMProcessedResult.DISCARDED, - code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, - }, - ); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); + + routingRulesCommonExpects(executeContext, ccm); + }); + + it('should terminate the chain and add an event when CCM is not directed to the sidechain', async () => { + const ccm = { + ...routingRulesCCM, + // will fail for `!context.chainID.equals(ccm.receivingChainID)` + receivingChainID: Buffer.from([0, 0, 3, 0]), + sendingChainID: defaultSendingChainID, + }; + + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: 
codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, + }), + }), + }).createCommandExecuteContext(command.schema); + + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, false), + ).resolves.toEqual([[], false]); + expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( + expect.anything(), + params.sendingChainID, + ); + expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( + executeContext, + executeContext.params.sendingChainID, + ccm.receivingChainID, + { + ccm, + result: CCMProcessedResult.DISCARDED, + code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, + }, + ); + }); }); it('should resolve when ccm status is CCMStatusCode.CHANNEL_UNAVAILABLE and mainchain is false', async () => { @@ -936,17 +969,16 @@ describe('BaseCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(command.schema); - await expect(command['executeCommon'](executeContext, false)).resolves.toEqual([ - expect.toBeArrayOfSize(1), - true, - ]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, false), + ).resolves.toEqual([expect.toBeArrayOfSize(1), true]); expect(internalMethod.terminateChainInternal).not.toHaveBeenCalled(); expect(command['events'].get(CcmProcessedEvent).log).not.toHaveBeenCalled(); }); }); - describe('afterExecuteCommon', () => { + describe('afterCrossChainMessagesExecution', () => { let executeContext: CommandExecuteContext; let chainValidatorsStore: ChainValidatorsStore; @@ -990,7 +1022,9 @@ describe('BaseCrossChainUpdateCommand', () => { certificateThreshold: BigInt(20), } as any); - await expect(command['afterExecuteCommon'](executeContext)).resolves.toBeUndefined(); + await expect( + command['afterCrossChainMessagesExecution'](executeContext), + ).resolves.toBeUndefined(); expect(command['internalMethod'].updateValidators).toHaveBeenCalledWith( 
expect.anything(), executeContext.params, @@ -999,7 +1033,9 @@ describe('BaseCrossChainUpdateCommand', () => { it('should update validators if activeValidatorsUpdate is empty but params.certificateThreshold !== sendingChainValidators.certificateThreshold', async () => { executeContext.params.activeValidatorsUpdate.bftWeightsUpdateBitmap = EMPTY_BUFFER; - await expect(command['afterExecuteCommon'](executeContext)).resolves.toBeUndefined(); + await expect( + command['afterCrossChainMessagesExecution'](executeContext), + ).resolves.toBeUndefined(); expect(command['internalMethod'].updateValidators).toHaveBeenCalledWith( expect.anything(), @@ -1009,7 +1045,9 @@ describe('BaseCrossChainUpdateCommand', () => { it('should not update certificate and updatePartnerChainOutboxRoot if certificate is empty', async () => { executeContext.params.certificate = EMPTY_BYTES; - await expect(command['afterExecuteCommon'](executeContext)).resolves.toBeUndefined(); + await expect( + command['afterCrossChainMessagesExecution'](executeContext), + ).resolves.toBeUndefined(); expect(command['internalMethod'].updateCertificate).not.toHaveBeenCalled(); expect(command['internalMethod'].updatePartnerChainOutboxRoot).not.toHaveBeenCalled(); }); @@ -1023,7 +1061,9 @@ describe('BaseCrossChainUpdateCommand', () => { bitmap: EMPTY_BUFFER, }, }; - await expect(command['afterExecuteCommon'](executeContext)).resolves.toBeUndefined(); + await expect( + command['afterCrossChainMessagesExecution'](executeContext), + ).resolves.toBeUndefined(); expect(command['internalMethod'].updatePartnerChainOutboxRoot).not.toHaveBeenCalled(); }); diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts index 9bf2a422351..db9fad7214a 100644 --- 
a/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts @@ -511,7 +511,7 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { .mockResolvedValue(undefined as never); }); - it('should call executeCommon', async () => { + it('should call beforeCrossChainMessagesExecution', async () => { executeContext = createTransactionContext({ chainID, stateStore, @@ -524,12 +524,14 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(mainchainCCUUpdateCommand.schema); jest - .spyOn(mainchainCCUUpdateCommand, 'executeCommon' as never) + .spyOn(mainchainCCUUpdateCommand, 'beforeCrossChainMessagesExecution' as never) .mockResolvedValue([[], true] as never); await expect(mainchainCCUUpdateCommand.execute(executeContext)).resolves.toBeUndefined(); - expect(mainchainCCUUpdateCommand['executeCommon']).toHaveBeenCalledTimes(1); - expect(mainchainCCUUpdateCommand['executeCommon']).toHaveBeenCalledWith( + expect(mainchainCCUUpdateCommand['beforeCrossChainMessagesExecution']).toHaveBeenCalledTimes( + 1, + ); + expect(mainchainCCUUpdateCommand['beforeCrossChainMessagesExecution']).toHaveBeenCalledWith( expect.anything(), true, ); diff --git a/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts b/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts index e2dcf9db178..3550031e0e3 100644 --- a/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts @@ -354,7 +354,7 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { jest.spyOn(sidechainCCUUpdateCommand, 'apply' as 
never).mockResolvedValue(undefined as never); }); - it('should call executeCommon', async () => { + it('should call beforeCrossChainMessagesExecution', async () => { executeContext = createTransactionContext({ chainID, stateStore, @@ -367,12 +367,14 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { }), }).createCommandExecuteContext(sidechainCCUUpdateCommand.schema); jest - .spyOn(sidechainCCUUpdateCommand, 'executeCommon' as never) + .spyOn(sidechainCCUUpdateCommand, 'beforeCrossChainMessagesExecution' as never) .mockResolvedValue([[], true] as never); await expect(sidechainCCUUpdateCommand.execute(executeContext)).resolves.toBeUndefined(); - expect(sidechainCCUUpdateCommand['executeCommon']).toHaveBeenCalledTimes(1); - expect(sidechainCCUUpdateCommand['executeCommon']).toHaveBeenCalledWith( + expect(sidechainCCUUpdateCommand['beforeCrossChainMessagesExecution']).toHaveBeenCalledTimes( + 1, + ); + expect(sidechainCCUUpdateCommand['beforeCrossChainMessagesExecution']).toHaveBeenCalledWith( expect.anything(), false, ); From bc363862d811022b60b7d425db1ba79bf96478fe Mon Sep 17 00:00:00 2001 From: sitetester Date: Thu, 12 Oct 2023 17:43:37 +0300 Subject: [PATCH 148/170] Update terminate liveness command (#9063) * Update `Liveness Termination Command` * Add `validator.validate` check * Update test title * Add `verifySchema` describe block --------- Co-authored-by: !shan --- .../terminate_sidechain_for_liveness.ts | 17 +-- .../src/modules/interoperability/schemas.ts | 1 + .../terminate_sidechain_for_liveness.spec.ts | 133 ++++++++---------- 3 files changed, 70 insertions(+), 81 deletions(-) diff --git a/framework/src/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.ts b/framework/src/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.ts index 73263756ce7..74392c6766c 100644 --- a/framework/src/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.ts +++ 
b/framework/src/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.ts @@ -23,6 +23,7 @@ import { ChainAccountStore, ChainStatus } from '../../stores/chain_account'; import { TerminateSidechainForLivenessParams } from '../../types'; import { MainchainInteroperabilityInternalMethod } from '../internal_method'; +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#liveness-termination-command-1 export class TerminateSidechainForLivenessCommand extends BaseInteroperabilityCommand { public schema = terminateSidechainForLivenessParamsSchema; @@ -30,28 +31,24 @@ export class TerminateSidechainForLivenessCommand extends BaseInteroperabilityCo context: CommandVerifyContext, ): Promise { const { params } = context; - const doesChainAccountExist = await this.stores - .get(ChainAccountStore) - .has(context, params.chainID); - if (!doesChainAccountExist) { + const chainAccount = await this.stores + .get(ChainAccountStore) + .getOrUndefined(context, params.chainID); + if (!chainAccount) { throw new Error('Chain account does not exist.'); } - const chainAccount = await this.stores.get(ChainAccountStore).get(context, params.chainID); - - // The commands fails if the sidechain is already terminated. if (chainAccount.status === ChainStatus.TERMINATED) { throw new Error('Sidechain is already terminated.'); } // Or if the sidechain did not violate the liveness condition. 
- const isChainAccountLive = await this.internalMethod.isLive( + const live = await this.internalMethod.isLive( context, params.chainID, context.header.timestamp, ); - - if (isChainAccountLive) { + if (live) { throw new Error('Sidechain did not violate the liveness condition.'); } diff --git a/framework/src/modules/interoperability/schemas.ts b/framework/src/modules/interoperability/schemas.ts index a9eff070965..9e1804d3844 100644 --- a/framework/src/modules/interoperability/schemas.ts +++ b/framework/src/modules/interoperability/schemas.ts @@ -545,6 +545,7 @@ export const stateRecoveryInitParamsSchema = { }, }; +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#parameters-2 export const terminateSidechainForLivenessParamsSchema = { $id: '/modules/interoperability/mainchain/terminateSidechainForLiveness', type: 'object', diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts index 70c630dae63..5dc0146673a 100644 --- a/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/terminate_sidechain_for_liveness.spec.ts @@ -15,6 +15,7 @@ import { codec } from '@liskhq/lisk-codec'; import { Transaction } from '@liskhq/lisk-chain'; import { utils } from '@liskhq/lisk-cryptography'; +import { validator } from '@liskhq/lisk-validator'; import { CommandExecuteContext, MainchainInteroperabilityModule } from '../../../../../../src'; import { BaseCCCommand } from '../../../../../../src/modules/interoperability/base_cc_command'; import { BaseCCMethod } from '../../../../../../src/modules/interoperability/base_cc_method'; @@ -35,35 +36,74 @@ import { ChainStatus, } from '../../../../../../src/modules/interoperability/stores/chain_account'; import { 
TerminateSidechainForLivenessCommand } from '../../../../../../src/modules/interoperability'; +import { CHAIN_ID_LENGTH } from '../../../../../../src/modules/token/constants'; describe('TerminateSidechainForLivenessCommand', () => { const interopMod = new MainchainInteroperabilityModule(); + let livenessTerminationCommand: TerminateSidechainForLivenessCommand; + let commandVerifyContext: CommandVerifyContext; + let interoperableCCMethods: Map; + let ccCommands: Map; + let transaction: Transaction; + let transactionParams: TerminateSidechainForLivenessParams; + let encodedTransactionParams: Buffer; + + beforeEach(() => { + interoperableCCMethods = new Map(); + ccCommands = new Map(); + transactionParams = { + chainID: utils.intToBuffer(3, 4), + }; + encodedTransactionParams = codec.encode( + terminateSidechainForLivenessParamsSchema, + transactionParams, + ); + transaction = new Transaction({ + module: MODULE_NAME_INTEROPERABILITY, + command: COMMAND_NAME_LIVENESS_TERMINATION, + fee: BigInt(100000000), + nonce: BigInt(0), + params: encodedTransactionParams, + senderPublicKey: utils.getRandomBytes(32), + signatures: [], + }); + livenessTerminationCommand = new TerminateSidechainForLivenessCommand( + interopMod.stores, + interopMod.events, + interoperableCCMethods, + ccCommands, + interopMod['internalMethod'], + ); + }); + + describe('verifySchema', () => { + it(`should throw error when chainID is not bytes`, () => { + expect(() => + validator.validate(livenessTerminationCommand.schema, { + chainID: 123, + }), + ).toThrow('Property \'.chainID\' should pass "dataType" keyword validation'); + }); + it(`should throw error when chainID has length less than ${CHAIN_ID_LENGTH}`, () => { + expect(() => + validator.validate(livenessTerminationCommand.schema, { + chainID: Buffer.alloc(CHAIN_ID_LENGTH - 1), + }), + ).toThrow("Property '.chainID' minLength not satisfied"); + }); + it(`should throw error when chainID has length greater than ${CHAIN_ID_LENGTH}`, () => { + 
expect(() => + validator.validate(livenessTerminationCommand.schema, { + chainID: Buffer.alloc(CHAIN_ID_LENGTH + 1), + }), + ).toThrow("Property '.chainID' maxLength exceeded"); + }); + }); describe('verify', () => { - let livenessTerminationCommand: TerminateSidechainForLivenessCommand; - let commandVerifyContext: CommandVerifyContext; - let interoperableCCMethods: Map; - let ccCommands: Map; - let transaction: Transaction; - let transactionParams: TerminateSidechainForLivenessParams; - let encodedTransactionParams: Buffer; let chainAccount: ChainAccount; beforeEach(async () => { - interoperableCCMethods = new Map(); - ccCommands = new Map(); - - livenessTerminationCommand = new TerminateSidechainForLivenessCommand( - interopMod.stores, - interopMod.events, - interoperableCCMethods, - ccCommands, - interopMod['internalMethod'], - ); - - transactionParams = { - chainID: utils.intToBuffer(3, 4), - }; chainAccount = { lastCertificate: { height: 10, @@ -74,20 +114,6 @@ describe('TerminateSidechainForLivenessCommand', () => { name: 'staleSidechain', status: ChainStatus.ACTIVE, }; - encodedTransactionParams = codec.encode( - terminateSidechainForLivenessParamsSchema, - transactionParams, - ); - - transaction = new Transaction({ - module: MODULE_NAME_INTEROPERABILITY, - command: COMMAND_NAME_LIVENESS_TERMINATION, - fee: BigInt(100000000), - nonce: BigInt(0), - params: encodedTransactionParams, - senderPublicKey: utils.getRandomBytes(32), - signatures: [], - }); commandVerifyContext = createTransactionContext({ transaction, }).createCommandVerifyContext( @@ -139,45 +165,10 @@ describe('TerminateSidechainForLivenessCommand', () => { }); describe('execute', () => { - let livenessTerminationCommand: TerminateSidechainForLivenessCommand; let commandExecuteContext: CommandExecuteContext; - let interoperableCCMethods: Map; - let ccCommands: Map; - let transaction: Transaction; - let transactionParams: TerminateSidechainForLivenessParams; - let encodedTransactionParams: Buffer; 
let transactionContext: TransactionContext; beforeEach(() => { - interoperableCCMethods = new Map(); - ccCommands = new Map(); - livenessTerminationCommand = new TerminateSidechainForLivenessCommand( - interopMod.stores, - interopMod.events, - interoperableCCMethods, - ccCommands, - interopMod['internalMethod'], - ); - - transactionParams = { - chainID: utils.intToBuffer(3, 4), - }; - - encodedTransactionParams = codec.encode( - terminateSidechainForLivenessParamsSchema, - transactionParams, - ); - - transaction = new Transaction({ - module: MODULE_NAME_INTEROPERABILITY, - command: COMMAND_NAME_LIVENESS_TERMINATION, - fee: BigInt(100000000), - nonce: BigInt(0), - params: encodedTransactionParams, - senderPublicKey: utils.getRandomBytes(32), - signatures: [], - }); - transactionContext = createTransactionContext({ transaction, }); @@ -192,7 +183,7 @@ describe('TerminateSidechainForLivenessCommand', () => { it('should successfully terminate chain', async () => { await livenessTerminationCommand.execute(commandExecuteContext); expect(interopMod['internalMethod'].terminateChainInternal).toHaveBeenCalledWith( - expect.anything(), + commandExecuteContext, transactionParams.chainID, ); }); From a5dab9db0220dc9dbbff26c0ef710f97c029fda9 Mon Sep 17 00:00:00 2001 From: Franco NG Date: Thu, 12 Oct 2023 17:49:48 +0200 Subject: [PATCH 149/170] Add chain connector plugin readme (#9055) * Add README * Update doc ref link for chain-connector * Add details to chain connector plugin README --------- Co-authored-by: !shan --- .../README.md | 78 +++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 framework-plugins/lisk-framework-chain-connector-plugin/README.md diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/README.md b/framework-plugins/lisk-framework-chain-connector-plugin/README.md new file mode 100644 index 00000000000..475e243a079 --- /dev/null +++ b/framework-plugins/lisk-framework-chain-connector-plugin/README.md @@ -0,0 +1,78 @@ +# 
@liskhq/lisk-framework-chain-connector-plugin + +@liskhq/lisk-framework-chain-connector-plugin is a plugin for creating and sending Cross-Chain-Update (CCU) Transactions. + +Cross-chain update transactions are the carriers of the information transmitted between chains. By posting a cross-chain update, the receiving chain gets the information required about the advancement of the sending chain. The transaction can also include cross-chain messages and thus serves as an envelope for messages from one chain to another. + +## Installation + +```sh +$ npm install --save @liskhq/lisk-framework-chain-connector-plugin +``` + +## Config Options + +``` +{ + receivingChainID: string, + receivingChainWsURL?: string, + receivingChainIPCPath?: string, + ccuFrequency: number, + encryptedPrivateKey: string, + ccuFee: string, + isSaveCCU: boolean, + maxCCUSize: number, + registrationHeight: number, + ccuSaveLimit: number +} +``` + +## Parameters + +| Param | Required? | Description | +| ----------------------- | --------- | ------------------------------------------------------------------------------ | +| `receivingChainID` | **Y** | Chain ID of the receiving chain | +| `receivingChainWsURL` | **N** | The WS url of a receiving node | +| `receivingChainIPCPath` | **N** | The IPC path of a receiving node | +| `ccuFrequency` | **Y** | Number of blocks after which a CCU should be created | +| `encryptedPrivateKey` | **Y** | Encrypted privateKey of the relayer | +| `ccuFee` | **Y** | Fee to be paid for each CCU transaction | +| `isSaveCCU` | **Y** | Flag for the user to either save or send a CCU on creation. Send is by default | +| `maxCCUSize` | **Y** | Maximum size of CCU to be allowed | +| `registrationHeight` | **Y** | Height at the time of registration on the receiving chain | +| `ccuSaveLimit` | **Y** | Number of CCUs to save | + +## Usage + +Start your Lisk SDK with `--enable-chain-connector-plugin` flag, i.e. 
+ +```sh + $ ./bin/run start --enable-chain-connector-plugin +``` + +## Documentation + +[Setting up a relayer node](https://lisk.com/documentation/beta/run-blockchain/setup-relayer.html#installing-the-chain-connector-plugin): Details SDK Doc for setting up node with Chain Connector Plugin. + +[LIP-53 # CCU Properties](https://github.com/LiskHQ/lips/blob/main/proposals/lip-0053.md#cross-chain-update-transaction-properties): Explaination of CCU Properties from LIP-53. + +[Interoperability Example](https://github.com/LiskHQ/lisk-sdk/tree/release/6.1.0/examples/interop): Example of Interoperability with 2 sidechains and 1 mainchain, Chain Connector Plugin enabled. + +## License + +Copyright 2016-2023 Lisk Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +[lisk core github]: https://github.com/LiskHQ/lisk +[lisk documentation site]: https://lisk.com/documentation/lisk-sdk/v6/references/typedoc/modules/_liskhq_lisk_framework_chain_connector_plugin.html From 458f98cc401ca60b10edafa12c46b6a9b27ce89e Mon Sep 17 00:00:00 2001 From: sitetester Date: Fri, 13 Oct 2023 13:08:32 +0300 Subject: [PATCH 150/170] Unit test review - Interoperability genesis state initialization/finalization (#8980) * Update code & test cases per latest LIP changes * Use objectUtils.isBufferArrayOrdered * Fix some minor bugs --------- Co-authored-by: !shan --- .../base_interoperability_module.ts | 26 +- .../interoperability/mainchain/module.ts | 100 +- .../interoperability/sidechain/module.ts | 20 +- .../base_interoperability_module.spec.ts | 218 +++-- .../interoperability/mainchain/module.spec.ts | 877 ++++++++++-------- .../interoperability/sidechain/module.spec.ts | 539 ++++++----- 6 files changed, 1046 insertions(+), 734 deletions(-) diff --git a/framework/src/modules/interoperability/base_interoperability_module.ts b/framework/src/modules/interoperability/base_interoperability_module.ts index 01f784c9740..cb29d3b7313 100644 --- a/framework/src/modules/interoperability/base_interoperability_module.ts +++ b/framework/src/modules/interoperability/base_interoperability_module.ts @@ -133,11 +133,8 @@ export abstract class BaseInteroperabilityModule extends BaseInteroperableModule } // activeValidators must be ordered lexicographically by blsKey property - const sortedByBlsKeys = [...activeValidators].sort((a, b) => a.blsKey.compare(b.blsKey)); - for (let i = 0; i < activeValidators.length; i += 1) { - if (!activeValidators[i].blsKey.equals(sortedByBlsKeys[i].blsKey)) { - throw new Error('activeValidators must be ordered lexicographically by blsKey property.'); - } + if (!objectUtils.isBufferArrayOrdered(activeValidators.map(v => v.blsKey))) { + throw new Error('activeValidators must be ordered lexicographically by blsKey property.'); } // all 
blsKey properties must be pairwise distinct @@ -147,7 +144,7 @@ export abstract class BaseInteroperabilityModule extends BaseInteroperableModule } // for each validator in activeValidators, validator.bftWeight > 0 must hold - if (activeValidators.filter(v => v.bftWeight <= 0).length > 0) { + if (activeValidators.filter(v => v.bftWeight <= BigInt(0)).length > 0) { throw new Error(`validator.bftWeight must be > 0.`); } @@ -200,16 +197,15 @@ export abstract class BaseInteroperabilityModule extends BaseInteroperableModule } // terminatedStateAccounts is ordered lexicographically by stateAccount.chainID - const sortedByChainID = [...terminatedStateAccounts].sort((a, b) => - a.chainID.compare(b.chainID), - ); - - for (let i = 0; i < terminatedStateAccounts.length; i += 1) { - const stateAccountWithChainID = terminatedStateAccounts[i]; - if (!stateAccountWithChainID.chainID.equals(sortedByChainID[i].chainID)) { - throw new Error('terminatedStateAccounts must be ordered lexicographically by chainID.'); - } + if ( + !objectUtils.isBufferArrayOrdered( + terminatedStateAccounts.map(accountWithChainID => accountWithChainID.chainID), + ) + ) { + throw new Error('terminatedStateAccounts must be ordered lexicographically by chainID.'); + } + for (const stateAccountWithChainID of terminatedStateAccounts) { this._verifyChainID(stateAccountWithChainID.chainID, mainchainID, 'stateAccount.'); } } diff --git a/framework/src/modules/interoperability/mainchain/module.ts b/framework/src/modules/interoperability/mainchain/module.ts index 8dbf6e9f613..9c4e9e30558 100644 --- a/framework/src/modules/interoperability/mainchain/module.ts +++ b/framework/src/modules/interoperability/mainchain/module.ts @@ -261,15 +261,7 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule throw new Error(`ownChainName must be equal to ${CHAIN_NAME_MAINCHAIN}.`); } - // if chainInfos is empty, then ownChainNonce == 0 - // If chainInfos is non-empty, ownChainNonce > 0 - if 
(chainInfos.length === 0 && ownChainNonce !== BigInt(0)) { - throw new Error(`ownChainNonce must be 0 if chainInfos is empty.`); - } else if (chainInfos.length !== 0 && ownChainNonce <= 0) { - throw new Error(`ownChainNonce must be positive if chainInfos is not empty.`); - } - - this._verifyChainInfos(ctx, chainInfos); + this._verifyChainInfos(ctx, chainInfos, ownChainNonce); this._verifyTerminatedStateAccounts(chainInfos, terminatedStateAccounts, mainchainID); this._verifyTerminatedOutboxAccounts( chainInfos, @@ -281,7 +273,19 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule } // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0045.md#mainchain - private _verifyChainInfos(ctx: GenesisBlockExecuteContext, chainInfos: ChainInfo[]) { + private _verifyChainInfos( + ctx: GenesisBlockExecuteContext, + chainInfos: ChainInfo[], + ownChainNonce: bigint, + ) { + // if chainInfos is empty, then ownChainNonce == 0 + // If chainInfos is non-empty, ownChainNonce > 0 + if (chainInfos.length === 0 && ownChainNonce !== BigInt(0)) { + throw new Error(`ownChainNonce must be 0 if chainInfos is empty.`); + } else if (chainInfos.length !== 0 && ownChainNonce <= 0) { + throw new Error(`ownChainNonce must be positive if chainInfos is not empty.`); + } + // Each entry chainInfo in chainInfos has a unique chainInfo.chainID const chainIDs = chainInfos.map(info => info.chainID); if (!objectUtils.bufferArrayUniqueItems(chainIDs)) { @@ -341,9 +345,9 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule mainchainID: Buffer, ) { // Sanity check to fulfill if-and-only-if situation - for (const account of terminatedStateAccounts) { + for (const terminatedStateAccount of terminatedStateAccounts) { const correspondingChainInfo = chainInfos.find(chainInfo => - chainInfo.chainID.equals(account.chainID), + chainInfo.chainID.equals(terminatedStateAccount.chainID), ); if ( !correspondingChainInfo || @@ -359,40 +363,52 @@ export class 
MainchainInteroperabilityModule extends BaseInteroperabilityModule // For each entry chainInfo in chainInfos, chainInfo.chainData.status == CHAIN_STATUS_TERMINATED // if and only if a corresponding entry (i.e., with chainID == chainInfo.chainID) exists in terminatedStateAccounts. if (chainInfo.chainData.status === ChainStatus.TERMINATED) { - const terminatedAccount = terminatedStateAccounts.find(tAccount => - tAccount.chainID.equals(chainInfo.chainID), + const correspondingTerminatedAccount = terminatedStateAccounts.find( + terminatedStateAccount => terminatedStateAccount.chainID.equals(chainInfo.chainID), ); - if (!terminatedAccount) { + if (!correspondingTerminatedAccount) { throw new Error( 'For each chainInfo with status terminated there should be a corresponding entry in terminatedStateAccounts.', ); } + } + } - this._verifyTerminatedStateAccountsCommon(terminatedStateAccounts, mainchainID); - - // For each entry stateAccount in terminatedStateAccounts holds - // stateAccount.stateRoot == chainData.lastCertificate.stateRoot, - // stateAccount.mainchainStateRoot == EMPTY_HASH, and - // stateAccount.initialized == True. - // Here chainData is the corresponding entry (i.e., with chainID == stateAccount.chainID) in chainInfos. 
- const stateAccount = terminatedAccount.terminatedStateAccount; - if (stateAccount) { - if (!stateAccount.stateRoot.equals(chainInfo.chainData.lastCertificate.stateRoot)) { - throw new Error( - "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", - ); - } - - if (!stateAccount.mainchainStateRoot.equals(EMPTY_HASH)) { - throw new Error( - `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, - ); - } - - if (!stateAccount.initialized) { - throw new Error('stateAccount is not initialized.'); - } - } + this._verifyTerminatedStateAccountsCommon(terminatedStateAccounts, mainchainID); + + /** + * For each entry stateAccount in terminatedStateAccounts holds + * stateAccount.terminatedStateAccount.mainchainStateRoot == EMPTY_HASH, and stateAccount.terminatedStateAccount.initialized == True. + * + * Moreover, let chainInfo be the corresponding entry in chainInfos (i.e., with chainInfo.chainID == stateAccount.chainID); then it holds that + * stateAccount.terminatedStateAccount.stateRoot == chainInfo.chainData.lastCertificate.stateRoot. 
+ */ + for (const terminatedStateAccountWithChainID of terminatedStateAccounts) { + if ( + !terminatedStateAccountWithChainID.terminatedStateAccount.mainchainStateRoot.equals( + EMPTY_HASH, + ) + ) { + throw new Error( + `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, + ); + } + if (!terminatedStateAccountWithChainID.terminatedStateAccount.initialized) { + throw new Error('stateAccount is not initialized.'); + } + + const correspondingChainInfo = chainInfos.find(chainInfo => + chainInfo.chainID.equals(terminatedStateAccountWithChainID.chainID), + ) as ChainInfo; // at this point, it's not undefined, since similar check already applied above + + if ( + !terminatedStateAccountWithChainID.terminatedStateAccount.stateRoot.equals( + correspondingChainInfo.chainData.lastCertificate.stateRoot, + ) + ) { + throw new Error( + "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", + ); } } } @@ -426,9 +442,9 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule terminatedStateAccounts.find(a => a.chainID.equals(outboxAccount.chainID)) === undefined ) { throw new Error( - `Each entry outboxAccount in terminatedOutboxAccounts must have a corresponding entry in terminatedStateAccount. 
outboxAccount with chainID: ${outboxAccount.chainID.toString( + `outboxAccount with chainID: ${outboxAccount.chainID.toString( 'hex', - )} does not exist in terminatedStateAccounts`, + )} must have a corresponding entry in terminatedStateAccounts.`, ); } } diff --git a/framework/src/modules/interoperability/sidechain/module.ts b/framework/src/modules/interoperability/sidechain/module.ts index eb528d789f8..bd554eccce8 100644 --- a/framework/src/modules/interoperability/sidechain/module.ts +++ b/framework/src/modules/interoperability/sidechain/module.ts @@ -237,8 +237,13 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule ctx: GenesisBlockExecuteContext, genesisInteroperability: GenesisInteroperability, ) { - const { ownChainName, ownChainNonce, chainInfos, terminatedStateAccounts } = - genesisInteroperability; + const { + ownChainName, + ownChainNonce, + chainInfos, + terminatedStateAccounts, + terminatedOutboxAccounts, + } = genesisInteroperability; // If chainInfos is empty, then check that: // @@ -257,6 +262,9 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule if (terminatedStateAccounts.length !== 0) { throw new Error(`terminatedStateAccounts must be empty, ${ifChainInfosIsEmpty}.`); } + if (terminatedOutboxAccounts.length !== 0) { + throw new Error(`terminatedOutboxAccounts must be empty, ${ifChainInfosIsEmpty}.`); + } } else { // ownChainName // has length between MIN_CHAIN_NAME_LENGTH and MAX_CHAIN_NAME_LENGTH, @@ -267,7 +275,7 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule ownChainName.length > MAX_CHAIN_NAME_LENGTH // will only run if not already applied in schema ) { throw new Error( - `ownChainName.length must be between ${MIN_CHAIN_NAME_LENGTH} and ${MAX_CHAIN_NAME_LENGTH}`, + `ownChainName.length must be inclusively between ${MIN_CHAIN_NAME_LENGTH} and ${MAX_CHAIN_NAME_LENGTH}.`, ); } // CAUTION! 
@@ -290,7 +298,7 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule } // mainchainInfo.chainID == getMainchainID(); const mainchainInfo = chainInfos[0]; - const mainchainID = getMainchainID(mainchainInfo.chainID); + const mainchainID = getMainchainID(ctx.chainID); if (!mainchainInfo.chainID.equals(mainchainID)) { throw new Error(`mainchainInfo.chainID must be equal to ${mainchainID.toString('hex')}.`); } @@ -335,7 +343,7 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule if (terminatedStateAccount.initialized) { if (terminatedStateAccount.stateRoot.equals(EMPTY_HASH)) { throw new Error( - `stateAccount.stateRoot mst be not equal to "${EMPTY_HASH.toString( + `stateAccount.stateRoot must not be equal to "${EMPTY_HASH.toString( 'hex', )}", if initialized is true.`, ); @@ -358,7 +366,7 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule } if (terminatedStateAccount.mainchainStateRoot.equals(EMPTY_HASH)) { throw new Error( - `terminatedStateAccount.mainchainStateRoot must be not equal to "${EMPTY_HASH.toString( + `terminatedStateAccount.mainchainStateRoot must not be equal to "${EMPTY_HASH.toString( 'hex', )}", if initialized is false.`, ); diff --git a/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts b/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts index 0a92b2ed446..3db753832a2 100644 --- a/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts +++ b/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts @@ -12,12 +12,14 @@ import { lastCertificate, terminatedOutboxAccount, terminatedStateAccount, + mainchainID, } from './interopFixtures'; import { ActiveValidator, ChainStatus, EMPTY_BYTES, MainchainInteroperabilityModule, + getMainchainID, } from '../../../../src'; import { MAX_NUM_VALIDATORS, @@ -43,6 +45,10 @@ import { TerminatedOutboxCreatedEvent } 
from '../../../../src/modules/interopera import { TerminatedStateCreatedEvent } from '../../../../src/modules/interoperability/events/terminated_state_created'; import { InvalidRMTVerification } from '../../../../src/modules/interoperability/events/invalid_rmt_verification'; import { InvalidSMTVerification } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; +import { + ChainInfo, + TerminatedStateAccountWithChainID, +} from '../../../../src/modules/interoperability/types'; describe('initGenesisState Common Tests', () => { const chainID = Buffer.from([0, 0, 0, 0]); @@ -344,76 +350,74 @@ must NOT have more than ${MAX_NUM_VALIDATORS} items`, ); }); - describe('activeValidators.certificateThreshold', () => { - it(`should throw error if 'totalWeight / BigInt(3) + BigInt(1) > certificateThreshold'`, async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainValidators: { - activeValidators: [ - { - blsKey: Buffer.from( - '901550cf1fde7dde29218ee82c5196754efea99813af079bb2809a7fad8a053f93726d1e61ccf427118dcc27b0c07d9a', - 'hex', - ), - bftWeight: BigInt(100), - }, - { - // utils.getRandomBytes(BLS_PUBLIC_KEY_LENGTH).toString('hex') - blsKey: Buffer.from( - 'c1d3c7919a4ea7e3b5d5b0068513c2cd7fe047a632e13d9238a51fcd6a4afd7ee16906978992a702bccf1f0149fa5d39', - 'hex', - ), - bftWeight: BigInt(200), - }, - ], - // totalWeight / BigInt(3) + BigInt(1) = (100 + 200)/3 + 1 = 101 - // totalWeight / BigInt(3) + BigInt(1) > certificateThreshold - certificateThreshold: BigInt(10), // 101 > 10 - }, + it(`should throw error if 'totalWeight / BigInt(3) + BigInt(1) > certificateThreshold'`, async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainValidators: { + activeValidators: [ + { + blsKey: Buffer.from( + 
'901550cf1fde7dde29218ee82c5196754efea99813af079bb2809a7fad8a053f93726d1e61ccf427118dcc27b0c07d9a', + 'hex', + ), + bftWeight: BigInt(100), + }, + { + // utils.getRandomBytes(BLS_PUBLIC_KEY_LENGTH).toString('hex') + blsKey: Buffer.from( + 'c1d3c7919a4ea7e3b5d5b0068513c2cd7fe047a632e13d9238a51fcd6a4afd7ee16906978992a702bccf1f0149fa5d39', + 'hex', + ), + bftWeight: BigInt(200), + }, + ], + // totalWeight / BigInt(3) + BigInt(1) = (100 + 200)/3 + 1 = 101 + // totalWeight / BigInt(3) + BigInt(1) > certificateThreshold + certificateThreshold: BigInt(10), // 101 > 10 }, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `Invalid certificateThreshold input.`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `Invalid certificateThreshold input.`, + ); + }); - it(`should throw error if certificateThreshold > totalWeight`, async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainValidators: { - activeValidators: [ - { - blsKey: Buffer.from( - '901550cf1fde7dde29218ee82c5196754efea99813af079bb2809a7fad8a053f93726d1e61ccf427118dcc27b0c07d9a', - 'hex', - ), - bftWeight: BigInt(10), - }, - ], - certificateThreshold: BigInt(20), - }, + it(`should throw error if certificateThreshold > totalWeight`, async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainValidators: { + activeValidators: [ + { + blsKey: Buffer.from( + '901550cf1fde7dde29218ee82c5196754efea99813af079bb2809a7fad8a053f93726d1e61ccf427118dcc27b0c07d9a', + 'hex', + ), + bftWeight: BigInt(10), + }, + ], + certificateThreshold: BigInt(20), }, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `Invalid certificateThreshold input.`, - ); - }); + await 
expect(interopMod.initGenesisState(context)).rejects.toThrow( + `Invalid certificateThreshold input.`, + ); }); it(`should throw error if invalid validatorsHash provided`, async () => { @@ -445,6 +449,50 @@ must NOT have more than ${MAX_NUM_VALIDATORS} items`, }); }); + describe('_verifyChainID', () => { + it('should throw error if chainInfo.chainID equals getMainchainID()', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainID: mainchainID, + }, + ], + }, + params, + ); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `chainInfo.chainID must not be equal to ${mainchainID.toString('hex')}.`, + ); + }); + + it('should throw error if chainInfo.chainID[0] !== getMainchainID()[0]', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainID: Buffer.from([1, 0, 0, 0]), + }, + ], + }, + params, + ); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `chainInfo.chainID[0] must be equal to ${mainchainID[0]}.`, + ); + }); + + it('should not throw error when chainID !== mainchainID & chainInfo.chainId[0] == getMainchainID()[0]', async () => { + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + }); + }); + describe('_verifyTerminatedStateAccountsCommon', () => { certificateThreshold = BigInt(10); const validChainInfos = [ @@ -533,6 +581,46 @@ must NOT have more than ${MAX_NUM_VALIDATORS} items`, 'terminatedStateAccounts must be ordered lexicographically by chainID.', ); }); + + it('should call _verifyChainID the same number of times as size of terminatedStateAccounts', () => { + const interopModPrototype = Object.getPrototypeOf(interopMod); + jest.spyOn(interopModPrototype, '_verifyChainID' as any); + + const chainInfoLocal: ChainInfo = { + ...chainInfo, + chainData: { + ...chainData, + status: 
ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }; + + const terminatedStateAccounts: TerminatedStateAccountWithChainID[] = [ + { + chainID: chainInfoLocal.chainID, + terminatedStateAccount, + }, + { + chainID: Buffer.from([0, 0, 0, 2]), + terminatedStateAccount, + }, + ]; + + interopModPrototype._verifyTerminatedStateAccountsCommon( + terminatedStateAccounts, + getMainchainID(params.chainID as Buffer), + ); + expect(interopModPrototype['_verifyChainID']).toHaveBeenCalledTimes( + terminatedStateAccounts.length, + ); + }); }); describe('processGenesisState', () => { diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index b62c2eaef71..e5e5533ea65 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -13,14 +13,23 @@ */ import { utils } from '@liskhq/lisk-cryptography'; +import { codec } from '@liskhq/lisk-codec'; import { PrefixedStateReadWriter } from '../../../../../src/state_machine/prefixed_state_read_writer'; import { HASH_LENGTH, CHAIN_NAME_MAINCHAIN, EMPTY_HASH, + MODULE_NAME_INTEROPERABILITY, } from '../../../../../src/modules/interoperability/constants'; -import { ChainStatus, MainchainInteroperabilityModule } from '../../../../../src'; -import { ChainInfo } from '../../../../../src/modules/interoperability/types'; +import { + ChainStatus, + MainchainInteroperabilityModule, + genesisInteroperabilitySchema, +} from '../../../../../src'; +import { + ChainInfo, + GenesisInteroperability, +} from '../../../../../src/modules/interoperability/types'; import { InMemoryPrefixedStateDB, createGenesisBlockContext, @@ -39,13 +48,13 @@ import { lastCertificate, 
terminatedStateAccount, terminatedOutboxAccount, - mainchainID, createInitGenesisStateContext, contextWithValidValidatorsHash, getStoreMock, } from '../interopFixtures'; import { RegisteredNamesStore } from '../../../../../src/modules/interoperability/stores/registered_names'; import { InvalidNameError } from '../../../../../src/modules/interoperability/errors'; +import { BaseInteroperabilityModule } from '../../../../../src/modules/interoperability/base_interoperability_module'; describe('initGenesisState', () => { const chainID = Buffer.from([0, 0, 0, 0]); @@ -108,8 +117,21 @@ describe('initGenesisState', () => { ); }); - describe('when chainInfos is empty', () => { - it('should throw error if ownChainNonce !== 0', async () => { + it(`should call _verifyChainInfos from initGenesisState`, async () => { + jest.spyOn(interopMod, '_verifyChainInfos' as any); + + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + expect(interopMod['_verifyChainInfos']).toHaveBeenCalledTimes(1); + }); + + describe('_verifyChainInfos', () => { + beforeEach(() => { + certificateThreshold = BigInt(10); + }); + + it('should throw error when chainInfos is empty & ownChainNonce !== 0', async () => { const context = createInitGenesisStateContext( { ...genesisInteroperability, @@ -122,14 +144,8 @@ describe('initGenesisState', () => { 'ownChainNonce must be 0 if chainInfos is empty.', ); }); - }); - describe('when chainInfos is not empty', () => { - beforeEach(() => { - certificateThreshold = BigInt(10); - }); - - it('should throw error if ownChainNonce <= 0', async () => { + it('should throw error when chainInfos is not empty & ownChainNonce <= 0', async () => { const context = createInitGenesisStateContext( { ...genesisInteroperability, @@ -177,45 +193,51 @@ describe('initGenesisState', () => { ); }); - describe('chainInfo.chainID', () => { - it('should throw error if chainInfo.chainID equals getMainchainID()', async () => { - const 
context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainID: mainchainID, - }, - ], - }, - params, - ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `chainID must not be equal to ${mainchainID.toString('hex')}.`, - ); - }); + it("should throw error if not 'the entries chainData.name must be pairwise distinct' ", async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainID: Buffer.from([0, 0, 0, 1]), + }, + { + ...chainInfo, + chainID: Buffer.from([0, 0, 0, 2]), + }, + ], + }, + params, + ); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'chainData.name must be pairwise distinct.', + ); + }); - it('should throw error if chainInfo.chainID[0] !== getMainchainID()[0]', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainID: Buffer.from([1, 0, 0, 0]), - }, - ], - }, - params, - ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `chainID[0] must be equal to ${mainchainID[0]}.`, - ); - }); + it('should check that _verifyChainID is called from _verifyChainInfos', async () => { + jest.spyOn(interopMod, '_verifyChainID' as any); + + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + + // must be true to pass this test + expect(interopMod['_verifyChainID']).toHaveBeenCalled(); + }); + + it('should check that _verifyChainData is called from _verifyChainInfos', async () => { + jest.spyOn(interopMod, '_verifyChainData' as any); + + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + + // must be true to pass this test + expect(interopMod['_verifyChainData']).toHaveBeenCalled(); }); - describe('chainInfo.chainData', () => { + describe('_verifyChainData', () => { 
it(`should throw error if not 'chainData.lastCertificate.timestamp < g.header.timestamp'`, async () => { const context = createInitGenesisStateContext( { @@ -302,375 +324,498 @@ describe('initGenesisState', () => { }); }); - describe('terminatedStateAccounts', () => { - it('should not throw error if length of terminatedStateAccounts is zero', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [], - }, - params, - ); + it('should check that _verifyChannelData is called from _verifyChainInfos', async () => { + jest.spyOn(interopMod, '_verifyChannelData' as any); - await expect(interopMod.initGenesisState(context)).resolves.not.toThrow(); - }); + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); - it('should throw if chainInfo.chainData.status===TERMINATED exists but no terminateStateAccount', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - // No terminatedStateAccount - terminatedStateAccounts: [], - }, - params, - ); + // must be true to pass this test + expect(interopMod['_verifyChannelData']).toHaveBeenCalled(); + }); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each chainInfo with status terminated 
there should be a corresponding entry in terminatedStateAccounts.`, - ); - }); + it('should check that _verifyChainValidators is called from _verifyChainInfos', async () => { + jest.spyOn(interopMod, '_verifyChainValidators' as any); - it('should throw if there is an entry in terminateStateAccounts for a chainID that is ACTIVE in chainInfos', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.ACTIVE, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + + // must be true to pass this test + expect(interopMod['_verifyChainValidators']).toHaveBeenCalled(); + }); + }); + + it(`should call _verifyTerminatedStateAccounts from initGenesisState`, async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); + + await interopMod.initGenesisState(contextWithValidValidatorsHash); + expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); + }); + + describe('_verifyTerminatedStateAccounts', () => { + it("should not throw error if length of terminatedStateAccounts is zero while there doesn't exist some chain in chainData with status TERMINATED", async () => { + certificateThreshold = BigInt(10); + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.ACTIVE, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - 
terminatedStateAccount, + chainValidators: { + activeValidators, + certificateThreshold, }, - ], - }, - params, - ); + }, + ], + terminatedStateAccounts: [], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, - ); - }); + await expect(interopMod.initGenesisState(context)).resolves.not.toThrow(); + }); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status !== CHAIN_STATUS_TERMINATED', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, + it('should throw if there is an entry in terminateStateAccounts for a chainID that is ACTIVE in chainInfos', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.ACTIVE, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, + chainValidators: { + activeValidators, + certificateThreshold, }, - ], - }, - params, - ); + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, - ); - }); + await 
expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', + ); + }); - it('should throw error if chainID in terminatedStateAccounts does not exist in chainInfo', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - status: ChainStatus.TERMINATED, - }, - chainValidators: { - activeValidators, - certificateThreshold, + it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status !== CHAIN_STATUS_TERMINATED', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, // status: ChainStatus.REGISTERED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedStateAccount, + chainValidators: { + activeValidators, + certificateThreshold, }, - ], - }, - params, - ); + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', + ); + }); - it('should throw error if some stateAccount 
in terminatedStateAccounts have stateRoot not equal to chainData.lastCertificate.stateRoot', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), + it('should throw error if chainID in terminatedStateAccounts does not exist in chainInfo', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, + status: ChainStatus.TERMINATED, }, - ], - }, - params, - ); + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 2]), + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', + ); + }); - it('should throw error if some stateAccount in terminatedStateAccounts have mainchainStateRoot not equal to EMPTY_HASH', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, 
- mainchainStateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), + it('should throw if chainInfo.chainData.status === TERMINATED but no corresponding terminateStateAccount', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - }, - params, - ); + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + // No terminatedStateAccount + terminatedStateAccounts: [], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `For each chainInfo with status terminated there should be a corresponding entry in terminatedStateAccounts.`, + ); + }); - it('should throw error if some stateAccount in terminatedStateAccounts is not initialized', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - initialized: false, + it('should call _verifyTerminatedStateAccountsCommon', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccountsCommon' as any); + + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - }, - params, - ); + chainValidators: { 
+ activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'stateAccount is not initialized.', - ); - }); + await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); + expect(interopMod['_verifyTerminatedStateAccountsCommon']).toHaveBeenCalledTimes(1); }); - describe('terminatedOutboxAccounts', () => { - it('should throw error if terminatedOutboxAccounts do not hold unique chainID', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedOutboxAccounts: [ - { - chainID: chainInfo.chainID, - terminatedOutboxAccount, - }, - { - chainID: chainInfo.chainID, - terminatedOutboxAccount, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, + it('should throw error if some stateAccount in terminatedStateAccounts have mainchainStateRoot not equal to EMPTY_HASH', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount: { + ...terminatedStateAccount, + mainchainStateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), // *** }, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'terminatedOutboxAccounts do not hold unique chainID', - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, + ); + }); - it('should throw error if terminatedOutboxAccounts is not 
ordered lexicographically by chainID', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...validChainInfos[0], - chainData: { - ...validChainInfos[0].chainData, - name: 'dummy1', - }, - chainID: Buffer.from([0, 0, 0, 1]), - }, - { - ...validChainInfos[0], - chainData: { - ...validChainInfos[0].chainData, - name: 'dummy2', - }, - chainID: Buffer.from([0, 0, 0, 2]), - }, - ], - terminatedOutboxAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedOutboxAccount, - }, - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedOutboxAccount, - }, - ], - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount, + it('should throw error if some stateAccount in terminatedStateAccounts is not initialized', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount: { + ...terminatedStateAccount, + initialized: false, // *** }, - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'stateAccount is not initialized.', + ); + }); + + it('should throw error if some stateAccount in terminatedStateAccounts have stateRoot not equal to chainData.lastCertificate.stateRoot', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount: { + ...terminatedStateAccount, + stateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), // *** 
}, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'terminatedOutboxAccounts must be ordered lexicographically by chainID.', - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", + ); + }); + }); - it("should throw error if terminatedOutboxAccounts don't have a corresponding entry (with chainID == outboxAccount.chainID) in terminatedStateAccounts", async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount, + it(`should call _verifyTerminatedOutboxAccounts from initGenesisState `, async () => { + jest.spyOn(interopMod, '_verifyTerminatedOutboxAccounts' as any); + + await interopMod.initGenesisState(contextWithValidValidatorsHash); + expect(interopMod['_verifyTerminatedOutboxAccounts']).toHaveBeenCalledTimes(1); + }); + + describe('_verifyTerminatedOutboxAccounts', () => { + it('should throw error if terminatedOutboxAccounts do not hold unique chainID', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedOutboxAccounts: [ + { + chainID: chainInfo.chainID, + terminatedOutboxAccount, + }, + { + chainID: chainInfo.chainID, + terminatedOutboxAccount, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'terminatedOutboxAccounts do not hold unique chainID', + ); + }); + + it('should throw error if terminatedOutboxAccounts is not ordered 
lexicographically by chainID', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...validChainInfos[0], + chainData: { + ...validChainInfos[0].chainData, + name: 'dummy1', }, - ], - terminatedOutboxAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedOutboxAccount, + chainID: Buffer.from([0, 0, 0, 1]), + }, + { + ...validChainInfos[0], + chainData: { + ...validChainInfos[0].chainData, + name: 'dummy2', }, - ], - }, - params, - ); + chainID: Buffer.from([0, 0, 0, 2]), + }, + ], + terminatedOutboxAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 2]), + terminatedOutboxAccount, + }, + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedOutboxAccount, + }, + ], + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount, + }, + { + chainID: Buffer.from([0, 0, 0, 2]), + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `Each entry outboxAccount in terminatedOutboxAccounts must have a corresponding entry in terminatedStateAccount. 
outboxAccount with chainID: ${Buffer.from( - [0, 0, 0, 2], - ).toString('hex')} does not exist in terminatedStateAccounts`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'terminatedOutboxAccounts must be ordered lexicographically by chainID.', + ); + }); + + it("should throw error if terminatedOutboxAccounts don't have a corresponding entry (with chainID == outboxAccount.chainID) in terminatedStateAccounts", async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount, + }, + ], + terminatedOutboxAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 2]), + terminatedOutboxAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `outboxAccount with chainID: ${Buffer.from([0, 0, 0, 2]).toString( + 'hex', + )} must have a corresponding entry in terminatedStateAccounts.`, + ); }); }); + it(`should call processGenesisState from initGenesisState`, async () => { + jest.spyOn(interopMod, 'processGenesisState' as any); + + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.not.toThrow(); + + expect(interopMod['processGenesisState']).toHaveBeenCalledTimes(1); + }); + describe('processGenesisState', () => { - it('should check that processGenesisState method has been called', async () => { - jest.spyOn(interopMod, 'processGenesisState'); + let registeredNamesStore: RegisteredNamesStore; + + beforeEach(() => { + registeredNamesStore = interopMod.stores.get(RegisteredNamesStore); + }); + + it('should check that super.processGenesisState has been called', async () => { + const spyInstance = jest.spyOn(BaseInteroperabilityModule.prototype, 'processGenesisState'); + await 
interopMod.initGenesisState(contextWithValidValidatorsHash); + expect(spyInstance).toHaveBeenCalledTimes(1); + }); + + it('should check that all entries are created in registered names substore', async () => { + jest.spyOn(registeredNamesStore, 'set'); await expect( interopMod.initGenesisState(contextWithValidValidatorsHash), ).resolves.not.toThrow(); - expect(interopMod.processGenesisState).toHaveBeenCalled(); - expect(registeredNamesStoreMock.set).toHaveBeenCalledTimes(2); + // let's go with dynamic fixtures, so that if chainInfos length will change inside contextWithValidValidatorsHash, + // we wouldn't have to refactor this part of tests + const genesisInteroperabilityLocal = codec.decode( + genesisInteroperabilitySchema, + contextWithValidValidatorsHash.assets.getAsset(MODULE_NAME_INTEROPERABILITY) as Buffer, // not undefined at this point + ); + + expect(registeredNamesStore.set).toHaveBeenCalledTimes( + 1 + genesisInteroperabilityLocal.chainInfos.length, + ); + + for (const chainInfoLocal of genesisInteroperabilityLocal.chainInfos) { + expect(registeredNamesStore.set).toHaveBeenCalledWith( + contextWithValidValidatorsHash, + Buffer.from(chainInfoLocal.chainData.name, 'ascii'), + { + chainID: chainInfo.chainID, + }, + ); + } + + expect(registeredNamesStore.set).toHaveBeenCalledWith( + contextWithValidValidatorsHash, + Buffer.from(CHAIN_NAME_MAINCHAIN, 'ascii'), + { + chainID: contextWithValidValidatorsHash.chainID, + }, + ); }); }); }); diff --git a/framework/test/unit/modules/interoperability/sidechain/module.spec.ts b/framework/test/unit/modules/interoperability/sidechain/module.spec.ts index 7d82ac2029a..2c265156395 100644 --- a/framework/test/unit/modules/interoperability/sidechain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/module.spec.ts @@ -44,6 +44,7 @@ import { MIN_CHAIN_NAME_LENGTH, } from '../../../../../src/modules/interoperability/constants'; import { InvalidNameError } from 
'../../../../../src/modules/interoperability/errors'; +import { BaseInteroperabilityModule } from '../../../../../src/modules/interoperability/base_interoperability_module'; describe('initGenesisState', () => { const chainID = Buffer.from([1, 2, 3, 4]); @@ -51,6 +52,52 @@ describe('initGenesisState', () => { let stateStore: PrefixedStateReadWriter; let interopMod: SidechainInteroperabilityModule; + const activeValidators = [ + { + ...activeValidator, + bftWeight: BigInt(300), + }, + ]; + + const defaultData = { + ...genesisInteroperability, + ownChainName: 'dummy', + chainInfos: [ + { + ...chainInfo, + chainID: getMainchainID(chainID), + chainData: { + ...chainData, + name: CHAIN_NAME_MAINCHAIN, + }, + }, + ], + }; + + const certificateThreshold = BigInt(150); + const chainInfosDefault = [ + { + ...defaultData.chainInfos[0], + chainData: { + ...defaultData.chainInfos[0].chainData, + lastCertificate: { + ...lastCertificate, + timestamp: Date.now() / 10000, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + channelData: { + ...defaultData.chainInfos[0].channelData, + messageFeeTokenID: getTokenIDLSK(chainID), + }, + chainValidators: { + ...chainValidators, + activeValidators, + certificateThreshold, + }, + }, + ]; + beforeEach(() => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); interopMod = new SidechainInteroperabilityModule(); @@ -60,6 +107,26 @@ describe('initGenesisState', () => { }; }); + it('should check that _verifyChainInfos is called from initGenesisState', async () => { + jest.spyOn(interopMod, '_verifyChainInfos' as any); + + const genesisInteropWithEmptyChainInfos = { + ...genesisInteroperability, + chainInfos: [], + }; + + const context = createInitGenesisStateContext( + { + ...genesisInteropWithEmptyChainInfos, + ownChainName: 'xyz', + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow(); + 
expect(interopMod['_verifyChainInfos']).toHaveBeenCalledTimes(1); + }); + describe('_verifyChainInfos', () => { describe('when chainInfos is empty', () => { const genesisInteropWithEmptyChainInfos = { @@ -97,7 +164,7 @@ describe('initGenesisState', () => { ); }); - it('should throw error terminatedStateAccounts is not empty', async () => { + it('should throw error when terminatedStateAccounts is not empty', async () => { const context = createInitGenesisStateContext( { ...genesisInteropWithEmptyChainInfos, @@ -117,69 +184,31 @@ describe('initGenesisState', () => { `terminatedStateAccounts must be empty, ${ifChainInfosIsEmpty}.`, ); }); - }); - describe('when chainInfos is not empty', () => { - const defaultData = { - ...genesisInteroperability, - ownChainName: 'dummy', - chainInfos: [ + + it('should throw error when terminatedOutboxAccounts is not empty', async () => { + const context = createInitGenesisStateContext( { - ...chainInfo, - chainID: getMainchainID(chainID), - chainData: { - ...chainData, - name: CHAIN_NAME_MAINCHAIN, - }, + ...genesisInteropWithEmptyChainInfos, + ownChainName: '', + ownChainNonce: BigInt(0), + terminatedOutboxAccounts: [ + { + chainID, + terminatedOutboxAccount, + }, + ], }, - ], - }; - - const activeValidators = [ - { - ...activeValidator, - bftWeight: BigInt(300), - }, - ]; + params, + ); - const certificateThreshold = BigInt(150); - const chainInfosDefault = [ - { - ...defaultData.chainInfos[0], - chainData: { - ...defaultData.chainInfos[0].chainData, - lastCertificate: { - ...lastCertificate, - timestamp: Date.now() / 10000, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - channelData: { - ...defaultData.chainInfos[0].channelData, - messageFeeTokenID: getTokenIDLSK(chainID), - }, - chainValidators: { - ...chainValidators, - activeValidators, - certificateThreshold, - }, - }, - ]; + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `terminatedOutboxAccounts must be 
empty, ${ifChainInfosIsEmpty}.`, + ); + }); + }); + describe('when chainInfos is not empty', () => { describe('ownChainName', () => { - it(`should throw error if doesn't contain chars from ${validNameChars}`, async () => { - const context = createInitGenesisStateContext( - { - ...defaultData, - ownChainName: 'a%b', - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - new InvalidNameError('ownChainName').message, - ); - }); - it(`should throw error if doesn't have length between ${MIN_CHAIN_NAME_LENGTH} and ${MAX_CHAIN_NAME_LENGTH}`, async () => { const context1 = createInitGenesisStateContext( { @@ -189,7 +218,7 @@ describe('initGenesisState', () => { params, ); await expect(interopMod.initGenesisState(context1)).rejects.toThrow( - `ownChainName.length must be between ${MIN_CHAIN_NAME_LENGTH} and ${MAX_CHAIN_NAME_LENGTH}`, + `ownChainName.length must be inclusively between ${MIN_CHAIN_NAME_LENGTH} and ${MAX_CHAIN_NAME_LENGTH}.`, ); const context2 = createInitGenesisStateContext( @@ -206,6 +235,20 @@ describe('initGenesisState', () => { ); }); + it(`should throw error if doesn't contain chars from ${validNameChars}`, async () => { + const context = createInitGenesisStateContext( + { + ...defaultData, + ownChainName: 'a%b', + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + new InvalidNameError('ownChainName').message, + ); + }); + it(`should throw error if === ${CHAIN_NAME_MAINCHAIN}`, async () => { const context = createInitGenesisStateContext( { @@ -370,213 +413,192 @@ describe('initGenesisState', () => { }); }); - describe('_verifyTerminatedStateAccounts', () => { - const chainIDNotEqualToOwnChainID = Buffer.from([1, 3, 5, 7]); + it('should call _verifyChannelData & _verifyChainValidators', async () => { + jest.spyOn(interopMod, '_verifyChannelData' as any); + jest.spyOn(interopMod, '_verifyChainValidators' as any); - it(`should throw error if stateAccount.chainID is equal to 
getMainchainID()`, async () => { - const chainIDDefault = getMainchainID(chainID); - const context = createInitGenesisStateContext( - { - ...defaultData, - chainInfos: [ - { - ...defaultData.chainInfos[0], - chainData: { - ...defaultData.chainInfos[0].chainData, - lastCertificate: { - ...lastCertificate, - timestamp: Date.now() / 10000, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - channelData: { - ...defaultData.chainInfos[0].channelData, - messageFeeTokenID: getTokenIDLSK(chainID), - }, - chainValidators: { - ...chainValidators, - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainIDDefault, - terminatedStateAccount, - }, - ], - }, - { - ...params, - header: { - timestamp: Date.now(), - } as any, - }, - ); + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + }, + { + ...params, + header: { + timestamp: chainInfosDefault[0].chainData.lastCertificate.timestamp + 1000, + } as any, + }, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.chainID must not be equal to ${chainIDDefault.toString('hex')}.`, - ); - }); + await interopMod.initGenesisState(context); + expect(interopMod['_verifyChannelData']).toHaveBeenCalledTimes(1); + expect(interopMod['_verifyChainValidators']).toHaveBeenCalledTimes(1); + }); + }); + }); - it(`should throw error if not stateAccount.chainId[0] == getMainchainID()[0]`, async () => { - const mainchainID = getMainchainID(params.chainID as Buffer); - const context = createInitGenesisStateContext( - { - ...defaultData, - chainInfos: chainInfosDefault, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 1, 2, 3]), - terminatedStateAccount, - }, - ], - }, - params, - ); + it('should check that _verifyTerminatedStateAccounts is called from initGenesisState', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); - await 
expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.chainID[0] must be equal to ${mainchainID[0]}.`, - ); - }); + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedStateAccounts: [ + { + chainID: Buffer.from([1, 1, 2, 3]), + terminatedStateAccount, + }, + ], + }, + params, + ); - it(`should throw error if stateAccount.chainID is equal to OWN_CHAIN_ID`, async () => { - const context = createInitGenesisStateContext( + await interopMod.initGenesisState(context); + expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); + }); + + describe('_verifyTerminatedStateAccounts', () => { + const chainIDNotEqualToOwnChainID = Buffer.from([1, 3, 5, 7]); + + it('should call _verifyTerminatedStateAccountsCommon', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccountsCommon' as any); + + // const chainIDDefault = getMainchainID(chainID); + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedStateAccounts: [ { - ...defaultData, - chainInfos: chainInfosDefault, - terminatedStateAccounts: [ - { - chainID: params.chainID as Buffer, - terminatedStateAccount, - }, - ], + chainID: Buffer.from([1, 1, 2, 3]), + terminatedStateAccount, }, - params, - ); + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.chainID must not be equal to OWN_CHAIN_ID.`, - ); - }); + await interopMod.initGenesisState(context); + expect(interopMod['_verifyTerminatedStateAccountsCommon']).toHaveBeenCalledTimes(1); + }); - it(`should throw error if stateAccount.stateRoot equals EMPTY_HASH, if initialised is true`, async () => { - const context = createInitGenesisStateContext( + it(`should throw error if stateAccount.chainID is equal to OWN_CHAIN_ID`, async () => { + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + 
terminatedStateAccounts: [ { - ...defaultData, - chainInfos: chainInfosDefault, - terminatedStateAccounts: [ - { - chainID: chainIDNotEqualToOwnChainID, - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: EMPTY_HASH, - initialized: true, - }, - }, - ], + chainID: params.chainID as Buffer, + terminatedStateAccount, }, - params, - ); + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.stateRoot mst be not equal to "${EMPTY_HASH.toString( - 'hex', - )}", if initialized is true.`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `stateAccount.chainID must not be equal to OWN_CHAIN_ID.`, + ); + }); - it(`should throw error if stateAccount.mainchainStateRoot is not equal to EMPTY_HASH, if initialised is true`, async () => { - const context = createInitGenesisStateContext( - { - ...defaultData, - chainInfos: chainInfosDefault, - terminatedStateAccounts: [ - { - chainID: chainIDNotEqualToOwnChainID, - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: utils.getRandomBytes(HASH_LENGTH), - mainchainStateRoot: utils.getRandomBytes(HASH_LENGTH), - initialized: true, - }, + describe('when initialised is true', () => { + it(`should throw error if stateAccount.stateRoot equals EMPTY_HASH`, async () => { + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedStateAccounts: [ + { + chainID: chainIDNotEqualToOwnChainID, + terminatedStateAccount: { + ...terminatedStateAccount, + stateRoot: EMPTY_HASH, + initialized: true, }, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `terminatedStateAccount.mainchainStateRoot must be equal to "${EMPTY_HASH.toString( - 'hex', - )}", if initialized is true`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `stateAccount.stateRoot must not be equal to 
"${EMPTY_HASH.toString( + 'hex', + )}", if initialized is true.`, + ); + }); - it(`should throw error if stateAccount.stateRoot is not equal to EMPTY_HASH, if initialised is false`, async () => { - const context = createInitGenesisStateContext( - { - ...defaultData, - chainInfos: chainInfosDefault, - terminatedStateAccounts: [ - { - chainID: chainIDNotEqualToOwnChainID, - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: utils.getRandomBytes(HASH_LENGTH), - initialized: false, - }, + it(`should throw error if stateAccount.mainchainStateRoot is not equal to EMPTY_HASH`, async () => { + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedStateAccounts: [ + { + chainID: chainIDNotEqualToOwnChainID, + terminatedStateAccount: { + ...terminatedStateAccount, + stateRoot: utils.getRandomBytes(HASH_LENGTH), + mainchainStateRoot: utils.getRandomBytes(HASH_LENGTH), + initialized: true, }, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.stateRoot mst be equal to "${EMPTY_HASH.toString( - 'hex', - )}", if initialized is false.`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `terminatedStateAccount.mainchainStateRoot must be equal to "${EMPTY_HASH.toString( + 'hex', + )}", if initialized is true`, + ); + }); + }); - it(`should throw error if stateAccount.mainchainStateRoot is equal to EMPTY_HASH, if initialised is false`, async () => { - const context = createInitGenesisStateContext( - { - ...defaultData, - chainInfos: chainInfosDefault, - terminatedStateAccounts: [ - { - chainID: chainIDNotEqualToOwnChainID, - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: EMPTY_HASH, - mainchainStateRoot: EMPTY_HASH, - initialized: false, - }, + describe('when initialised is false', () => { + it(`should throw error if stateAccount.stateRoot is not equal to 
EMPTY_HASH`, async () => { + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedStateAccounts: [ + { + chainID: chainIDNotEqualToOwnChainID, + terminatedStateAccount: { + ...terminatedStateAccount, + stateRoot: utils.getRandomBytes(HASH_LENGTH), + initialized: false, }, - ], - }, - params, - ); + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `terminatedStateAccount.mainchainStateRoot must be not equal to "${EMPTY_HASH.toString( - 'hex', - )}", if initialized is false.`, - ); - }); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `stateAccount.stateRoot mst be equal to "${EMPTY_HASH.toString( + 'hex', + )}", if initialized is false.`, + ); }); - it('should throw error if terminatedOutboxAccounts is not empty', async () => { + it(`should throw error if stateAccount.mainchainStateRoot is equal to EMPTY_HASH`, async () => { const context = createInitGenesisStateContext( { ...defaultData, chainInfos: chainInfosDefault, - terminatedOutboxAccounts: [ + terminatedStateAccounts: [ { - chainID, - terminatedOutboxAccount, + chainID: chainIDNotEqualToOwnChainID, + terminatedStateAccount: { + ...terminatedStateAccount, + stateRoot: EMPTY_HASH, + mainchainStateRoot: EMPTY_HASH, + initialized: false, + }, }, ], }, @@ -584,9 +606,46 @@ describe('initGenesisState', () => { ); await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `terminatedOutboxAccounts must be empty.`, + `terminatedStateAccount.mainchainStateRoot must not be equal to "${EMPTY_HASH.toString( + 'hex', + )}", if initialized is false.`, ); }); }); }); + + it('should throw error if terminatedOutboxAccounts is not empty', async () => { + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedOutboxAccounts: [ + { + chainID, + terminatedOutboxAccount, + }, + ], + }, + params, + ); + + await 
expect(interopMod.initGenesisState(context)).rejects.toThrow( + `terminatedOutboxAccounts must be empty.`, + ); + }); + + it('should check that super.processGenesisState has been called from initGenesisState', async () => { + const spyInstance = jest.spyOn(BaseInteroperabilityModule.prototype, 'processGenesisState'); + + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedOutboxAccounts: [], + }, + params, + ); + await interopMod.initGenesisState(context); + expect(spyInstance).toHaveBeenCalledTimes(1); + }); }); From f79aa3003791177e56bb0329fdc31fb900a34b16 Mon Sep 17 00:00:00 2001 From: Martin Macharia Date: Fri, 13 Oct 2023 18:30:53 +0200 Subject: [PATCH 151/170] Update the NFT module endpoints (#9036) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update the NFT module endpoints * Update the endpoint logic * Clean up the tests * Update the id to nftID * Update the getSupportedCollectionIDs return type * Update the endpoints * initialize the `endpoint` object of the `NFTModule` class with the `ownChainID` param * Update returned object Co-authored-by: Miroslav Jerković * Update returned object Co-authored-by: Miroslav Jerković * Update the getSupportedCollectionIDs method and tests * Refactor the isNFTSupported method * Update checks * Refactor the `getSupportedCollectionIDs` endpoint * Refactor the `getSupportedCollectionIDs` endpoint * Cleanup the endpoint * Update unit tests * Refactor the `getSupportedCollectionIDs` endpoint * Update unit tests * Add a unit test to test for unique collection ids * Update unit tests * update the nativeCollectionIDs * Refactor the getSupportedCollectionIDs endpoint * Refactor the tests * Update test desc Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> * Update test desc Co-authored-by: Incede <33103370+Incede@users.noreply.github.com> --------- Co-authored-by: Miroslav Jerković Co-authored-by: Incede 
<33103370+Incede@users.noreply.github.com> --- framework/src/modules/nft/endpoint.ts | 73 +++---- framework/src/modules/nft/module.ts | 18 +- framework/src/modules/nft/schemas.ts | 34 +--- .../test/unit/modules/nft/endpoint.spec.ts | 181 +++++++++++------- 4 files changed, 163 insertions(+), 143 deletions(-) diff --git a/framework/src/modules/nft/endpoint.ts b/framework/src/modules/nft/endpoint.ts index 6a4c18b0939..01cea421482 100644 --- a/framework/src/modules/nft/endpoint.ts +++ b/framework/src/modules/nft/endpoint.ts @@ -17,8 +17,7 @@ import { validator } from '@liskhq/lisk-validator'; import { BaseEndpoint } from '../base_endpoint'; import { JSONObject, ModuleEndpointContext } from '../../types'; import { - collectionExistsRequestSchema, - getCollectionIDsRequestSchema, + isCollectionIDSupportedRequestSchema, getEscrowedNFTIDsRequestSchema, getNFTRequestSchema, getNFTsRequestSchema, @@ -143,56 +142,58 @@ export class NFTEndpoint extends BaseEndpoint { }; } - public async getCollectionIDs( + public async getSupportedCollectionIDs( context: ModuleEndpointContext, - ): Promise<{ collectionIDs: string[] }> { - const { params } = context; - - validator.validate<{ chainID: string }>(getCollectionIDsRequestSchema, params); - - const chainID = Buffer.from(params.chainID, 'hex'); - + ): Promise<{ supportedCollectionIDs: string[] }> { const supportedNFTsStore = this.stores.get(SupportedNFTsStore); + if (await supportedNFTsStore.has(context, ALL_SUPPORTED_NFTS_KEY)) { + return { supportedCollectionIDs: ['*'] }; + } - const chainExists = await supportedNFTsStore.has(context.getImmutableMethodContext(), chainID); + const supportedCollectionIDs: string[] = []; - if (!chainExists) { - return { collectionIDs: [] }; - } + supportedCollectionIDs.push(`${context.chainID.toString('hex')}********`); - const supportedNFTsData = await supportedNFTsStore.get( - context.getImmutableMethodContext(), - chainID, - ); + const supportedNFTsStoreData = await 
supportedNFTsStore.getAll(context); + for (const { key, value } of supportedNFTsStoreData) { + if (!value.supportedCollectionIDArray.length) { + supportedCollectionIDs.push(`${key.toString('hex')}********`); + } else { + const collectionIDs = value.supportedCollectionIDArray.map( + supportedCollectionID => + key.toString('hex') + supportedCollectionID.collectionID.toString('hex'), + ); + supportedCollectionIDs.push(...collectionIDs); + } + } - return { - collectionIDs: supportedNFTsData.supportedCollectionIDArray.map(collection => - collection.collectionID.toString('hex'), - ), - }; + return { supportedCollectionIDs }; } - public async collectionExists( + public async isCollectionIDSupported( context: ModuleEndpointContext, - ): Promise<{ collectionExists: boolean }> { + ): Promise<{ isCollectionIDSupported: boolean }> { const { params } = context; validator.validate<{ chainID: string; collectionID: string }>( - collectionExistsRequestSchema, + isCollectionIDSupportedRequestSchema, params, ); const chainID = Buffer.from(params.chainID, 'hex'); + const collectionID = Buffer.from(params.collectionID, 'hex'); + const nftID = Buffer.concat([chainID, collectionID, Buffer.alloc(8)]); - const supportedNFTsStore = this.stores.get(SupportedNFTsStore); - - const chainExists = await supportedNFTsStore.has(context.getImmutableMethodContext(), chainID); + const isNFTSupported = await this._nftMethod.isNFTSupported( + context.getImmutableMethodContext(), + nftID, + ); - if (!chainExists) { - return { collectionExists: false }; + if (!isNFTSupported) { + return { isCollectionIDSupported: false }; } - const collectionID = Buffer.from(params.collectionID, 'hex'); + const supportedNFTsStore = this.stores.get(SupportedNFTsStore); const supportedNFTsData = await supportedNFTsStore.get( context.getImmutableMethodContext(), @@ -200,8 +201,8 @@ export class NFTEndpoint extends BaseEndpoint { ); return { - collectionExists: 
supportedNFTsData.supportedCollectionIDArray.some(supportedCollection => - supportedCollection.collectionID.equals(collectionID), + isCollectionIDSupported: supportedNFTsData.supportedCollectionIDArray.some( + supportedCollection => supportedCollection.collectionID.equals(collectionID), ), }; } @@ -234,9 +235,9 @@ export class NFTEndpoint extends BaseEndpoint { ): Promise<{ isNFTSupported: boolean }> { const { params } = context; - validator.validate<{ id: string }>(isNFTSupportedRequestSchema, params); + validator.validate<{ nftID: string }>(isNFTSupportedRequestSchema, params); - const nftID = Buffer.from(params.id, 'hex'); + const nftID = Buffer.from(params.nftID, 'hex'); let isNFTSupported = false; try { diff --git a/framework/src/modules/nft/module.ts b/framework/src/modules/nft/module.ts index a6eb5341716..7521b750b36 100644 --- a/framework/src/modules/nft/module.ts +++ b/framework/src/modules/nft/module.ts @@ -39,10 +39,9 @@ import { UnlockEvent } from './events/unlock'; import { InternalMethod } from './internal_method'; import { NFTMethod } from './method'; import { - collectionExistsRequestSchema, - collectionExistsResponseSchema, - getCollectionIDsRequestSchema, - getCollectionIDsResponseSchema, + isCollectionIDSupportedRequestSchema, + isCollectionIDSupportedResponseSchema, + getSupportedCollectionIDsResponseSchema, getEscrowedNFTIDsRequestSchema, getEscrowedNFTIDsResponseSchema, getNFTRequestSchema, @@ -144,14 +143,13 @@ export class NFTModule extends BaseInteroperableModule { ...this.baseMetadata(), endpoints: [ { - name: this.endpoint.collectionExists.name, - request: collectionExistsRequestSchema, - response: collectionExistsResponseSchema, + name: this.endpoint.isCollectionIDSupported.name, + request: isCollectionIDSupportedRequestSchema, + response: isCollectionIDSupportedResponseSchema, }, { - name: this.endpoint.getCollectionIDs.name, - request: getCollectionIDsRequestSchema, - response: getCollectionIDsResponseSchema, + name: 
this.endpoint.getSupportedCollectionIDs.name, + response: getSupportedCollectionIDsResponseSchema, }, { name: this.endpoint.getEscrowedNFTIDs.name, diff --git a/framework/src/modules/nft/schemas.ts b/framework/src/modules/nft/schemas.ts index 7d0de1825df..204a6535cec 100644 --- a/framework/src/modules/nft/schemas.ts +++ b/framework/src/modules/nft/schemas.ts @@ -270,25 +270,11 @@ export const getNFTResponseSchema = { }, }; -export const getCollectionIDsRequestSchema = { - $id: '/nft/endpoint/getCollectionIDsRequest', +export const getSupportedCollectionIDsResponseSchema = { + $id: '/nft/endpoint/getSupportedCollectionIDsRespone', type: 'object', properties: { - chainID: { - type: 'string', - format: 'hex', - minLength: LENGTH_CHAIN_ID * 2, - maxLength: LENGTH_CHAIN_ID * 2, - }, - }, - required: ['chainID'], -}; - -export const getCollectionIDsResponseSchema = { - $id: '/nft/endpoint/getCollectionIDsRespone', - type: 'object', - properties: { - collectionIDs: { + supportedCollectionIDs: { type: 'array', items: { type: 'string', @@ -298,8 +284,8 @@ export const getCollectionIDsResponseSchema = { }, }; -export const collectionExistsRequestSchema = { - $id: '/nft/endpoint/collectionExistsRequest', +export const isCollectionIDSupportedRequestSchema = { + $id: '/nft/endpoint/isCollectionIDSupportedRequest', type: 'object', properties: { chainID: { @@ -318,11 +304,11 @@ export const collectionExistsRequestSchema = { required: ['chainID', 'collectionID'], }; -export const collectionExistsResponseSchema = { - $id: '/nft/endpoint/collectionExistsResponse', +export const isCollectionIDSupportedResponseSchema = { + $id: '/nft/endpoint/isCollectionIDSupportedResponse', type: 'object', properties: { - collectionExists: { + isCollectionIDSupported: { type: 'boolean', }, }, @@ -360,14 +346,14 @@ export const isNFTSupportedRequestSchema = { $id: '/nft/endpoint/isNFTSupportedRequest', type: 'object', properties: { - id: { + nftID: { type: 'string', format: 'hex', minLength: 
LENGTH_NFT_ID * 2, maxLength: LENGTH_NFT_ID * 2, }, }, - required: ['id'], + required: ['nftID'], }; export const isNFTSupportedResponseSchema = { diff --git a/framework/test/unit/modules/nft/endpoint.spec.ts b/framework/test/unit/modules/nft/endpoint.spec.ts index a9dca0e4e71..6bf141958fa 100644 --- a/framework/test/unit/modules/nft/endpoint.spec.ts +++ b/framework/test/unit/modules/nft/endpoint.spec.ts @@ -38,8 +38,7 @@ import { NFT } from '../../../../src/modules/nft/types'; import { JSONObject } from '../../../../src'; import { SupportedNFTsStore } from '../../../../src/modules/nft/stores/supported_nfts'; import { - collectionExistsResponseSchema, - getCollectionIDsResponseSchema, + isCollectionIDSupportedResponseSchema, getEscrowedNFTIDsResponseSchema, getNFTResponseSchema, getNFTsResponseSchema, @@ -360,80 +359,100 @@ describe('NFTEndpoint', () => { }); }); - describe('getCollectionIDs', () => { - it('should fail if provided chainID has invalid length', async () => { - const minLengthContext = createTransientModuleEndpointContext({ - stateStore, - params: { - chainID: utils.getRandomBytes(LENGTH_CHAIN_ID - 1).toString('hex'), - }, + describe('getSupportedCollectionIDs', () => { + it('should return a supportedCollectionIDs array as [*] when ALL_SUPPORTED_NFTS_KEY exists in SupportedNFTsStore', async () => { + await supportedNFTsStore.save(methodContext, ALL_SUPPORTED_NFTS_KEY, { + supportedCollectionIDArray: [], }); - const maxLengthContext = createTransientModuleEndpointContext({ + const context = createTransientModuleEndpointContext({ stateStore, - params: { - chainID: utils.getRandomBytes(LENGTH_CHAIN_ID + 1).toString('hex'), - }, + chainID: ownChainID, }); - await expect(endpoint.getCollectionIDs(minLengthContext)).rejects.toThrow( - `'.chainID' must NOT have fewer than 8 characters`, - ); - - await expect(endpoint.getCollectionIDs(maxLengthContext)).rejects.toThrow( - `'.chainID' must NOT have more than 8 characters`, - ); + await 
expect(endpoint.getSupportedCollectionIDs(context)).resolves.toEqual({ + supportedCollectionIDs: ['*'], + }); }); - it('should return empty list if provided chainID does not exist', async () => { + it("should return a supportedCollectionIDs array with ownChainID + 8 [*]s and chainID + 8 [*]s for all chain id's stored in the supportedNFT store when supportedCollectionIDArray is empty", async () => { + const chainID1 = Buffer.from('00000001', 'hex'); + const chainID2 = Buffer.from('00000002', 'hex'); + + await supportedNFTsStore.save(methodContext, chainID1, { + supportedCollectionIDArray: [], + }); + await supportedNFTsStore.save(methodContext, chainID2, { + supportedCollectionIDArray: [], + }); + const context = createTransientModuleEndpointContext({ stateStore, - params: { - chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), - }, + chainID: ownChainID, }); - await expect(endpoint.getCollectionIDs(context)).resolves.toEqual({ collectionIDs: [] }); + await expect(endpoint.getSupportedCollectionIDs(context)).resolves.toEqual({ + supportedCollectionIDs: [ + `${ownChainID.toString('hex')}********`, + `${chainID1.toString('hex')}********`, + `${chainID2.toString('hex')}********`, + ], + }); }); - it('should return supported collections of the provided chain', async () => { - const chainID = utils.getRandomBytes(LENGTH_CHAIN_ID); + it("should return a supportedCollectionIDs array with ownChainID + 8 [*]s and for all chain id's stored in the supportedNFT store when supportedCollectionIDArray is not empty", async () => { + const chainID1 = Buffer.from('00000001', 'hex'); + const chainID2 = Buffer.from('00000002', 'hex'); + const collectionID1 = Buffer.from('00000001', 'hex'); + const collectionID2 = Buffer.from('00000002', 'hex'); + const collectionID3 = Buffer.from('00000003', 'hex'); - const supportedCollections = [ - { - collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), - }, - { - collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID), - }, - ]; - 
- await supportedNFTsStore.save(methodContext, chainID, { - supportedCollectionIDArray: supportedCollections, + await supportedNFTsStore.save(methodContext, chainID1, { + supportedCollectionIDArray: [ + { + collectionID: collectionID1, + }, + { + collectionID: collectionID2, + }, + ], + }); + await supportedNFTsStore.save(methodContext, chainID2, { + supportedCollectionIDArray: [ + { + collectionID: collectionID3, + }, + ], }); const context = createTransientModuleEndpointContext({ stateStore, - params: { - chainID: chainID.toString('hex'), - }, + chainID: ownChainID, }); - const expectedSupportedCollection = { - collectionIDs: supportedCollections.map(collection => - collection.collectionID.toString('hex'), - ), - }; + await expect(endpoint.getSupportedCollectionIDs(context)).resolves.toEqual({ + supportedCollectionIDs: [ + `${ownChainID.toString('hex')}********`, + Buffer.concat([chainID1, collectionID1]).toString('hex'), + Buffer.concat([chainID1, collectionID2]).toString('hex'), + Buffer.concat([chainID2, collectionID3]).toString('hex'), + ], + }); + }); - await expect(endpoint.getCollectionIDs(context)).resolves.toEqual( - expectedSupportedCollection, - ); + it('should return supportedCollectionIDs array with ownChainID + 8(*)s when there are no entries in the supportedNftStore', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + chainID: ownChainID, + }); - validator.validate(getCollectionIDsResponseSchema, expectedSupportedCollection); + await expect(endpoint.getSupportedCollectionIDs(context)).resolves.toEqual({ + supportedCollectionIDs: [`${ownChainID.toString('hex')}********`], + }); }); }); - describe('collectionExists', () => { + describe('isCollectionIDSupported', () => { it('should fail if provided chainID has invalid length', async () => { const minLengthContext = createTransientModuleEndpointContext({ stateStore, @@ -451,11 +470,11 @@ describe('NFTEndpoint', () => { }, }); - await 
expect(endpoint.collectionExists(minLengthContext)).rejects.toThrow( + await expect(endpoint.isCollectionIDSupported(minLengthContext)).rejects.toThrow( `'.chainID' must NOT have fewer than 8 characters`, ); - await expect(endpoint.collectionExists(maxLengthContext)).rejects.toThrow( + await expect(endpoint.isCollectionIDSupported(maxLengthContext)).rejects.toThrow( `'.chainID' must NOT have more than 8 characters`, ); }); @@ -477,15 +496,29 @@ describe('NFTEndpoint', () => { }, }); - await expect(endpoint.collectionExists(minLengthContext)).rejects.toThrow( + await expect(endpoint.isCollectionIDSupported(minLengthContext)).rejects.toThrow( `'.collectionID' must NOT have fewer than 8 characters`, ); - await expect(endpoint.collectionExists(maxLengthContext)).rejects.toThrow( + await expect(endpoint.isCollectionIDSupported(maxLengthContext)).rejects.toThrow( `'.collectionID' must NOT have more than 8 characters`, ); }); + it('should return false if NFT is not supported', async () => { + const context = createTransientModuleEndpointContext({ + stateStore, + params: { + chainID: utils.getRandomBytes(LENGTH_CHAIN_ID).toString('hex'), + collectionID: utils.getRandomBytes(LENGTH_COLLECTION_ID).toString('hex'), + }, + }); + + await expect(endpoint.isCollectionIDSupported(context)).resolves.toEqual({ + isCollectionIDSupported: false, + }); + }); + it('should return false if provided chainID does not exist', async () => { const context = createTransientModuleEndpointContext({ stateStore, @@ -495,11 +528,11 @@ describe('NFTEndpoint', () => { }, }); - await expect(endpoint.collectionExists(context)).resolves.toEqual({ - collectionExists: false, + await expect(endpoint.isCollectionIDSupported(context)).resolves.toEqual({ + isCollectionIDSupported: false, }); - validator.validate(collectionExistsResponseSchema, { collectionExists: false }); + validator.validate(isCollectionIDSupportedResponseSchema, { isCollectionIDSupported: false }); }); it('should return false if provided 
collectionID does not exist for the provided chainID', async () => { @@ -520,8 +553,8 @@ describe('NFTEndpoint', () => { }, }); - await expect(endpoint.collectionExists(context)).resolves.toEqual({ - collectionExists: false, + await expect(endpoint.isCollectionIDSupported(context)).resolves.toEqual({ + isCollectionIDSupported: false, }); }); @@ -544,9 +577,11 @@ describe('NFTEndpoint', () => { }, }); - await expect(endpoint.collectionExists(context)).resolves.toEqual({ collectionExists: true }); + await expect(endpoint.isCollectionIDSupported(context)).resolves.toEqual({ + isCollectionIDSupported: true, + }); - validator.validate(collectionExistsResponseSchema, { collectionExists: true }); + validator.validate(isCollectionIDSupportedResponseSchema, { isCollectionIDSupported: true }); }); }); @@ -617,27 +652,27 @@ describe('NFTEndpoint', () => { }); describe('isNFTSupported', () => { - it('should fail if id does not have valid length', async () => { + it('should fail if nftID does not have valid length', async () => { const minLengthContext = createTransientModuleEndpointContext({ stateStore, params: { - id: utils.getRandomBytes(LENGTH_NFT_ID - 1).toString('hex'), + nftID: utils.getRandomBytes(LENGTH_NFT_ID - 1).toString('hex'), }, }); const maxLengthContext = createTransientModuleEndpointContext({ stateStore, params: { - id: utils.getRandomBytes(LENGTH_NFT_ID + 1).toString('hex'), + nftID: utils.getRandomBytes(LENGTH_NFT_ID + 1).toString('hex'), }, }); await expect(endpoint.isNFTSupported(minLengthContext)).rejects.toThrow( - `'.id' must NOT have fewer than 32 characters`, + `'.nftID' must NOT have fewer than 32 characters`, ); await expect(endpoint.isNFTSupported(maxLengthContext)).rejects.toThrow( - `'.id' must NOT have more than 32 characters`, + `'.nftID' must NOT have more than 32 characters`, ); }); @@ -645,7 +680,7 @@ describe('NFTEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - id: 
utils.getRandomBytes(LENGTH_NFT_ID).toString('hex'), + nftID: utils.getRandomBytes(LENGTH_NFT_ID).toString('hex'), }, }); @@ -665,7 +700,7 @@ describe('NFTEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - id: nftID.toString('hex'), + nftID: nftID.toString('hex'), }, }); @@ -689,7 +724,7 @@ describe('NFTEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - id: nftID.toString('hex'), + nftID: nftID.toString('hex'), }, }); @@ -712,7 +747,7 @@ describe('NFTEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - id: nftID.toString('hex'), + nftID: nftID.toString('hex'), }, }); @@ -744,7 +779,7 @@ describe('NFTEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - id: nftID.toString('hex'), + nftID: nftID.toString('hex'), }, }); @@ -776,7 +811,7 @@ describe('NFTEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - id: nftID.toString('hex'), + nftID: nftID.toString('hex'), }, }); From 9747ccef87eb326a69b77f0286c1f1275990a3e5 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Wed, 18 Oct 2023 12:18:54 +0200 Subject: [PATCH 152/170] Adds test for getPrivateKeyFromPhraseAndPath (#9096) :white_check_mark: getPrivateKeyFromPhraseAndPath --- elements/lisk-cryptography/test/ed.spec.ts | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/elements/lisk-cryptography/test/ed.spec.ts b/elements/lisk-cryptography/test/ed.spec.ts index e174e35c718..8edb23aed0f 100644 --- a/elements/lisk-cryptography/test/ed.spec.ts +++ b/elements/lisk-cryptography/test/ed.spec.ts @@ -166,6 +166,20 @@ describe('getPrivateKeyFromPhraseAndPath', () => { ); }); + it('should derive distinct keys from same valid phrase but distinct paths', async () => { + const privateKeyFromPassphrase = await getPrivateKeyFromPhraseAndPath( + passphrase, + 
`m/44'/134'/0'`, + ); + + const anotherPrivateKeyFromPassphrase = await getPrivateKeyFromPhraseAndPath( + passphrase, + `m/44'/134'/1'`, + ); + + expect(privateKeyFromPassphrase).not.toEqual(anotherPrivateKeyFromPassphrase); + }); + it('should fail for empty string path', async () => { await expect(getPrivateKeyFromPhraseAndPath(passphrase, '')).rejects.toThrow( 'Invalid path format', From 185e3faa2185026769bb8a4fda27e7c4e15edbf4 Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Thu, 19 Oct 2023 10:54:18 +0200 Subject: [PATCH 153/170] Remove redundant call to verifyTransfer in method of NFT module (#9102) Remove redundant check --- framework/src/modules/nft/method.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/framework/src/modules/nft/method.ts b/framework/src/modules/nft/method.ts index af78d9f0466..9695cfedd99 100644 --- a/framework/src/modules/nft/method.ts +++ b/framework/src/modules/nft/method.ts @@ -432,8 +432,6 @@ export class NFTMethod extends BaseMethod { includeAttributes: boolean, ): Promise { try { - await this._internalMethod.verifyTransfer(methodContext, senderAddress, nftID); - await this._internalMethod.verifyTransferCrossChain( methodContext, senderAddress, From 12c5d4c37e4d4ee46e2d7e977b29a8eda8e63e03 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 24 Oct 2023 18:40:45 +0200 Subject: [PATCH 154/170] Merge release/6.0.0 into release/6.1.0 --- commander/oclif.manifest.json | 683 ++++++++++++++++++ .../commands/genesis-block/create.ts | 24 +- .../src/bootstrapping/commands/keys/create.ts | 126 +++- .../commands/transaction/sign.ts | 11 +- .../commands/keys/create.spec.ts | 66 ++ elements/lisk-api-client/src/ipc_channel.ts | 8 +- elements/lisk-chain/src/block_header.ts | 6 +- elements/lisk-chain/test/unit/block.spec.ts | 2 +- .../lisk-chain/test/unit/block_header.spec.ts | 4 +- elements/lisk-cryptography/src/encrypt.ts | 4 +- elements/lisk-cryptography/src/utils.ts | 8 +- 
elements/lisk-cryptography/test/ed.spec.ts | 14 +- .../lisk-cryptography/test/encrypt.spec.ts | 50 +- .../interop/common/mainchain_registration.ts | 67 +- .../config/default/genesis_assets.json | 82 ++- .../config/scripts/sidechain_registration.ts | 48 +- .../scripts/transfer_lsk_sidechain_one.ts | 17 +- .../config/default/config.json | 4 + .../config/scripts/mainchain_registration.ts | 7 +- .../pos-sidechain-example-one/src/app/app.ts | 6 + .../hello/cc_commands/react_command.ts | 82 +++ .../src/app/modules/hello/cc_method.ts | 3 + .../hello/commands/create_hello_command.ts | 89 +++ .../src/app/modules/hello/constants.ts | 4 + .../src/app/modules/hello/endpoint.ts | 47 ++ .../src/app/modules/hello/events/.gitkeep | 0 .../src/app/modules/hello/events/new_hello.ts | 39 + .../src/app/modules/hello/method.ts | 14 + .../src/app/modules/hello/module.ts | 141 ++++ .../src/app/modules/hello/schema.ts | 116 +++ .../src/app/modules/hello/stores/.gitkeep | 0 .../src/app/modules/hello/stores/counter.ts | 36 + .../src/app/modules/hello/stores/message.ts | 25 +- .../src/app/modules/hello/stores/reaction.ts | 32 + .../src/app/modules/hello/types.ts | 9 + .../config/default/config.json | 4 + .../config/scripts/mainchain_registration.ts | 7 +- .../config/scripts/transfer_lsk_mainchain.ts | 2 +- .../pos-sidechain-example-two/src/app/app.ts | 7 +- .../src/app/modules/react/cc_method.ts | 17 + .../modules/react/commands/react_command.ts | 108 +++ .../src/app/modules/react/constants.ts | 4 + .../src/app/modules/react/endpoint.ts | 3 + .../src/app/modules/react/events/.gitkeep | 0 .../src/app/modules/react/method.ts | 3 + .../src/app/modules/react/module.ts | 85 +++ .../src/app/modules/react/schemas.ts | 94 +++ .../src/app/modules/react/stores/.gitkeep | 0 .../src/app/modules/react/types.ts | 28 + examples/interop/run_sidechains.json | 18 +- .../src/certificate_generation.ts | 26 +- .../src/chain_connector_plugin.ts | 62 +- .../test/unit/certificate_generation.spec.ts | 4 +- 
.../test/unit/plugin.spec.ts | 47 ++ .../src/controllers/transactions.ts | 3 +- framework/src/application.ts | 2 +- framework/src/controller/constants.ts | 4 +- framework/src/controller/controller.ts | 6 +- framework/src/controller/ipc/ipc_client.ts | 8 +- framework/src/engine/bft/method.ts | 16 +- .../certificate_generation/commit_list.ts | 4 + .../certificate_generation/commit_pool.ts | 18 +- framework/src/engine/consensus/consensus.ts | 5 +- framework/src/engine/engine.ts | 2 + framework/src/engine/generator/endpoint.ts | 9 + framework/src/engine/generator/generator.ts | 104 +-- framework/src/engine/generator/schemas.ts | 2 + .../engine/generator/single_commit_handler.ts | 166 +++++ framework/src/engine/legacy/codec.ts | 38 +- framework/src/engine/legacy/constants.ts | 16 +- framework/src/engine/legacy/endpoint.ts | 27 +- framework/src/engine/legacy/errors.ts | 6 +- .../src/engine/legacy/legacy_chain_handler.ts | 242 +++++-- .../src/engine/legacy/network_endpoint.ts | 81 ++- framework/src/engine/legacy/schemas.ts | 21 + framework/src/engine/legacy/storage.ts | 45 +- framework/src/engine/legacy/types.ts | 11 +- framework/src/engine/legacy/utils.ts | 12 +- framework/src/genesis_block.ts | 2 +- framework/src/index.ts | 2 +- .../constants.ts | 0 .../endpoint.ts | 4 +- .../index.ts | 0 .../method.ts | 0 .../module.ts | 18 +- .../src/modules/dynamic_reward/schemas.ts | 59 ++ .../stores/end_of_round_timestamp.ts | 0 .../types.ts | 1 - .../utils.ts | 15 +- .../base_cross_chain_update_command.ts | 127 ++-- .../base_interoperability_internal_methods.ts | 36 +- .../base_interoperability_module.ts | 40 +- .../interoperability/base_state_recovery.ts | 4 +- .../modules/interoperability/certificates.ts | 2 +- .../src/modules/interoperability/constants.ts | 2 + .../invalid_outbox_root_verification.ts | 48 ++ .../events/invalid_rmt_verification.ts | 2 +- .../events/invalid_smt_verification.ts | 2 +- .../commands/initialize_message_recovery.ts | 45 +- 
.../mainchain/commands/recover_message.ts | 14 +- .../submit_mainchain_cross_chain_update.ts | 15 +- .../interoperability/mainchain/module.ts | 120 ++- .../commands/initialize_state_recovery.ts | 4 +- .../sidechain/commands/register_mainchain.ts | 2 +- .../submit_sidechain_cross_chain_update.ts | 17 +- .../interoperability/sidechain/module.ts | 4 +- .../src/modules/interoperability/utils.ts | 90 +-- framework/src/modules/pos/constants.ts | 12 +- framework/src/modules/pos/endpoint.ts | 7 +- framework/src/modules/pos/module.ts | 3 +- framework/src/modules/pos/schemas.ts | 13 + framework/src/modules/pos/types.ts | 2 + framework/src/modules/token/cc_method.ts | 2 + framework/src/modules/token/module.ts | 6 - framework/src/state_machine/constants.ts | 1 + framework/src/state_machine/event_queue.ts | 3 +- framework/src/state_machine/state_machine.ts | 4 +- .../src/state_machine/transaction_context.ts | 9 +- framework/src/testing/block_processing_env.ts | 2 +- framework/src/utils/panic.ts | 23 + .../integration/node/processor/pos.spec.ts | 9 +- .../test/unit/controller/controller.spec.ts | 6 + framework/test/unit/engine/bft/method.spec.ts | 287 +++----- .../commit_list.spec.ts | 20 + .../commit_pool.spec.ts | 46 +- .../unit/engine/consensus/consensus.spec.ts | 1 + .../unit/engine/generator/endpoint.spec.ts | 27 + .../unit/engine/generator/generator.spec.ts | 192 +---- .../generator/single_commit_handler.spec.ts | 301 ++++++++ .../test/unit/engine/legacy/endpoint.spec.ts | 107 ++- framework/test/unit/engine/legacy/fixtures.ts | 17 +- .../legacy/legacy_chain_handler.spec.ts | 141 ++-- .../engine/legacy/network_endpoint.spec.ts | 63 +- .../test/unit/engine/legacy/storage.spec.ts | 25 +- framework/test/unit/genesis_block.spec.ts | 1 + .../endpoint.spec.ts | 13 +- .../module.spec.ts | 14 +- .../base_cross_chain_update_command.spec.ts | 462 ++++++------ .../base_interoperability_module.spec.ts | 56 +- .../base_state_recovery.spec.ts | 4 +- 
.../interoperability/internal_method.spec.ts | 349 +++++++-- .../initialize_message_recovery.spec.ts | 98 ++- .../commands/recover_message.spec.ts | 15 +- ...ubmit_mainchain_cross_chain_update.spec.ts | 59 +- .../interoperability/mainchain/module.spec.ts | 271 ++++++- .../initialize_state_recovery.spec.ts | 8 +- ...ubmit_sidechain_cross_chain_update.spec.ts | 28 +- .../interoperability/sidechain/module.spec.ts | 6 +- .../modules/interoperability/utils.spec.ts | 249 +------ .../test/unit/modules/pos/endpoint.spec.ts | 3 +- .../test/unit/modules/pos/module.spec.ts | 3 +- .../test/unit/modules/token/cc_method.spec.ts | 48 ++ .../token/init_genesis_state_fixture.ts | 16 - .../unit/state_machine/state_machine.spec.ts | 13 +- yarn.lock | 180 +++-- 155 files changed, 5185 insertions(+), 1956 deletions(-) create mode 100644 commander/oclif.manifest.json create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_commands/react_command.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_method.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/commands/create_hello_command.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/constants.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/endpoint.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/.gitkeep create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/new_hello.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/method.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/module.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/schema.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/.gitkeep create mode 100644 
examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/counter.ts rename framework/src/modules/dynamic_rewards/schemas.ts => examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/message.ts (56%) create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/reaction.ts create mode 100644 examples/interop/pos-sidechain-example-one/src/app/modules/hello/types.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/cc_method.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/commands/react_command.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/constants.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/endpoint.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/events/.gitkeep create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/method.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/module.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/schemas.ts create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/stores/.gitkeep create mode 100644 examples/interop/pos-sidechain-example-two/src/app/modules/react/types.ts create mode 100644 framework/src/engine/generator/single_commit_handler.ts rename framework/src/modules/{dynamic_rewards => dynamic_reward}/constants.ts (100%) rename framework/src/modules/{dynamic_rewards => dynamic_reward}/endpoint.ts (96%) rename framework/src/modules/{dynamic_rewards => dynamic_reward}/index.ts (100%) rename framework/src/modules/{dynamic_rewards => dynamic_reward}/method.ts (100%) rename framework/src/modules/{dynamic_rewards => dynamic_reward}/module.ts (96%) create mode 100644 framework/src/modules/dynamic_reward/schemas.ts rename 
framework/src/modules/{dynamic_rewards => dynamic_reward}/stores/end_of_round_timestamp.ts (100%) rename framework/src/modules/{dynamic_rewards => dynamic_reward}/types.ts (96%) rename framework/src/modules/{dynamic_rewards => dynamic_reward}/utils.ts (67%) create mode 100644 framework/src/modules/interoperability/events/invalid_outbox_root_verification.ts create mode 100644 framework/src/utils/panic.ts create mode 100644 framework/test/unit/engine/generator/single_commit_handler.spec.ts rename framework/test/unit/modules/{dynamic_rewards => dynamic_reward}/endpoint.spec.ts (93%) rename framework/test/unit/modules/{dynamic_rewards => dynamic_reward}/module.spec.ts (96%) diff --git a/commander/oclif.manifest.json b/commander/oclif.manifest.json new file mode 100644 index 00000000000..dcca4a28e10 --- /dev/null +++ b/commander/oclif.manifest.json @@ -0,0 +1,683 @@ +{ + "version": "6.0.0-rc.3", + "commands": { + "console": { + "id": "console", + "description": "Lisk interactive REPL session to run commands.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "console", + "console --api-ws=ws://localhost:8080", + "console --api-ipc=/path/to/server" + ], + "flags": { + "api-ipc": { + "name": "api-ipc", + "type": "option", + "description": "Enable api-client with IPC communication.", + "multiple": false, + "exclusive": ["api-ws"] + }, + "api-ws": { + "name": "api-ws", + "type": "option", + "description": "Enable api-client with Websocket communication.", + "multiple": false, + "exclusive": ["api-ipc"] + } + }, + "args": [] + }, + "hash-onion": { + "id": "hash-onion", + "description": "Create hash onions to be used by the forger.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "hash-onion --count=1000000 --distance=2000 --pretty", + "hash-onion --count=1000000 --distance=2000 
--output ~/my_onion.json" + ], + "flags": { + "output": { + "name": "output", + "type": "option", + "char": "o", + "description": "Output file path", + "multiple": false + }, + "count": { + "name": "count", + "type": "option", + "char": "c", + "description": "Total number of hashes to produce", + "multiple": false, + "default": 1000000 + }, + "distance": { + "name": "distance", + "type": "option", + "char": "d", + "description": "Distance between each hash", + "multiple": false, + "default": 1000 + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed.", + "allowNo": false + } + }, + "args": [] + }, + "init": { + "id": "init", + "description": "Bootstrap a blockchain application using Lisk SDK.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "init", + "init --template lisk-ts", + "init --template @some-global-npm-package", + "init /project/path", + "init /project/path --template lisk-ts" + ], + "flags": { + "template": { + "name": "template", + "type": "option", + "char": "t", + "description": "Template to bootstrap the application. 
It will read from `.liskrc.json` or use `lisk-ts` if not found.", + "multiple": false + }, + "registry": { + "name": "registry", + "type": "option", + "description": "URL of a registry to download dependencies from.", + "multiple": false + } + }, + "args": [ + { + "name": "projectPath", + "description": "Path to create the project.", + "default": "/Users/ishan/repos/lisk-sdk/commander" + } + ] + }, + "endpoint:invoke": { + "id": "endpoint:invoke", + "description": "Invokes the provided endpoint.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "endpoint:invoke {endpoint} {parameters}", + "endpoint:invoke --data-path --file", + "endpoint:invoke generator_getAllKeys", + "endpoint:invoke consensus_getBFTParameters '{\"height\": 2}' -d ~/.lisk/pos-mainchain --pretty", + "endpoint:invoke consensus_getBFTParameters -f ./input.json" + ], + "flags": { + "data-path": { + "name": "data-path", + "type": "option", + "char": "d", + "description": "Directory path to specify where node data is stored. 
Environment variable \"LISK_DATA_PATH\" can also be used.", + "multiple": false + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed.", + "allowNo": false + }, + "file": { + "name": "file", + "type": "option", + "char": "f", + "description": "Input file.", + "multiple": false + } + }, + "args": [ + { "name": "endpoint", "description": "Endpoint to invoke", "required": true }, + { "name": "params", "description": "Endpoint parameters (Optional)", "required": false } + ] + }, + "generate:command": { + "id": "generate:command", + "description": "Creates an command skeleton for the given module name, name and id.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "generate:command moduleName commandName commandID", + "generate:command nft transfer 1" + ], + "flags": { + "template": { + "name": "template", + "type": "option", + "char": "t", + "description": "Template to bootstrap the application. It will read from `.liskrc.json` or use `lisk-ts` if not found.", + "multiple": false + } + }, + "args": [ + { "name": "moduleName", "description": "Module name.", "required": true }, + { "name": "commandName", "description": "Asset name.", "required": true } + ] + }, + "generate:module": { + "id": "generate:module", + "description": "Creates a module skeleton for the given name.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": ["generate:module nft"], + "flags": { + "template": { + "name": "template", + "type": "option", + "char": "t", + "description": "Template to bootstrap the application. 
It will read from `.liskrc.json` or use `lisk-ts` if not found.", + "multiple": false + } + }, + "args": [{ "name": "moduleName", "description": "Module name.", "required": true }] + }, + "generate:plugin": { + "id": "generate:plugin", + "description": "Creates custom plugin.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "generate:plugin myPlugin", + "generate:plugin myPlugin --standalone --output ./my_plugin" + ], + "flags": { + "template": { + "name": "template", + "type": "option", + "char": "t", + "description": "Template to bootstrap the application. It will read from `.liskrc.json` or use `lisk-ts` if not found.", + "multiple": false + }, + "standalone": { + "name": "standalone", + "type": "boolean", + "description": "Create a standalone plugin package.", + "allowNo": false + }, + "output": { + "name": "output", + "type": "option", + "char": "o", + "description": "Path to create the plugin.", + "multiple": false, + "dependsOn": ["standalone"] + }, + "registry": { + "name": "registry", + "type": "option", + "description": "URL of a registry to download dependencies from.", + "multiple": false, + "dependsOn": ["standalone"] + } + }, + "args": [{ "name": "name", "description": "Name of the plugin.", "required": true }] + }, + "keys:create": { + "id": "keys:create", + "description": "Return keys corresponding to the given passphrase.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "keys:create", + "keys:create --passphrase your-passphrase", + "keys:create --passphrase your-passphrase --no-encrypt", + "keys:create --passphrase your-passphrase --password your-password", + "keys:create --passphrase your-passphrase --password your-password --count 2", + "keys:create --passphrase your-passphrase --no-encrypt --count 2 --offset 1", + "keys:create --passphrase 
your-passphrase --no-encrypt --count 2 --offset 1 --chainid 1", + "keys:create --passphrase your-passphrase --password your-password --count 2 --offset 1 --chainid 1 --output /mypath/keys.json" + ], + "flags": { + "output": { + "name": "output", + "type": "option", + "char": "o", + "description": "The output directory. Default will set to current working directory.", + "multiple": false + }, + "passphrase": { + "name": "passphrase", + "type": "option", + "char": "p", + "description": "Specifies a source for your secret passphrase. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --passphrase='my secret passphrase' (should only be used where security is not important)\n", + "multiple": false + }, + "no-encrypt": { + "name": "no-encrypt", + "type": "boolean", + "char": "n", + "description": "No encrypted message object to be created", + "allowNo": false + }, + "password": { + "name": "password", + "type": "option", + "char": "w", + "description": "Specifies a source for your secret password. 
Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", + "multiple": false + }, + "count": { + "name": "count", + "type": "option", + "char": "c", + "description": "Number of keys to create", + "multiple": false, + "default": 1 + }, + "offset": { + "name": "offset", + "type": "option", + "char": "f", + "description": "Offset for the key derivation path", + "multiple": false, + "default": 0 + }, + "chainid": { + "name": "chainid", + "type": "option", + "char": "i", + "description": "Chain id", + "multiple": false, + "default": 0 + }, + "add-legacy": { + "name": "add-legacy", + "type": "boolean", + "description": "Add legacy key derivation path to the result", + "allowNo": false + } + }, + "args": [] + }, + "keys:encrypt": { + "id": "keys:encrypt", + "description": "Encrypt keys from a file and overwrite the file", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "keys:encrypt --file-path ./my/path/keys.json", + "keys:encrypt --file-path ./my/path/keys.json --password mypass" + ], + "flags": { + "file-path": { + "name": "file-path", + "type": "option", + "char": "f", + "description": "Path of the file to encrypt from", + "required": true, + "multiple": false + }, + "password": { + "name": "password", + "type": "option", + "char": "w", + "description": "Specifies a source for your secret password. 
Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", + "multiple": false + } + }, + "args": [] + }, + "keys:export": { + "id": "keys:export", + "description": "Export to .", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "keys:export --output /mypath/keys.json", + "keys:export --output /mypath/keys.json --data-path ./data " + ], + "flags": { + "data-path": { + "name": "data-path", + "type": "option", + "char": "d", + "description": "Directory path to specify where node data is stored. Environment variable \"LISK_DATA_PATH\" can also be used.", + "multiple": false + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed.", + "allowNo": false + }, + "output": { + "name": "output", + "type": "option", + "char": "o", + "description": "The output directory. Default will set to current working directory.", + "required": true, + "multiple": false + } + }, + "args": [] + }, + "keys:import": { + "id": "keys:import", + "description": "Import from .", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "keys:import --file-path ./my/path/keys.json", + "keys:import --file-path ./my/path/keys.json --data-path ./data " + ], + "flags": { + "data-path": { + "name": "data-path", + "type": "option", + "char": "d", + "description": "Directory path to specify where node data is stored. 
Environment variable \"LISK_DATA_PATH\" can also be used.", + "multiple": false + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed.", + "allowNo": false + }, + "file-path": { + "name": "file-path", + "type": "option", + "char": "f", + "description": "Path of the file to import from", + "required": true, + "multiple": false + } + }, + "args": [] + }, + "message:decrypt": { + "id": "message:decrypt", + "description": "\n\tDecrypts a previously encrypted message using your the password used to encrypt.\n\t", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": ["message:decrypt "], + "flags": { + "password": { + "name": "password", + "type": "option", + "char": "w", + "description": "Specifies a source for your secret password. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", + "multiple": false + }, + "message": { + "name": "message", + "type": "option", + "char": "m", + "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", + "multiple": false + } + }, + "args": [{ "name": "message", "description": "Encrypted message." 
}] + }, + "message:encrypt": { + "id": "message:encrypt", + "description": "\n\tEncrypts a message with a password provided.\n\t", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": ["message:encrypt \"Hello world\""], + "flags": { + "password": { + "name": "password", + "type": "option", + "char": "w", + "description": "Specifies a source for your secret password. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", + "multiple": false + }, + "message": { + "name": "message", + "type": "option", + "char": "m", + "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", + "multiple": false + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed.", + "allowNo": false + }, + "stringify": { + "name": "stringify", + "type": "boolean", + "char": "s", + "description": "Display encrypted message in stringified format", + "allowNo": false + } + }, + "args": [{ "name": "message", "description": "Message to encrypt." 
}] + }, + "message:sign": { + "id": "message:sign", + "description": "\n\tSigns a message using your secret passphrase.\n\t", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": ["message:sign \"Hello world\""], + "flags": { + "json": { + "name": "json", + "type": "boolean", + "char": "j", + "description": "Prints output in JSON format. You can change the default behavior in your config.json file.", + "allowNo": true + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behavior in your config.json file.", + "allowNo": true + }, + "passphrase": { + "name": "passphrase", + "type": "option", + "char": "p", + "description": "Specifies a source for your secret passphrase. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --passphrase='my secret passphrase' (should only be used where security is not important)\n", + "multiple": false + }, + "message": { + "name": "message", + "type": "option", + "char": "m", + "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", + "multiple": false + } + }, + "args": [{ "name": "message", "description": "Message to sign." 
}] + }, + "message:verify": { + "id": "message:verify", + "description": "\n\tVerifies a signature for a message using the signer’s public key.\n\t", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "message:verify 647aac1e2df8a5c870499d7ddc82236b1e10936977537a3844a6b05ea33f9ef6 2a3ca127efcf7b2bf62ac8c3b1f5acf6997cab62ba9fde3567d188edcbacbc5dc8177fb88d03a8691ce03348f569b121bca9e7a3c43bf5c056382f35ff843c09 \"Hello world\"" + ], + "flags": { + "json": { + "name": "json", + "type": "boolean", + "char": "j", + "description": "Prints output in JSON format. You can change the default behavior in your config.json file.", + "allowNo": true + }, + "pretty": { + "name": "pretty", + "type": "boolean", + "description": "Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behavior in your config.json file.", + "allowNo": true + }, + "message": { + "name": "message", + "type": "option", + "char": "m", + "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", + "multiple": false + } + }, + "args": [ + { + "name": "publicKey", + "description": "Public key of the signer of the message.", + "required": true + }, + { "name": "signature", "description": "Signature to verify.", "required": true }, + { "name": "message", "description": "Message to verify." 
} + ] + }, + "passphrase:create": { + "id": "passphrase:create", + "description": "Returns a randomly generated 24 words mnemonic passphrase.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": ["passphrase:create", "passphrase:create --output /mypath/passphrase.json"], + "flags": { + "output": { + "name": "output", + "type": "option", + "char": "o", + "description": "The output directory. Default will set to current working directory.", + "multiple": false + } + }, + "args": [] + }, + "passphrase:decrypt": { + "id": "passphrase:decrypt", + "description": "Decrypt secret passphrase using the password provided at the time of encryption.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "passphrase:decrypt --file-path ./my/path/output.json", + "passphrase:decrypt --file-path ./my/path/output.json --password your-password" + ], + "flags": { + "password": { + "name": "password", + "type": "option", + "char": "w", + "description": "Specifies a source for your secret password. 
Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", + "multiple": false + }, + "file-path": { + "name": "file-path", + "type": "option", + "char": "f", + "description": "Path of the file to import from", + "required": true, + "multiple": false + } + }, + "args": [] + }, + "passphrase:encrypt": { + "id": "passphrase:encrypt", + "description": "Encrypt secret passphrase using password.", + "strict": true, + "pluginName": "lisk-commander", + "pluginAlias": "lisk-commander", + "pluginType": "core", + "aliases": [], + "examples": [ + "passphrase:encrypt", + "passphrase:encrypt --passphrase your-passphrase --output /mypath/keys.json", + "passphrase:encrypt --password your-password", + "passphrase:encrypt --password your-password --passphrase your-passphrase --output /mypath/keys.json", + "passphrase:encrypt --output-public-key --output /mypath/keys.json" + ], + "flags": { + "password": { + "name": "password", + "type": "option", + "char": "w", + "description": "Specifies a source for your secret password. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", + "multiple": false + }, + "passphrase": { + "name": "passphrase", + "type": "option", + "char": "p", + "description": "Specifies a source for your secret passphrase. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --passphrase='my secret passphrase' (should only be used where security is not important)\n", + "multiple": false + }, + "output-public-key": { + "name": "output-public-key", + "type": "boolean", + "description": "Includes the public key in the output. This option is provided for the convenience of node operators.", + "allowNo": false + }, + "output": { + "name": "output", + "type": "option", + "char": "o", + "description": "The output directory. 
Default will set to current working directory.", + "multiple": false + } + }, + "args": [] + } + } +} diff --git a/commander/src/bootstrapping/commands/genesis-block/create.ts b/commander/src/bootstrapping/commands/genesis-block/create.ts index c18a69dde6a..bd348abe670 100644 --- a/commander/src/bootstrapping/commands/genesis-block/create.ts +++ b/commander/src/bootstrapping/commands/genesis-block/create.ts @@ -18,9 +18,10 @@ import { Application, PartialApplicationConfig } from 'lisk-framework'; import { objects } from '@liskhq/lisk-utils'; import { Command, Flags as flagParser } from '@oclif/core'; import * as fs from 'fs-extra'; -import { join, resolve } from 'path'; +import { isAbsolute, join, resolve } from 'path'; import { validator } from '@liskhq/lisk-validator'; import { codec } from '@liskhq/lisk-codec'; +import { homedir } from 'os'; import { GenesisAssetsInput, genesisAssetsSchema } from '../../../utils/genesis_creation'; import { flagsWithParser } from '../../../utils/flags'; import { getNetworkConfigFilesPath } from '../../../utils/path'; @@ -47,6 +48,10 @@ export abstract class BaseGenesisBlockCommand extends Command { description: 'Path to file which contains genesis block asset in JSON format', required: true, }), + 'export-json': flagParser.boolean({ + description: 'Export genesis block as JSON format along with blob', + default: false, + }), height: flagParser.integer({ char: 'h', description: 'Genesis block height', @@ -74,6 +79,7 @@ export abstract class BaseGenesisBlockCommand extends Command { height, timestamp, 'previous-block-id': previousBlockIDString, + 'export-json': exportJSON, }, } = await this.parse(BaseGenesisBlockCommand); // validate folder name to not include camelcase or whitespace @@ -95,7 +101,12 @@ export abstract class BaseGenesisBlockCommand extends Command { config = objects.mergeDeep(config, customConfig); } // determine proper path - const configPath = join(process.cwd(), output); + let configPath = output; + if 
(output.includes('~')) { + configPath = configPath.replace('~', homedir()); + } else if (!isAbsolute(output)) { + configPath = join(process.cwd(), output); + } const app = this.getApplication(config); // If assetsFile exist, create from assetsFile and default config/accounts are not needed const assetsJSON = (await fs.readJSON(resolve(assetsFile))) as GenesisAssetsInput; @@ -118,6 +129,15 @@ export abstract class BaseGenesisBlockCommand extends Command { fs.writeFileSync(resolve(configPath, 'genesis_block.blob'), genesisBlock.getBytes(), { mode: OWNER_READ_WRITE, }); + if (exportJSON) { + fs.writeFileSync( + resolve(configPath, 'genesis_block.json'), + JSON.stringify(genesisBlock.toJSON(), undefined, ' '), + { + mode: OWNER_READ_WRITE, + }, + ); + } this.log(`Genesis block files saved at: ${configPath}`); } diff --git a/commander/src/bootstrapping/commands/keys/create.ts b/commander/src/bootstrapping/commands/keys/create.ts index 4765303e6cd..e47cd8dbd45 100644 --- a/commander/src/bootstrapping/commands/keys/create.ts +++ b/commander/src/bootstrapping/commands/keys/create.ts @@ -14,7 +14,7 @@ */ import { codec } from '@liskhq/lisk-codec'; -import { bls, address as addressUtil, ed, encrypt } from '@liskhq/lisk-cryptography'; +import { bls, address as addressUtil, ed, encrypt, legacy } from '@liskhq/lisk-cryptography'; import { Command, Flags as flagParser } from '@oclif/core'; import * as fs from 'fs-extra'; import * as path from 'path'; @@ -60,6 +60,9 @@ export class CreateCommand extends Command { description: 'Chain id', default: 0, }), + 'add-legacy': flagParser.boolean({ + description: 'Add legacy key derivation path to the result', + }), }; async run(): Promise { @@ -72,6 +75,7 @@ export class CreateCommand extends Command { count, offset, chainid, + 'add-legacy': addLegacy, }, } = await this.parse(CreateCommand); @@ -86,7 +90,37 @@ export class CreateCommand extends Command { } const keys = []; - for (let i = 0; i < count; i += 1) { + let i = 0; + if 
(addLegacy) { + const legacyKeyPath = 'legacy'; + const { privateKey: accountPrivateKey, publicKey: accountPublicKey } = + legacy.getPrivateAndPublicKeyFromPassphrase(passphrase); + const address = addressUtil.getAddressFromPublicKey(accountPublicKey); + const generatorPrivateKey = accountPrivateKey; + const generatorPublicKey = ed.getPublicKeyFromPrivateKey(generatorPrivateKey); + const blsKeyPath = `m/12381/134/${chainid}/99999`; + const blsPrivateKey = await bls.getPrivateKeyFromPhraseAndPath(passphrase, blsKeyPath); + const blsPublicKey = bls.getPublicKeyFromPrivateKey(blsPrivateKey); + const result = await this._createEncryptedObject( + { + address, + keyPath: legacyKeyPath, + accountPrivateKey, + accountPublicKey, + generatorKeyPath: legacyKeyPath, + generatorPrivateKey, + generatorPublicKey, + blsKeyPath, + blsPrivateKey, + blsPublicKey, + password, + }, + noEncrypt, + ); + keys.push(result); + i += 1; + } + for (; i < count; i += 1) { const accountKeyPath = `m/44'/134'/${offset + i}'`; const generatorKeyPath = `m/25519'/134'/${chainid}'/${offset + i}'`; const blsKeyPath = `m/12381/134/${chainid}/${offset + i}`; @@ -102,37 +136,23 @@ export class CreateCommand extends Command { const blsPrivateKey = await bls.getPrivateKeyFromPhraseAndPath(passphrase, blsKeyPath); const blsPublicKey = bls.getPublicKeyFromPrivateKey(blsPrivateKey); - let encryptedMessageObject = {}; - if (!noEncrypt) { - const plainGeneratorKeyData = { - generatorKey: generatorPublicKey, + const result = await this._createEncryptedObject( + { + address, + keyPath: accountKeyPath, + accountPrivateKey, + accountPublicKey, + generatorKeyPath, generatorPrivateKey, - blsKey: blsPublicKey, + generatorPublicKey, + blsKeyPath, blsPrivateKey, - }; - const encodedGeneratorKeys = codec.encode(plainGeneratorKeysSchema, plainGeneratorKeyData); - encryptedMessageObject = await encrypt.encryptMessageWithPassword( - encodedGeneratorKeys, + blsPublicKey, password, - ); - } - - keys.push({ - address: 
addressUtil.getLisk32AddressFromAddress(address), - keyPath: accountKeyPath, - publicKey: accountPublicKey.toString('hex'), - privateKey: accountPrivateKey.toString('hex'), - plain: { - generatorKeyPath, - generatorKey: generatorPublicKey.toString('hex'), - generatorPrivateKey: generatorPrivateKey.toString('hex'), - blsKeyPath, - blsKey: blsPublicKey.toString('hex'), - blsProofOfPossession: bls.popProve(blsPrivateKey).toString('hex'), - blsPrivateKey: blsPrivateKey.toString('hex'), }, - encrypted: encryptedMessageObject, - }); + noEncrypt, + ); + keys.push(result); } if (output) { @@ -141,4 +161,52 @@ export class CreateCommand extends Command { this.log(JSON.stringify({ keys }, undefined, ' ')); } } + private async _createEncryptedObject( + input: { + address: Buffer; + keyPath: string; + accountPublicKey: Buffer; + accountPrivateKey: Buffer; + generatorKeyPath: string; + generatorPublicKey: Buffer; + generatorPrivateKey: Buffer; + blsKeyPath: string; + blsPublicKey: Buffer; + blsPrivateKey: Buffer; + password: string; + }, + noEncrypt: boolean, + ) { + let encryptedMessageObject = {}; + if (!noEncrypt) { + const plainGeneratorKeyData = { + generatorKey: input.generatorPublicKey, + generatorPrivateKey: input.generatorPrivateKey, + blsKey: input.blsPublicKey, + blsPrivateKey: input.blsPrivateKey, + }; + const encodedGeneratorKeys = codec.encode(plainGeneratorKeysSchema, plainGeneratorKeyData); + encryptedMessageObject = await encrypt.encryptMessageWithPassword( + encodedGeneratorKeys, + input.password, + ); + } + + return { + address: addressUtil.getLisk32AddressFromAddress(input.address), + keyPath: input.keyPath, + publicKey: input.accountPublicKey.toString('hex'), + privateKey: input.accountPrivateKey.toString('hex'), + plain: { + generatorKeyPath: input.generatorKeyPath, + generatorKey: input.generatorPublicKey.toString('hex'), + generatorPrivateKey: input.generatorPrivateKey.toString('hex'), + blsKeyPath: input.blsKeyPath, + blsKey: 
input.blsPublicKey.toString('hex'), + blsProofOfPossession: bls.popProve(input.blsPrivateKey).toString('hex'), + blsPrivateKey: input.blsPrivateKey.toString('hex'), + }, + encrypted: encryptedMessageObject, + }; + } } diff --git a/commander/src/bootstrapping/commands/transaction/sign.ts b/commander/src/bootstrapping/commands/transaction/sign.ts index bce2cdb0e50..0387c6b0b17 100644 --- a/commander/src/bootstrapping/commands/transaction/sign.ts +++ b/commander/src/bootstrapping/commands/transaction/sign.ts @@ -37,7 +37,6 @@ import { getParamsSchema, } from '../../../utils/transaction'; import { getDefaultPath } from '../../../utils/path'; -import { isApplicationRunning } from '../../../utils/application'; import { PromiseResolvedType } from '../../../types'; import { DEFAULT_KEY_DERIVATION_PATH } from '../../../utils/config'; import { deriveKeypair } from '../../../utils/commons'; @@ -203,7 +202,7 @@ export abstract class SignCommand extends Command { let signedTransaction: Record; if (flags.offline) { - const app = this.getApplication({}, {}); + const app = this.getApplication({ genesis: { chainID: flags['chain-id'] } }); this._metadata = app.getMetadata(); this._schema = { header: blockHeaderSchema, @@ -256,9 +255,6 @@ export abstract class SignCommand extends Command { async finally(error?: Error | string): Promise { if (error) { - if (this._dataPath && !isApplicationRunning(this._dataPath)) { - throw new Error(`Application at data path ${this._dataPath} is not running.`); - } this.error(error instanceof Error ? 
error.message : error); } if (this._client) { @@ -266,8 +262,5 @@ export abstract class SignCommand extends Command { } } - abstract getApplication( - genesisBlock: Record, - config: PartialApplicationConfig, - ): Application; + abstract getApplication(config: PartialApplicationConfig): Application; } diff --git a/commander/test/bootstrapping/commands/keys/create.spec.ts b/commander/test/bootstrapping/commands/keys/create.spec.ts index fe0b6c0ab8f..3b1bd4c6e6f 100644 --- a/commander/test/bootstrapping/commands/keys/create.spec.ts +++ b/commander/test/bootstrapping/commands/keys/create.spec.ts @@ -91,6 +91,7 @@ describe('keys:create command', () => { jest.spyOn(process.stdout, 'write').mockImplementation(val => stdout.push(val as string) > -1); jest.spyOn(process.stderr, 'write').mockImplementation(val => stderr.push(val as string) > -1); jest.spyOn(cryptography.ed, 'getPrivateKeyFromPhraseAndPath'); + jest.spyOn(cryptography.legacy, 'getPrivateAndPublicKeyFromPassphrase'); jest.spyOn(cryptography.ed, 'getPublicKeyFromPrivateKey'); jest.spyOn(cryptography.address, 'getAddressFromPublicKey'); jest.spyOn(cryptography.bls, 'getPrivateKeyFromPhraseAndPath'); @@ -176,6 +177,71 @@ describe('keys:create command', () => { }); }); + describe('keys:create --add-legacy --passphrase', () => { + it('should create valid keys', async () => { + const legacyKeys = + cryptography.legacy.getPrivateAndPublicKeyFromPassphrase(defaultPassphrase); + const legacyBLSKeyPath = 'm/12381/134/0/99999'; + const legacyBLSPrivateKey = await cryptography.bls.getPrivateKeyFromPhraseAndPath( + defaultPassphrase, + legacyBLSKeyPath, + ); + await CreateCommand.run(['--add-legacy', '--passphrase', defaultPassphrase], config); + const loggedData = JSON.parse(stdout[0]); + + expect(cryptography.legacy.getPrivateAndPublicKeyFromPassphrase).toHaveBeenCalledWith( + defaultPassphrase, + ); + expect(cryptography.address.getAddressFromPublicKey).toHaveBeenCalledWith( + legacyKeys.publicKey, + ); + 
expect(cryptography.ed.getPublicKeyFromPrivateKey).not.toHaveBeenCalledWith( + defaultAccountPrivateKey, + ); + expect(cryptography.ed.getPrivateKeyFromPhraseAndPath).not.toHaveBeenCalledWith( + defaultPassphrase, + defaultGeneratorKeyPath, + ); + expect(cryptography.ed.getPublicKeyFromPrivateKey).not.toHaveBeenCalledWith( + defaultGeneratorPrivateKey, + ); + expect(cryptography.bls.getPrivateKeyFromPhraseAndPath).toHaveBeenCalledWith( + defaultPassphrase, + legacyBLSKeyPath, + ); + expect(cryptography.bls.getPublicKeyFromPrivateKey).toHaveBeenCalledWith(legacyBLSPrivateKey); + expect(readerUtils.getPassphraseFromPrompt).not.toHaveBeenCalledWith('passphrase', true); + expect(readerUtils.getPasswordFromPrompt).toHaveBeenCalledWith('password', true); + + expect(loggedData).toMatchObject({ + keys: [ + { + address: cryptography.address.getLisk32AddressFromPublicKey(legacyKeys.publicKey), + keyPath: 'legacy', + publicKey: legacyKeys.publicKey.toString('hex'), + privateKey: legacyKeys.privateKey.toString('hex'), + plain: { + generatorKeyPath: 'legacy', + generatorKey: legacyKeys.publicKey.toString('hex'), + generatorPrivateKey: legacyKeys.privateKey.toString('hex'), + blsKeyPath: legacyBLSKeyPath, + blsPrivateKey: legacyBLSPrivateKey.toString('hex'), + }, + }, + ], + }); + expect(loggedData.keys[0].encrypted).toBeDefined(); + expect(loggedData.keys[0].encrypted).toHaveProperty('ciphertext'); + expect(loggedData.keys[0].encrypted).toHaveProperty('mac'); + expect(loggedData.keys[0].encrypted).toHaveProperty('cipherparams'); + expect(loggedData.keys[0].encrypted).toHaveProperty('kdfparams'); + expect(loggedData.keys[0].encrypted.cipher).toBe('aes-128-gcm'); + expect(loggedData.keys[0].encrypted.kdf).toBe('argon2id'); + expect(loggedData.keys[0].encrypted.version).toBe('1'); + expect(consoleWarnSpy).toHaveBeenCalledTimes(0); + }); + }); + describe('keys:create --no-encrypt true', () => { it('should create valid keys', async () => { await CreateCommand.run(['--no-encrypt'], 
config); diff --git a/elements/lisk-api-client/src/ipc_channel.ts b/elements/lisk-api-client/src/ipc_channel.ts index dbbd9b7dece..3171cb2cb09 100644 --- a/elements/lisk-api-client/src/ipc_channel.ts +++ b/elements/lisk-api-client/src/ipc_channel.ts @@ -20,8 +20,8 @@ import { Subscriber, Dealer } from 'zeromq'; import { Channel, EventCallback, Defer, JSONRPCMessage, JSONRPCResponse } from './types'; import { convertRPCError, defer, promiseWithTimeout } from './utils'; -const CONNECTION_TIME_OUT = 2000; -const RESPONSE_TIMEOUT = 3000; +const CONNECTION_TIMEOUT = 5000; +const RESPONSE_TIMEOUT = 10000; const getSocketsPath = (dataPath: string) => { const socketDir = path.join(path.resolve(dataPath.replace('~', homedir())), 'tmp', 'sockets'); @@ -67,7 +67,7 @@ export class IPCChannel implements Channel { 'IPC Socket client connection timeout. Please check if IPC server is running.', ), ); - }, CONNECTION_TIME_OUT); + }, CONNECTION_TIMEOUT); this._subSocket.events.on('connect', () => { clearTimeout(timeout); resolve(); @@ -83,7 +83,7 @@ export class IPCChannel implements Channel { 'IPC Socket client connection timeout. 
Please check if IPC server is running.', ), ); - }, CONNECTION_TIME_OUT); + }, CONNECTION_TIMEOUT); this._rpcClient.events.on('connect', () => { clearTimeout(timeout); resolve(undefined); diff --git a/elements/lisk-chain/src/block_header.ts b/elements/lisk-chain/src/block_header.ts index d7a3c21ae26..2523b95d193 100644 --- a/elements/lisk-chain/src/block_header.ts +++ b/elements/lisk-chain/src/block_header.ts @@ -261,13 +261,13 @@ export class BlockHeader { }); } - if (header.aggregateCommit.height !== 0) { + if (header.aggregateCommit.height !== header.height) { errors.push({ - message: 'Genesis block header aggregateCommit.height must equal 0', + message: 'Genesis block header aggregateCommit.height must equal to the genesis height', keyword: 'const', dataPath: 'aggregateCommit.height', schemaPath: 'properties.aggregateCommit.height', - params: { allowedValue: 0 }, + params: { allowedValue: header.height }, }); } diff --git a/elements/lisk-chain/test/unit/block.spec.ts b/elements/lisk-chain/test/unit/block.spec.ts index b1c25337bee..5bac9da83ca 100644 --- a/elements/lisk-chain/test/unit/block.spec.ts +++ b/elements/lisk-chain/test/unit/block.spec.ts @@ -124,7 +124,7 @@ describe('block', () => { maxHeightGenerated: 0, validatorsHash: utils.hash(Buffer.alloc(0)), aggregateCommit: { - height: 0, + height: 1009988, aggregationBits: Buffer.alloc(0), certificateSignature: EMPTY_BUFFER, }, diff --git a/elements/lisk-chain/test/unit/block_header.spec.ts b/elements/lisk-chain/test/unit/block_header.spec.ts index f713ccd9fca..8010797718e 100644 --- a/elements/lisk-chain/test/unit/block_header.spec.ts +++ b/elements/lisk-chain/test/unit/block_header.spec.ts @@ -277,7 +277,7 @@ describe('block_header', () => { ); }); - it('should throw error if aggregateCommit.height is not equal to 0', () => { + it('should throw error if aggregateCommit.height is not equal to the height', () => { const block = getGenesisBlockAttrs(); const blockHeader = new BlockHeader({ ...block, @@ -285,7 
+285,7 @@ describe('block_header', () => { }); expect(() => blockHeader.validateGenesis()).toThrow( - 'Genesis block header aggregateCommit.height must equal 0', + 'Genesis block header aggregateCommit.height must equal to the genesis height', ); }); diff --git a/elements/lisk-cryptography/src/encrypt.ts b/elements/lisk-cryptography/src/encrypt.ts index e87fe559e3c..64cd6eafb35 100644 --- a/elements/lisk-cryptography/src/encrypt.ts +++ b/elements/lisk-cryptography/src/encrypt.ts @@ -101,7 +101,9 @@ export const encryptAES128GCMWithPassword = async ( const salt = crypto.randomBytes(SALT_BUFFER_SIZE); const iv = crypto.randomBytes(IV_BUFFER_SIZE); const iterations = - kdf === KDF.ARGON2 ? ARGON2_ITERATIONS : options?.kdfparams?.iterations ?? PBKDF2_ITERATIONS; + options?.kdfparams?.iterations ?? + (kdf === KDF.ARGON2 ? ARGON2_ITERATIONS : options?.kdfparams?.iterations ?? PBKDF2_ITERATIONS); + const parallelism = options?.kdfparams?.parallelism ?? ARGON2_PARALLELISM; const memorySize = options?.kdfparams?.memorySize ?? 
ARGON2_MEMORY; let key: Buffer; diff --git a/elements/lisk-cryptography/src/utils.ts b/elements/lisk-cryptography/src/utils.ts index feb84e406ca..aecb346c366 100644 --- a/elements/lisk-cryptography/src/utils.ts +++ b/elements/lisk-cryptography/src/utils.ts @@ -48,7 +48,7 @@ export const hash = (data: Buffer | string, format?: string): Buffer => { export const parseKeyDerivationPath = (path: string) => { if (!path.startsWith('m') || !path.includes('/')) { - throw new Error('Invalid path format'); + throw new Error('Invalid key derivation path format'); } return ( @@ -58,19 +58,19 @@ export const parseKeyDerivationPath = (path: string) => { .slice(1) .map(segment => { if (!/^[0-9']+$/g.test(segment)) { - throw new Error('Invalid path format'); + throw new Error('Invalid key derivation path format'); } // if segment includes apostrophe add HARDENED_OFFSET if (segment.includes(`'`)) { if (parseInt(segment.slice(0, -1), 10) > MAX_UINT32 / 2) { - throw new Error('Invalid path format'); + throw new Error('Invalid key derivation path format'); } return parseInt(segment, 10) + HARDENED_OFFSET; } if (parseInt(segment, 10) > MAX_UINT32) { - throw new Error('Invalid path format'); + throw new Error('Invalid key derivation path format'); } return parseInt(segment, 10); diff --git a/elements/lisk-cryptography/test/ed.spec.ts b/elements/lisk-cryptography/test/ed.spec.ts index 8edb23aed0f..721625096cf 100644 --- a/elements/lisk-cryptography/test/ed.spec.ts +++ b/elements/lisk-cryptography/test/ed.spec.ts @@ -182,19 +182,19 @@ describe('getPrivateKeyFromPhraseAndPath', () => { it('should fail for empty string path', async () => { await expect(getPrivateKeyFromPhraseAndPath(passphrase, '')).rejects.toThrow( - 'Invalid path format', + 'Invalid key derivation path format', ); }); it('should fail if path does not start with "m"', async () => { await expect(getPrivateKeyFromPhraseAndPath(passphrase, `/44'/134'/0'`)).rejects.toThrow( - 'Invalid path format', + 'Invalid key derivation 
path format', ); }); it('should fail if path does not include at least one "/"', async () => { await expect(getPrivateKeyFromPhraseAndPath(passphrase, 'm441340')).rejects.toThrow( - 'Invalid path format', + 'Invalid key derivation path format', ); }); @@ -204,24 +204,24 @@ describe('getPrivateKeyFromPhraseAndPath', () => { passphrase, `m//134'/0'`, // should be number with or without ' between every back slash ), - ).rejects.toThrow('Invalid path format'); + ).rejects.toThrow('Invalid key derivation path format'); }); it('should fail for path with invalid characters', async () => { await expect(getPrivateKeyFromPhraseAndPath(passphrase, `m/a'/134b'/0'`)).rejects.toThrow( - 'Invalid path format', + 'Invalid key derivation path format', ); }); it('should fail for path with non-sanctioned special characters', async () => { await expect(getPrivateKeyFromPhraseAndPath(passphrase, `m/4a'/#134b'/0'`)).rejects.toThrow( - 'Invalid path format', + 'Invalid key derivation path format', ); }); it(`should fail for path with segment greater than ${MAX_UINT32} / 2`, async () => { await expect( getPrivateKeyFromPhraseAndPath(passphrase, `m/44'/134'/${MAX_UINT32}'`), - ).rejects.toThrow('Invalid path format'); + ).rejects.toThrow('Invalid key derivation path format'); }); }); diff --git a/elements/lisk-cryptography/test/encrypt.spec.ts b/elements/lisk-cryptography/test/encrypt.spec.ts index 2a0b3aa22fe..7376504efe4 100644 --- a/elements/lisk-cryptography/test/encrypt.spec.ts +++ b/elements/lisk-cryptography/test/encrypt.spec.ts @@ -110,21 +110,45 @@ describe('encrypt', () => { expect(encryptedMessage.kdfparams.iterations).toBe(customIterations); }); - it('should call options.getKey if provided', async () => { - const mockKey = utils.getRandomBytes(32); - const mockGetKey = jest.fn().mockResolvedValue(mockKey); - encryptedMessage = await encryptMessageWithPassword(passphrase, password, { - kdf: KDF.ARGON2, - getKey: mockGetKey, + describe('if options.getKey is provided', () => { + 
it('should call options.getKey', async () => { + const mockKey = utils.getRandomBytes(32); + const mockGetKey = jest.fn().mockResolvedValue(mockKey); + encryptedMessage = await encryptMessageWithPassword(passphrase, password, { + kdf: KDF.ARGON2, + getKey: mockGetKey, + }); + + expect(mockGetKey).toHaveBeenCalledOnceWith({ + password: expect.anything(), + salt: expect.anything(), + iterations: expect.anything(), + parallelism: expect.anything(), + memorySize: expect.anything(), + hashLength: HASH_LENGTH, + }); }); - expect(mockGetKey).toHaveBeenCalledOnceWith({ - password: expect.anything(), - salt: expect.anything(), - iterations: expect.anything(), - parallelism: expect.anything(), - memorySize: expect.anything(), - hashLength: HASH_LENGTH, + it('should call options.getKey with provided iterations', async () => { + const iterations = 10; + const mockKey = utils.getRandomBytes(32); + const mockGetKey = jest.fn().mockResolvedValue(mockKey); + encryptedMessage = await encryptMessageWithPassword(passphrase, password, { + kdf: KDF.ARGON2, + getKey: mockGetKey, + kdfparams: { + iterations: 10, + }, + }); + + expect(mockGetKey).toHaveBeenCalledOnceWith({ + password: expect.anything(), + salt: expect.anything(), + iterations, + parallelism: expect.anything(), + memorySize: expect.anything(), + hashLength: HASH_LENGTH, + }); }); }); }); diff --git a/examples/interop/common/mainchain_registration.ts b/examples/interop/common/mainchain_registration.ts index 939757f5d1c..fd3acc40db0 100644 --- a/examples/interop/common/mainchain_registration.ts +++ b/examples/interop/common/mainchain_registration.ts @@ -1,22 +1,42 @@ -import { codec, cryptography, apiClient, Transaction } from 'lisk-sdk'; import { + codec, + cryptography, + apiClient, + Transaction, registrationSignatureMessageSchema, mainchainRegParams as mainchainRegParamsSchema, MESSAGE_TAG_CHAIN_REG, MODULE_NAME_INTEROPERABILITY, -} from 'lisk-framework'; -import { COMMAND_NAME_MAINCHAIN_REG } from 
'lisk-framework/dist-node/modules/interoperability/constants'; - -export const registerMainchain = async ( - num: string, - sidechainDevValidators: any[], - sidechainValidatorsKeys: any[], -) => { +} from 'lisk-sdk'; + +/** + * Registers the mainchain to a specific sidechain node. + * + * @example + * ```js + * // Update path to point to the dev-validators.json file of the sidechain which shall be registered on the mainchain +import { keys as sidechainDevValidators } from '../default/dev-validators.json'; + + * (async () => { + * await registerMainchain("lisk-core","my-lisk-app",sidechainDevValidators); + *})(); + * ``` + * + * @param mc mainchain alias of the mainchain to be registered. + * @param sc sidechain alias of the sidechain, where the mainchain shall be registered. + * @param sidechainDevValidators the `key` property of the `dev-validators.json` file. + * Includes all keys of the sidechain validators to create the aggregated signature. + */ + +export const registerMainchain = async (mc: string, sc: string, sidechainDevValidators: any[]) => { const { bls, address } = cryptography; - const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-${num}`); - const sidechainClient = await apiClient.createIPCClient(`~/.lisk/pos-sidechain-example-${num}`); + // Connect to the mainchain node + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/${mc}`); + // Connect to the sidechain node + const sidechainClient = await apiClient.createIPCClient(`~/.lisk/${sc}`); + // Get node info data from sidechain and mainchain const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); const sidechainNodeInfo = await sidechainClient.invoke('system_getNodeInfo'); @@ -28,15 +48,17 @@ export const registerMainchain = async ( height: mainchainNodeInfo.height, }); + // Sort validator list lexicographically after their BLS key const paramsJSON = { ownChainID: sidechainNodeInfo.chainID, - ownName: `sidechain_example_${num}`, + 
ownName: sc.replace(/-/g, '_'), mainchainValidators: (mainchainActiveValidators as { blsKey: string; bftWeight: string }[]) .map(v => ({ blsKey: v.blsKey, bftWeight: v.bftWeight })) .sort((a, b) => Buffer.from(a.blsKey, 'hex').compare(Buffer.from(b.blsKey, 'hex'))), mainchainCertificateThreshold, }; + // Define parameters for the mainchain registration const params = { ownChainID: Buffer.from(paramsJSON.ownChainID as string, 'hex'), ownName: paramsJSON.ownName, @@ -47,17 +69,19 @@ export const registerMainchain = async ( mainchainCertificateThreshold: paramsJSON.mainchainCertificateThreshold, }; + // Encode parameters const message = codec.encode(registrationSignatureMessageSchema, params); - // Get active validators from sidechainChain + // Get active validators from sidechain const { validators: sidechainActiveValidators } = await sidechainClient.invoke( 'consensus_getBFTParameters', { height: sidechainNodeInfo.height }, ); + // Add validator private keys to the sidechain validator list const activeValidatorsWithPrivateKey: { blsPublicKey: Buffer; blsPrivateKey: Buffer }[] = []; for (const v of sidechainActiveValidators as { blsKey: string; bftWeight: string }[]) { - const validatorInfo = sidechainValidatorsKeys.find( + const validatorInfo = sidechainDevValidators.find( configValidator => configValidator.plain.blsKey === v.blsKey, ); if (validatorInfo) { @@ -68,11 +92,13 @@ export const registerMainchain = async ( } } console.log('Total activeValidatorsWithPrivateKey:', activeValidatorsWithPrivateKey.length); - // Sort active validators from sidechainChain + + // Sort active validators from sidechain lexicographically after their BLS public key activeValidatorsWithPrivateKey.sort((a, b) => a.blsPublicKey.compare(b.blsPublicKey)); const sidechainValidatorsSignatures: { publicKey: Buffer; signature: Buffer }[] = []; - // Sign with each active validator + + // Sign parameters with each active sidechain validator for (const validator of 
activeValidatorsWithPrivateKey) { const signature = bls.signData( MESSAGE_TAG_CHAIN_REG, @@ -92,18 +118,23 @@ export const registerMainchain = async ( sidechainValidatorsSignatures, ); + // Get public key and nonce of the sender account const relayerKeyInfo = sidechainDevValidators[0]; const { nonce } = await sidechainClient.invoke<{ nonce: string }>('auth_getAuthAccount', { address: address.getLisk32AddressFromPublicKey(Buffer.from(relayerKeyInfo.publicKey, 'hex')), }); + + // Add aggregated signature to the parameters of the mainchain registration const mainchainRegParams = { ...paramsJSON, signature: signature.toString('hex'), aggregationBits: aggregationBits.toString('hex'), }; + + // Create registerMainchain transaction const tx = new Transaction({ module: MODULE_NAME_INTEROPERABILITY, - command: COMMAND_NAME_MAINCHAIN_REG, + command: 'registerMainchain', fee: BigInt(2000000000), params: codec.encodeJSON(mainchainRegParamsSchema, mainchainRegParams), nonce: BigInt(nonce), @@ -111,11 +142,13 @@ export const registerMainchain = async ( signatures: [], }); + // Sign the transaction tx.sign( Buffer.from(sidechainNodeInfo.chainID as string, 'hex'), Buffer.from(relayerKeyInfo.privateKey, 'hex'), ); + // Post the transaction to a sidechain node const result = await sidechainClient.invoke<{ transactionId: string; }>('txpool_postTransaction', { diff --git a/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json b/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json index ba01c00aa2b..cbadf15b996 100644 --- a/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json +++ b/examples/interop/pos-mainchain-fast/config/default/genesis_assets.json @@ -4,10 +4,72 @@ "module": "interoperability", "data": { "ownChainName": "lisk_mainchain", - "ownChainNonce": 0, - "chainInfos": [], - "terminatedStateAccounts": [], - "terminatedOutboxAccounts": [] + "ownChainNonce": "123", + "chainInfos": [ + { + "chainID": "04123456", + "chainData": 
{ + "name": "dummy", + "lastCertificate": { + "height": 567467, + "timestamp": 1000, + "stateRoot": "0000000000000000000000000000000000000000000000000000000000000000", + "validatorsHash": "a5a053d50182ea0c33bc03594cf4760f11d67cdba407d16bc0512fabb468253a" + }, + "status": 2 + }, + "channelData": { + "inbox": { + "appendPath": [ + "0000000000000000000000000000000000000000000000000000000000000000", + "0000000000000000000000000000000000000000000000000000000000000000" + ], + "size": 18, + "root": "9e37ffe87f08f6b7952d82acdb3376046792533ebf2ba8908dda3ebc813dac9a" + }, + "outbox": { + "appendPath": [ + "0000000000000000000000000000000000000000000000000000000000000000", + "0000000000000000000000000000000000000000000000000000000000000000" + ], + "size": 18, + "root": "221a0c844883022e3a54e78eb5dcdc2ed8faa85f648c40659cf5fd2054f36500" + }, + "partnerChainOutboxRoot": "851faa36d87411d625fb4416c33c6a44795cb84155637f9991fcef06d1de7155", + "messageFeeTokenID": "0400000000000000", + "minReturnFeePerByte": "1000" + }, + "chainValidators": { + "activeValidators": [ + { + "blsKey": "3c1e6f29e3434f816cd6697e56cc54bc8d80927bf65a1361b383aa338cd3f63cbf82ce801b752cb32f8ecb3f8cc16835", + "bftWeight": "10" + } + ], + "certificateThreshold": "10" + } + } + ], + "terminatedStateAccounts": [ + { + "chainID": "04123456", + "terminatedStateAccount": { + "stateRoot": "0000000000000000000000000000000000000000000000000000000000000000", + "mainchainStateRoot": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "initialized": true + } + } + ], + "terminatedOutboxAccounts": [ + { + "chainID": "04123456", + "terminatedOutboxAccount": { + "outboxRoot": "0aed892d544980f5b806dbce4bcb65517acf57a7af012c55c0b2f80b188fa290", + "outboxSize": 1, + "partnerChainInboxSize": 1 + } + } + ] }, "schema": { "$id": "/interoperability/module/genesis", @@ -916,11 +978,17 @@ "totalSupply": "10300000000000000" } ], - "escrowSubstore": [], + "escrowSubstore": [ + { + "escrowChainID": "04123456", + 
"tokenID": "0400000000000000", + "amount": "0" + } + ], "supportedTokensSubstore": [ { - "chainID": "", - "supportedTokenIDs": [] + "chainID": "04123456", + "supportedTokenIDs": ["0412345600000000"] } ] }, diff --git a/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts b/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts index 7e0d09cdefa..a287c80a607 100644 --- a/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts +++ b/examples/interop/pos-mainchain-fast/config/scripts/sidechain_registration.ts @@ -1,57 +1,70 @@ -import { apiClient, codec, cryptography, Transaction } from 'lisk-sdk'; +import { apiClient, codec, sidechainRegParams, cryptography, Transaction } from 'lisk-sdk'; +// Replace this with the path to a file storing the public and private key of a mainchain account who will send the sidechain registration transaction. +// (Can be any account with enough tokens). import { keys } from '../default/dev-validators.json'; -import { sidechainRegParams } from 'lisk-framework'; (async () => { const { address } = cryptography; - const SIDECHAIN_ARRAY = ['one', 'two']; + // Replace this with alias of the sidechain node(s) + const SIDECHAIN_ARRAY = ['pos-sidechain-example-one', 'pos-sidechain-example-two']; + // Replace this with the alias of the mainchain node(s), e.g. lisk-core + // Note: Number of mainchain nodes should be equal to sidechain nodes, for this script to work properly. 
+ const MAINCHAIN_ARRAY = ['mainchain-node-one', 'mainchain-node-two']; let i = 0; for (const nodeAlias of SIDECHAIN_ARRAY) { - const sidechainClient = await apiClient.createIPCClient( - `~/.lisk/pos-sidechain-example-${nodeAlias}`, - ); - const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-${nodeAlias}`); + // Connect to the sidechain node + const sidechainClient = await apiClient.createIPCClient(`~/.lisk/${nodeAlias}`); + // Connect to the mainchain node + const mainchainClient = await apiClient.createIPCClient(`~/.lisk/${MAINCHAIN_ARRAY[i]}`); + // Get node info data from sidechain and mainchain const sidechainNodeInfo = await sidechainClient.invoke('system_getNodeInfo'); const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); - // Get active validators from sidechainchain - const { validators: sidehcainActiveValidators, certificateThreshold } = + + // Get info about the active sidechain validators and the certificate threshold + const { validators: sidechainActiveValidators, certificateThreshold } = await sidechainClient.invoke('consensus_getBFTParameters', { height: sidechainNodeInfo.height, }); - (sidehcainActiveValidators as { blsKey: string; bftWeight: string }[]).sort((a, b) => + // Sort validator list lexicographically after their BLS key + (sidechainActiveValidators as { blsKey: string; bftWeight: string }[]).sort((a, b) => Buffer.from(a.blsKey, 'hex').compare(Buffer.from(b.blsKey, 'hex')), ); + // Define parameters for the sidechain registration const params = { sidechainCertificateThreshold: certificateThreshold, - sidechainValidators: sidehcainActiveValidators, + sidechainValidators: sidechainActiveValidators, chainID: sidechainNodeInfo.chainID, - name: `sidechain_example_${nodeAlias}`, + name: nodeAlias.replace(/-/g, '_'), }; - const relayerkeyInfo = keys[2]; + // Get public key and nonce of the sender account + const relayerKeyInfo = keys[2]; const { nonce } = await mainchainClient.invoke<{ nonce: 
string }>('auth_getAuthAccount', { - address: address.getLisk32AddressFromPublicKey(Buffer.from(relayerkeyInfo.publicKey, 'hex')), + address: address.getLisk32AddressFromPublicKey(Buffer.from(relayerKeyInfo.publicKey, 'hex')), }); + // Create registerSidechain transaction const tx = new Transaction({ module: 'interoperability', command: 'registerSidechain', fee: BigInt(2000000000), params: codec.encodeJSON(sidechainRegParams, params), nonce: BigInt(nonce), - senderPublicKey: Buffer.from(relayerkeyInfo.publicKey, 'hex'), + senderPublicKey: Buffer.from(relayerKeyInfo.publicKey, 'hex'), signatures: [], }); + // Sign the transaction tx.sign( Buffer.from(mainchainNodeInfo.chainID as string, 'hex'), - Buffer.from(relayerkeyInfo.privateKey, 'hex'), + Buffer.from(relayerKeyInfo.privateKey, 'hex'), ); + // Post the transaction to a mainchain node const result = await mainchainClient.invoke<{ transactionId: string; }>('txpool_postTransaction', { @@ -59,11 +72,12 @@ import { sidechainRegParams } from 'lisk-framework'; }); console.log( - `Sent sidechain registration transaction on mainchain node ${nodeAlias}. Result from transaction pool is: `, + `Sent sidechain registration transaction on mainchain node ${MAINCHAIN_ARRAY[1]}. Result from transaction pool is: `, result, ); i += 1; + // Wait in case there are more elements in the SIDECHAIN_ARRAY, after performing another loop with the next element. 
const wait = async (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); if (i < SIDECHAIN_ARRAY.length) { const WAIT_PERIOD = 10000; diff --git a/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts b/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts index 97266315f99..4f0cafa3146 100644 --- a/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts +++ b/examples/interop/pos-mainchain-fast/config/scripts/transfer_lsk_sidechain_one.ts @@ -1,4 +1,6 @@ import { apiClient, codec, cryptography, Schema, Transaction } from 'lisk-sdk'; +// Replace this with the path to a file storing the public and private key of a mainchain account who will send the sidechain registration transaction. +// (Can be any account with enough tokens). import { keys } from '../default/dev-validators.json'; type ModulesMetadata = [ { @@ -12,25 +14,30 @@ type ModulesMetadata = [ const { address } = cryptography; const nodeAlias = 'one'; + // Update this with the Token ID of the token you wish to transfer const tokenID = Buffer.from('0400000000000000', 'hex'); - const sidechainID = Buffer.from('04000001', 'hex'); // Update this to send to another sidechain + // Update this with the chain ID of the receiving chain + const sidechainID = Buffer.from('04000001', 'hex'); + // Update this with the recipient address const recipientLSKAddress = 'lskxz85sur2yo22dmcxybe39uvh2fg7s2ezxq4ny9'; const recipientAddress = address.getAddressFromLisk32Address(recipientLSKAddress); + // Connect to the mainchain node const mainchainClient = await apiClient.createIPCClient(`~/.lisk/mainchain-node-one`); + // Get node info data from mainchain const mainchainNodeInfo = await mainchainClient.invoke('system_getNodeInfo'); + // Get schema for the transferCrossChain command const { modules: modulesMetadata } = await mainchainClient.invoke<{ modules: ModulesMetadata; }>('system_getMetadata'); - const tokenMetadata = 
modulesMetadata.find(m => m.name === 'token'); - const ccTransferCMDSchema = tokenMetadata?.commands.filter( cmd => cmd.name == 'transferCrossChain', )[0].params as Schema; + // Define parameters for the cc transfer const params = { tokenID, amount: BigInt('10000000000'), @@ -41,11 +48,13 @@ type ModulesMetadata = [ messageFeeTokenID: tokenID, }; + // Get public key and nonce of the sender account const relayerkeyInfo = keys[2]; const { nonce } = await mainchainClient.invoke<{ nonce: string }>('auth_getAuthAccount', { address: address.getLisk32AddressFromPublicKey(Buffer.from(relayerkeyInfo.publicKey, 'hex')), }); + // Create transferCrossChain transaction const tx = new Transaction({ module: 'token', command: 'transferCrossChain', @@ -56,11 +65,13 @@ type ModulesMetadata = [ signatures: [], }); + // Sign the transaction tx.sign( Buffer.from(mainchainNodeInfo.chainID as string, 'hex'), Buffer.from(relayerkeyInfo.privateKey, 'hex'), ); + // Post the transaction to a mainchain node const result = await mainchainClient.invoke<{ transactionId: string; }>('txpool_postTransaction', { diff --git a/examples/interop/pos-sidechain-example-one/config/default/config.json b/examples/interop/pos-sidechain-example-one/config/default/config.json index b1cb2b0e91d..562a5b59e4b 100644 --- a/examples/interop/pos-sidechain-example-one/config/default/config.json +++ b/examples/interop/pos-sidechain-example-one/config/default/config.json @@ -48,6 +48,10 @@ "ccuFee": "500000000", "receivingChainIPCPath": "~/.lisk/mainchain-node-one", "registrationHeight": 10 + }, + "dashboard": { + "applicationUrl": "ws://127.0.0.1:7885/rpc-ws", + "port": 4006 } } } diff --git a/examples/interop/pos-sidechain-example-one/config/scripts/mainchain_registration.ts b/examples/interop/pos-sidechain-example-one/config/scripts/mainchain_registration.ts index f41598533a4..ba3439b6ed5 100644 --- a/examples/interop/pos-sidechain-example-one/config/scripts/mainchain_registration.ts +++ 
b/examples/interop/pos-sidechain-example-one/config/scripts/mainchain_registration.ts @@ -1,7 +1,10 @@ -import { keys as sidechainValidatorsKeys } from '../../config/default/dev-validators.json'; import { keys as sidechainDevValidators } from '../default/dev-validators.json'; import { registerMainchain } from '../../../common/mainchain_registration'; (async () => { - await registerMainchain('one', sidechainDevValidators, sidechainValidatorsKeys); + await registerMainchain( + 'mainchain-node-one', + 'pos-sidechain-example-one', + sidechainDevValidators, + ); })(); diff --git a/examples/interop/pos-sidechain-example-one/src/app/app.ts b/examples/interop/pos-sidechain-example-one/src/app/app.ts index 61d1e557edf..20cf00b39c8 100644 --- a/examples/interop/pos-sidechain-example-one/src/app/app.ts +++ b/examples/interop/pos-sidechain-example-one/src/app/app.ts @@ -2,6 +2,7 @@ import { Application, PartialApplicationConfig, NFTModule } from 'lisk-sdk'; import { TestNftModule } from './modules/testNft/module'; import { registerModules } from './modules'; import { registerPlugins } from './plugins'; +import { HelloModule } from './modules/hello/module'; export const getApplication = (config: PartialApplicationConfig): Application => { const { app, method } = Application.defaultApplication(config, false); @@ -18,6 +19,11 @@ export const getApplication = (config: PartialApplicationConfig): Application => app.registerModule(nftModule); app.registerModule(testNftModule); + const helloModule = new HelloModule(); + app.registerModule(helloModule); + + app.registerInteroperableModule(helloModule); + registerModules(app); registerPlugins(app); diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_commands/react_command.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_commands/react_command.ts new file mode 100644 index 00000000000..de26cd3409c --- /dev/null +++ 
b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_commands/react_command.ts @@ -0,0 +1,82 @@ +/* eslint-disable class-methods-use-this */ + +import { BaseCCCommand, CrossChainMessageContext, codec, cryptography, db } from 'lisk-sdk'; +import { crossChainReactParamsSchema, CCReactMessageParams } from '../schema'; +import { MAX_RESERVED_ERROR_STATUS, CROSS_CHAIN_COMMAND_NAME_REACT } from '../constants'; +import { ReactionStore, ReactionStoreData } from '../stores/reaction'; +import { MessageStore } from '../stores/message'; + +export class ReactCCCommand extends BaseCCCommand { + public schema = crossChainReactParamsSchema; + + public get name(): string { + return CROSS_CHAIN_COMMAND_NAME_REACT; + } + + // eslint-disable-next-line @typescript-eslint/require-await + public async verify(ctx: CrossChainMessageContext): Promise { + const { ccm } = ctx; + + if (ccm.status > MAX_RESERVED_ERROR_STATUS) { + throw new Error('Invalid CCM status code.'); + } + + const params = codec.decode(crossChainReactParamsSchema, ccm.params); + const messageCreatorAddress = cryptography.address.getAddressFromLisk32Address( + params.helloMessageID, + ); + if (!(await this.stores.get(MessageStore).has(ctx, messageCreatorAddress))) { + throw new Error('Message ID does not exists.'); + } + } + + public async execute(ctx: CrossChainMessageContext): Promise { + const { ccm, logger } = ctx; + logger.info('Executing React CCM'); + // const methodContext = ctx.getMethodContext(); + // const { sendingChainID, status, receivingChainID } = ccm; + const params = codec.decode(crossChainReactParamsSchema, ccm.params); + logger.info(params, 'parameters'); + const { helloMessageID, reactionType } = params; + const reactionSubstore = this.stores.get(ReactionStore); + + logger.info({ helloMessageID }, 'Contents of helloMessageID'); + const messageCreatorAddress = cryptography.address.getAddressFromLisk32Address(helloMessageID); + logger.info({ messageCreatorAddress }, 'Contents of 
messageCreatorAddress'); + + let msgReactions: ReactionStoreData; + + try { + msgReactions = await reactionSubstore.get(ctx, messageCreatorAddress); + } catch (error) { + if (!(error instanceof db.NotFoundError)) { + logger.info({ helloMessageID, crossChainCommand: this.name }, (error as Error).message); + logger.error({ error }, 'Error when getting the reaction substore'); + throw error; + } + + logger.info( + { helloMessageID, crossChainCommand: this.name }, + `No entry exists for given helloMessageID ${helloMessageID}. Creating a default entry.`, + ); + msgReactions = { reactions: { like: [] } }; + } + + logger.info( + { msgReactions }, + '+++++++++++++++++++++++++++++=============++++++++++++++++++++++++', + ); + logger.info({ msgReactions }, 'Contents of the reaction store PRE'); + logger.info(msgReactions, 'Contents of the reaction store PRE'); + if (reactionType === 0) { + // TODO: Check if the Likes array already contains the sender address. If yes, remove the address to unlike the post. 
+ msgReactions.reactions.like.push(ctx.transaction.senderAddress); + } else { + logger.error({ reactionType }, 'invalid reaction type'); + } + + logger.info(msgReactions, 'Contents of the reaction store POST'); + logger.info({ msgReactions }, 'Contents of the reaction store POST'); + await reactionSubstore.set(ctx, messageCreatorAddress, msgReactions); + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_method.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_method.ts new file mode 100644 index 00000000000..f8535173f9a --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/cc_method.ts @@ -0,0 +1,3 @@ +import { BaseCCMethod } from 'lisk-sdk'; + +export class HelloInteroperableMethod extends BaseCCMethod {} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/commands/create_hello_command.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/commands/create_hello_command.ts new file mode 100644 index 00000000000..86cc22753bd --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/commands/create_hello_command.ts @@ -0,0 +1,89 @@ +/* eslint-disable class-methods-use-this */ + +import { + BaseCommand, + CommandVerifyContext, + CommandExecuteContext, + VerificationResult, + VerifyStatus, +} from 'lisk-sdk'; +import { createHelloSchema } from '../schema'; +import { MessageStore } from '../stores/message'; +import { counterKey, CounterStore, CounterStoreData } from '../stores/counter'; +import { ModuleConfig } from '../types'; +import { NewHelloEvent } from '../events/new_hello'; + +interface Params { + message: string; +} + +export class CreateHelloCommand extends BaseCommand { + public schema = createHelloSchema; + private _blacklist!: string[]; + + // eslint-disable-next-line @typescript-eslint/require-await + public async init(config: ModuleConfig): Promise { + // Set _blacklist to the value of the blacklist 
defined in the module config + this._blacklist = config.blacklist; + // Set the max message length to the value defined in the module config + this.schema.properties.message.maxLength = config.maxMessageLength; + // Set the min message length to the value defined in the module config + this.schema.properties.message.minLength = config.minMessageLength; + } + + // eslint-disable-next-line @typescript-eslint/require-await + public async verify(context: CommandVerifyContext): Promise { + let validation: VerificationResult; + const wordList = context.params.message.split(' '); + const found = this._blacklist.filter(value => wordList.includes(value)); + if (found.length > 0) { + context.logger.info('==== FOUND: Message contains a blacklisted word ===='); + throw new Error(`Illegal word in hello message: ${found.toString()}`); + } else { + context.logger.info('==== NOT FOUND: Message contains no blacklisted words ===='); + validation = { + status: VerifyStatus.OK, + }; + } + return validation; + } + + public async execute(context: CommandExecuteContext): Promise { + // 1. Get account data of the sender of the Hello transaction. + const { senderAddress } = context.transaction; + // 2. Get message and counter stores. + const messageSubstore = this.stores.get(MessageStore); + const counterSubstore = this.stores.get(CounterStore); + + // 3. Save the Hello message to the message store, using the senderAddress as key, and the message as value. + await messageSubstore.set(context, senderAddress, { + message: context.params.message, + }); + + // 3. Get the Hello counter from the counter store. + let helloCounter: CounterStoreData; + try { + helloCounter = await counterSubstore.get(context, counterKey); + } catch (error) { + helloCounter = { + counter: 0, + }; + } + // 5. Increment the Hello counter +1. + helloCounter.counter += 1; + + // 6. Save the Hello counter to the counter store. + await counterSubstore.set(context, counterKey, helloCounter); + + // 7. 
Emit a "New Hello" event + const newHelloEvent = this.events.get(NewHelloEvent); + newHelloEvent.add( + context, + { + senderAddress: context.transaction.senderAddress, + message: context.params.message, + }, + [context.transaction.senderAddress], + ); + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/constants.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/constants.ts new file mode 100644 index 00000000000..9b427343540 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/constants.ts @@ -0,0 +1,4 @@ +export const CROSS_CHAIN_COMMAND_NAME_REACT = 'reactCrossChain'; + +export const MAX_RESERVED_ERROR_STATUS = 63; +export const CCM_STATUS_OK = 0; diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/endpoint.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/endpoint.ts new file mode 100644 index 00000000000..4db39083be5 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/endpoint.ts @@ -0,0 +1,47 @@ +import { BaseEndpoint, ModuleEndpointContext, cryptography } from 'lisk-sdk'; +import { counterKey, CounterStore, CounterStoreData } from './stores/counter'; +import { MessageStore, MessageStoreData } from './stores/message'; +import { ReactionStore, ReactionStoreData } from './stores/reaction'; + +export class HelloEndpoint extends BaseEndpoint { + public async getHelloCounter(ctx: ModuleEndpointContext): Promise { + const counterSubStore = this.stores.get(CounterStore); + + const helloCounter = await counterSubStore.get(ctx, counterKey); + + return helloCounter; + } + + public async getReactions(ctx: ModuleEndpointContext): Promise { + const reactionSubStore = this.stores.get(ReactionStore); + + const { address } = ctx.params; + if (typeof address !== 'string') { + throw new Error('Parameter address must be a string.'); + } + cryptography.address.validateLisk32Address(address); + + const reactions = 
await reactionSubStore.get( + ctx, + cryptography.address.getAddressFromLisk32Address(address), + ); + + return reactions; + } + + public async getHello(ctx: ModuleEndpointContext): Promise { + const messageSubStore = this.stores.get(MessageStore); + + const { address } = ctx.params; + if (typeof address !== 'string') { + throw new Error('Parameter address must be a string.'); + } + cryptography.address.validateLisk32Address(address); + const helloMessage = await messageSubStore.get( + ctx, + cryptography.address.getAddressFromLisk32Address(address), + ); + + return helloMessage; + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/.gitkeep b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/new_hello.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/new_hello.ts new file mode 100644 index 00000000000..a613a9d18c0 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/events/new_hello.ts @@ -0,0 +1,39 @@ +/* + * Copyright © 2022 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { BaseEvent } from 'lisk-sdk'; + +export const newHelloEventSchema = { + $id: '/hello/events/new_hello', + type: 'object', + required: ['senderAddress', 'message'], + properties: { + senderAddress: { + dataType: 'bytes', + fieldNumber: 1, + }, + message: { + dataType: 'string', + fieldNumber: 2, + }, + }, +}; + +export interface NewHelloEventData { + senderAddress: Buffer; + message: string; +} + +export class NewHelloEvent extends BaseEvent { + public schema = newHelloEventSchema; +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/method.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/method.ts new file mode 100644 index 00000000000..0ce458081f6 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/method.ts @@ -0,0 +1,14 @@ +import { BaseMethod, ImmutableMethodContext } from 'lisk-sdk'; +import { MessageStore, MessageStoreData } from './stores/message'; + +export class HelloMethod extends BaseMethod { + public async getHello( + methodContext: ImmutableMethodContext, + address: Buffer, + ): Promise { + const messageSubStore = this.stores.get(MessageStore); + const helloMessage = await messageSubStore.get(methodContext, address); + + return helloMessage; + } +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/module.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/module.ts new file mode 100644 index 00000000000..0711a02c6d0 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/module.ts @@ -0,0 +1,141 @@ +/* eslint-disable class-methods-use-this */ + +import { + validator, + BaseInteroperableModule, + BlockAfterExecuteContext, + BlockExecuteContext, + BlockVerifyContext, + GenesisBlockExecuteContext, + InsertAssetContext, + ModuleInitArgs, + ModuleMetadata, + TransactionExecuteContext, + TransactionVerifyContext, + utils, + VerificationResult, +} from 'lisk-sdk'; +import { 
CreateHelloCommand } from './commands/create_hello_command'; +import { ReactCCCommand } from './cc_commands/react_command'; +import { HelloEndpoint } from './endpoint'; +import { NewHelloEvent } from './events/new_hello'; +import { HelloMethod } from './method'; +import { + configSchema, + getHelloCounterResponseSchema, + getHelloRequestSchema, + getHelloResponseSchema, +} from './schema'; +import { CounterStore } from './stores/counter'; +import { MessageStore } from './stores/message'; +import { ReactionStore, reactionStoreSchema } from './stores/reaction'; +import { ModuleConfigJSON } from './types'; +import { HelloInteroperableMethod } from './cc_method'; + +export const defaultConfig = { + maxMessageLength: 256, + minMessageLength: 3, + blacklist: ['illegalWord1'], +}; + +export class HelloModule extends BaseInteroperableModule { + public constructor() { + super(); + // registration of stores and events + this.stores.register(CounterStore, new CounterStore(this.name, 0)); + this.stores.register(MessageStore, new MessageStore(this.name, 1)); + this.stores.register(ReactionStore, new ReactionStore(this.name, 2)); + this.events.register(NewHelloEvent, new NewHelloEvent(this.name)); + } + + public metadata(): ModuleMetadata { + return { + endpoints: [ + { + name: this.endpoint.getHello.name, + request: getHelloRequestSchema, + response: getHelloResponseSchema, + }, + { + name: this.endpoint.getReactions.name, + request: getHelloRequestSchema, + response: reactionStoreSchema, + }, + { + name: this.endpoint.getHelloCounter.name, + response: getHelloCounterResponseSchema, + }, + ], + commands: this.commands.map(command => ({ + name: command.name, + params: command.schema, + })), + events: this.events.values().map(v => ({ + name: v.name, + data: v.schema, + })), + assets: [], + stores: [], + }; + } + + // Lifecycle hooks + // eslint-disable-next-line @typescript-eslint/require-await + public async init(args: ModuleInitArgs): Promise { + // Get the module config 
defined in the config.json file + const { moduleConfig } = args; + // Overwrite the default module config with values from config.json, if set + const config = utils.objects.mergeDeep({}, defaultConfig, moduleConfig) as ModuleConfigJSON; + // Validate the provided config with the config schema + validator.validator.validate(configSchema, config); + // Call the command init() method with config values as parameters + this.commands[0].init(config).catch(err => { + // eslint-disable-next-line no-console + console.log('Error: ', err); + }); + } + + public async insertAssets(_context: InsertAssetContext) { + // initialize block generation, add asset + } + + public async verifyAssets(_context: BlockVerifyContext): Promise { + // verify block + } + + // Lifecycle hooks + // eslint-disable-next-line @typescript-eslint/require-await + public async verifyTransaction(context: TransactionVerifyContext): Promise { + // verify transaction will be called multiple times in the transaction pool + context.logger.info('TX VERIFICATION'); + const result = { + status: 1, + }; + return result; + } + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async beforeCommandExecute(_context: TransactionExecuteContext): Promise {} + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async afterCommandExecute(_context: TransactionExecuteContext): Promise {} + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async initGenesisState(_context: GenesisBlockExecuteContext): Promise {} + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async finalizeGenesisState(_context: GenesisBlockExecuteContext): Promise {} + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async beforeTransactionsExecute(_context: BlockExecuteContext): Promise {} + + // eslint-disable-next-line @typescript-eslint/no-empty-function + public async afterTransactionsExecute(_context: 
BlockAfterExecuteContext): Promise {} + + public endpoint = new HelloEndpoint(this.stores, this.offchainStores); + public method = new HelloMethod(this.stores, this.events); + public commands = [new CreateHelloCommand(this.stores, this.events)]; + public reactCCCommand = new ReactCCCommand(this.stores, this.events); + public crossChainMethod = new HelloInteroperableMethod(this.stores, this.events); + public crossChainCommand = [this.reactCCCommand]; +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/schema.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/schema.ts new file mode 100644 index 00000000000..9909d742031 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/schema.ts @@ -0,0 +1,116 @@ +export interface CreateHelloParams { + message: string; +} + +export const createHelloSchema = { + $id: 'hello/createHello-params', + title: 'CreateHelloCommand transaction parameter for the Hello module', + type: 'object', + required: ['message'], + properties: { + message: { + dataType: 'string', + fieldNumber: 1, + minLength: 3, + maxLength: 256, + }, + }, +}; + +export const configSchema = { + $id: '/hello/config', + type: 'object', + properties: { + maxMessageLength: { + type: 'integer', + format: 'uint32', + }, + minMessageLength: { + type: 'integer', + format: 'uint32', + }, + blacklist: { + type: 'array', + items: { + type: 'string', + minLength: 1, + maxLength: 40, + }, + }, + }, + required: ['maxMessageLength', 'minMessageLength', 'blacklist'], +}; + +export const getHelloCounterResponseSchema = { + $id: 'modules/hello/endpoint/getHelloCounter', + type: 'object', + required: ['counter'], + properties: { + counter: { + type: 'number', + format: 'uint32', + }, + }, +}; + +export const getHelloResponseSchema = { + $id: 'modules/hello/endpoint/getHello', + type: 'object', + required: ['message'], + properties: { + message: { + type: 'string', + format: 'utf8', + }, + }, +}; + +export 
const getHelloRequestSchema = { + $id: 'modules/hello/endpoint/getHelloRequest', + type: 'object', + required: ['address'], + properties: { + address: { + type: 'string', + format: 'lisk32', + }, + }, +}; + +/** + * Parameters of the cross-chain token transfer command + */ +export const crossChainReactParamsSchema = { + /** The unique identifier of the schema. */ + $id: '/lisk/hello/ccReactParams', + type: 'object', + /** The required parameters for the command. */ + required: ['reactionType', 'helloMessageID', 'data'], + /** A list describing the available parameters for the command. */ + properties: { + reactionType: { + dataType: 'uint32', + fieldNumber: 1, + }, + /** + * ID of the message. + */ + helloMessageID: { + dataType: 'string', + fieldNumber: 2, + }, + /** Optional field for data / messages. */ + data: { + dataType: 'string', + fieldNumber: 3, + minLength: 0, + maxLength: 64, + }, + }, +}; + +export interface CCReactMessageParams { + reactionType: number; + helloMessageID: string; + data: string; +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/.gitkeep b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/counter.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/counter.ts new file mode 100644 index 00000000000..254e4976b68 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/counter.ts @@ -0,0 +1,36 @@ +/* + * Copyright © 2022 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. 
+ * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ +import { BaseStore } from 'lisk-sdk'; + +export interface CounterStoreData { + counter: number; +} + +export const counterKey = Buffer.alloc(0); + +export const counterStoreSchema = { + $id: '/hello/counter', + type: 'object', + required: ['counter'], + properties: { + counter: { + dataType: 'uint32', + fieldNumber: 1, + }, + }, +}; + +export class CounterStore extends BaseStore { + public schema = counterStoreSchema; +} diff --git a/framework/src/modules/dynamic_rewards/schemas.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/message.ts similarity index 56% rename from framework/src/modules/dynamic_rewards/schemas.ts rename to examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/message.ts index 4afcd15865c..55e6af81051 100644 --- a/framework/src/modules/dynamic_rewards/schemas.ts +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/message.ts @@ -1,5 +1,5 @@ /* - * Copyright © 2021 Lisk Foundation + * Copyright © 2022 Lisk Foundation * * See the LICENSE file at the top-level directory of this distribution * for licensing information. @@ -11,17 +11,24 @@ * * Removal or modification of this copyright notice is prohibited. 
*/ -import { configSchema as rewardConfigSchema } from '../reward/schemas'; +import { BaseStore } from 'lisk-sdk'; -export const configSchema = { - $id: '/dynamicReward/config', +export interface MessageStoreData { + message: string; +} + +export const messageStoreSchema = { + $id: '/hello/message', type: 'object', + required: ['message'], properties: { - ...rewardConfigSchema.properties, - factorMinimumRewardActiveValidators: { - type: 'integer', - minimum: 1, + message: { + dataType: 'string', + fieldNumber: 1, }, }, - required: [...rewardConfigSchema.required, 'factorMinimumRewardActiveValidators'], }; + +export class MessageStore extends BaseStore { + public schema = messageStoreSchema; +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/reaction.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/reaction.ts new file mode 100644 index 00000000000..867c4a97e99 --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/stores/reaction.ts @@ -0,0 +1,32 @@ +import { BaseStore } from 'lisk-sdk'; + +export interface ReactionStoreData { + reactions: { + like: Buffer[]; + }; +} + +export const reactionStoreSchema = { + $id: '/hello/reaction', + type: 'object', + required: ['reactions'], + properties: { + reactions: { + type: 'object', + fieldNumber: 1, + properties: { + like: { + type: 'array', + fieldNumber: 1, + items: { + dataType: 'bytes', + }, + }, + }, + }, + }, +}; + +export class ReactionStore extends BaseStore { + public schema = reactionStoreSchema; +} diff --git a/examples/interop/pos-sidechain-example-one/src/app/modules/hello/types.ts b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/types.ts new file mode 100644 index 00000000000..d1c1ddc9f3f --- /dev/null +++ b/examples/interop/pos-sidechain-example-one/src/app/modules/hello/types.ts @@ -0,0 +1,9 @@ +import { JSONObject } from 'lisk-sdk'; + +export interface ModuleConfig { + maxMessageLength: number; 
+ minMessageLength: number; + blacklist: string[]; +} + +export type ModuleConfigJSON = JSONObject; diff --git a/examples/interop/pos-sidechain-example-two/config/default/config.json b/examples/interop/pos-sidechain-example-two/config/default/config.json index fcf83e45fd0..b918eb9de54 100644 --- a/examples/interop/pos-sidechain-example-two/config/default/config.json +++ b/examples/interop/pos-sidechain-example-two/config/default/config.json @@ -48,6 +48,10 @@ "ccuFee": "500000000", "receivingChainIPCPath": "~/.lisk/mainchain-node-two", "registrationHeight": 10 + }, + "dashboard": { + "applicationUrl": "ws://127.0.0.1:7886/rpc-ws", + "port": 4007 } } } diff --git a/examples/interop/pos-sidechain-example-two/config/scripts/mainchain_registration.ts b/examples/interop/pos-sidechain-example-two/config/scripts/mainchain_registration.ts index 54a30dc39cc..569177d7285 100644 --- a/examples/interop/pos-sidechain-example-two/config/scripts/mainchain_registration.ts +++ b/examples/interop/pos-sidechain-example-two/config/scripts/mainchain_registration.ts @@ -1,7 +1,10 @@ -import { keys as sidechainValidatorsKeys } from '../default/dev-validators.json'; import { keys as sidechainDevValidators } from '../default/dev-validators.json'; import { registerMainchain } from '../../../common/mainchain_registration'; (async () => { - await registerMainchain('two', sidechainDevValidators, sidechainValidatorsKeys); + await registerMainchain( + 'mainchain-node-two', + 'pos-sidechain-example-two', + sidechainDevValidators, + ); })(); diff --git a/examples/interop/pos-sidechain-example-two/config/scripts/transfer_lsk_mainchain.ts b/examples/interop/pos-sidechain-example-two/config/scripts/transfer_lsk_mainchain.ts index fd7765b979e..596e8cfda0b 100644 --- a/examples/interop/pos-sidechain-example-two/config/scripts/transfer_lsk_mainchain.ts +++ b/examples/interop/pos-sidechain-example-two/config/scripts/transfer_lsk_mainchain.ts @@ -11,7 +11,7 @@ type ModulesMetadata = [ (async () => { const 
{ address } = cryptography; - const nodeAlias = 'one'; + const nodeAlias = 'two'; const tokenID = Buffer.from('0400000000000000', 'hex'); const mainchainID = Buffer.from('04000000', 'hex'); const recipientLSKAddress = 'lskzjzeam6szx4a65sxgavr98m9h4kctcx85nvy7h'; diff --git a/examples/interop/pos-sidechain-example-two/src/app/app.ts b/examples/interop/pos-sidechain-example-two/src/app/app.ts index d9dc8b2ad28..62a607b9357 100644 --- a/examples/interop/pos-sidechain-example-two/src/app/app.ts +++ b/examples/interop/pos-sidechain-example-two/src/app/app.ts @@ -1,9 +1,14 @@ import { Application, PartialApplicationConfig } from 'lisk-sdk'; import { registerModules } from './modules'; import { registerPlugins } from './plugins'; +import { ReactModule } from './modules/react/module'; export const getApplication = (config: PartialApplicationConfig): Application => { - const { app } = Application.defaultApplication(config); + const { app, method } = Application.defaultApplication(config); + const reactModule = new ReactModule(); + app.registerModule(reactModule); + app.registerInteroperableModule(reactModule); + reactModule.addDependencies(method.interoperability); registerModules(app); registerPlugins(app); diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/cc_method.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/cc_method.ts new file mode 100644 index 00000000000..b7a881b33db --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/cc_method.ts @@ -0,0 +1,17 @@ +/* + * Copyright © 2022 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. 
+ * + * Removal or modification of this copyright notice is prohibited. + */ + +import { BaseCCMethod } from 'lisk-sdk'; + +export class ReactInteroperableMethod extends BaseCCMethod {} diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/commands/react_command.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/commands/react_command.ts new file mode 100644 index 00000000000..fc737158a6c --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/commands/react_command.ts @@ -0,0 +1,108 @@ +/* eslint-disable class-methods-use-this */ + +import { + BaseCommand, + CommandVerifyContext, + CommandExecuteContext, + VerificationResult, + VerifyStatus, + codec, +} from 'lisk-sdk'; +import { ReactMethod } from '../method'; +import { CROSS_CHAIN_COMMAND_NAME_REACT } from '../constants'; +import { + crossChainReactParamsSchema, + CCReactMessageParams, + crossChainReactMessageSchema, +} from '../schemas'; +import { InteroperabilityMethod } from '../types'; + +interface Params { + reactionType: number; + helloMessageID: string; + amount: bigint; + receivingChainID: Buffer; + data: string; + messageFee: bigint; + messageFeeTokenID: Buffer; +} + +export class ReactCrossChainCommand extends BaseCommand { + private _interoperabilityMethod!: InteroperabilityMethod; + // private _moduleName!: string; + // private _method!: ReactMethod; + public schema = crossChainReactParamsSchema; + + public get name(): string { + return CROSS_CHAIN_COMMAND_NAME_REACT; + } + + public init(args: { + moduleName: string; + method: ReactMethod; + interoperabilityMethod: InteroperabilityMethod; + }) { + // this._moduleName = args.moduleName; + // this._method = args.method; + this._interoperabilityMethod = args.interoperabilityMethod; + } + + public addDependencies(interoperabilityMethod: InteroperabilityMethod) { + this._interoperabilityMethod = interoperabilityMethod; + } + + // eslint-disable-next-line 
@typescript-eslint/require-await + public async verify(context: CommandVerifyContext): Promise { + const { params, logger } = context; + + logger.info('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'); + logger.info(params); + logger.info('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'); + + try { + if (params.receivingChainID.equals(context.chainID)) { + throw new Error('Receiving chain cannot be the sending chain.'); + } + + const messageFeeTokenID = await this._interoperabilityMethod.getMessageFeeTokenID( + context.getMethodContext(), + params.receivingChainID, + ); + if (!messageFeeTokenID.equals(params.messageFeeTokenID)) { + throw new Error('Invalid message fee Token ID.'); + } + } catch (err) { + return { + status: VerifyStatus.FAIL, + error: err as Error, + }; + } + return { + status: VerifyStatus.OK, + }; + } + + public async execute(context: CommandExecuteContext): Promise { + const { + params, + transaction: { senderAddress }, + } = context; + + const reactCCM: CCReactMessageParams = { + reactionType: params.reactionType, + data: params.data, + helloMessageID: params.helloMessageID, + }; + + await this._interoperabilityMethod.send( + context.getMethodContext(), + senderAddress, + 'hello', + CROSS_CHAIN_COMMAND_NAME_REACT, + params.receivingChainID, + params.messageFee, + codec.encode(crossChainReactMessageSchema, reactCCM), + context.header.timestamp, + ); + } +} diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/constants.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/constants.ts new file mode 100644 index 00000000000..9b427343540 --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/constants.ts @@ -0,0 +1,4 @@ +export const CROSS_CHAIN_COMMAND_NAME_REACT = 'reactCrossChain'; + +export const MAX_RESERVED_ERROR_STATUS = 63; +export const CCM_STATUS_OK = 0; diff --git 
a/examples/interop/pos-sidechain-example-two/src/app/modules/react/endpoint.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/endpoint.ts new file mode 100644 index 00000000000..160b0b59d92 --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/endpoint.ts @@ -0,0 +1,3 @@ +import { BaseEndpoint } from 'lisk-sdk'; + +export class ReactEndpoint extends BaseEndpoint {} diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/events/.gitkeep b/examples/interop/pos-sidechain-example-two/src/app/modules/react/events/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/method.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/method.ts new file mode 100644 index 00000000000..69f80f11e97 --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/method.ts @@ -0,0 +1,3 @@ +import { BaseMethod } from 'lisk-sdk'; + +export class ReactMethod extends BaseMethod {} diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/module.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/module.ts new file mode 100644 index 00000000000..5d5aa03d6bc --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/module.ts @@ -0,0 +1,85 @@ +/* eslint-disable class-methods-use-this */ +/* eslint-disable @typescript-eslint/member-ordering */ + +import { BaseInteroperableModule, ModuleMetadata, ModuleInitArgs } from 'lisk-sdk'; +import { ReactCrossChainCommand } from './commands/react_command'; +import { ReactEndpoint } from './endpoint'; +import { ReactMethod } from './method'; +import { ReactInteroperableMethod } from './cc_method'; +import { InteroperabilityMethod } from './types'; + +export class ReactModule extends BaseInteroperableModule { + public endpoint = new ReactEndpoint(this.stores, this.offchainStores); + public method = 
new ReactMethod(this.stores, this.events); + public commands = [new ReactCrossChainCommand(this.stores, this.events)]; + private _interoperabilityMethod!: InteroperabilityMethod; + + public crossChainMethod = new ReactInteroperableMethod(this.stores, this.events); + + /* public constructor() { + super(); + this.stores.register(ReactionStore, new ReactionStore(this.name, 0)); + } */ + + public metadata(): ModuleMetadata { + return { + ...this.baseMetadata(), + endpoints: [], + commands: this.commands.map(command => ({ + name: command.name, + params: command.schema, + })), + assets: [], + }; + } + + public addDependencies(interoperabilityMethod: InteroperabilityMethod) { + this._interoperabilityMethod = interoperabilityMethod; + } + + // Lifecycle hooks + // eslint-disable-next-line @typescript-eslint/require-await + public async init(_args: ModuleInitArgs) { + this.commands[0].init({ + interoperabilityMethod: this._interoperabilityMethod, + method: this.method, + moduleName: this.name, + }); + } + + // public async insertAssets(_context: InsertAssetContext) { + // // initialize block generation, add asset + // } + + // public async verifyAssets(_context: BlockVerifyContext): Promise { + // // verify block + // } + + // Lifecycle hooks + // public async verifyTransaction(_context: TransactionVerifyContext): Promise { + // verify transaction will be called multiple times in the transaction pool + // return { status: VerifyStatus.OK }; + // } + + // public async beforeCommandExecute(_context: TransactionExecuteContext): Promise { + // } + + // public async afterCommandExecute(_context: TransactionExecuteContext): Promise { + + // } + // public async initGenesisState(_context: GenesisBlockExecuteContext): Promise { + + // } + + // public async finalizeGenesisState(_context: GenesisBlockExecuteContext): Promise { + + // } + + // public async beforeTransactionsExecute(_context: BlockExecuteContext): Promise { + + // } + + // public async afterTransactionsExecute(_context: 
BlockAfterExecuteContext): Promise { + + // } +} diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/schemas.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/schemas.ts new file mode 100644 index 00000000000..35d9c693eec --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/schemas.ts @@ -0,0 +1,94 @@ +/** + * Parameters of the cross-chain token transfer command + */ +export const crossChainReactParamsSchema = { + /** The unique identifier of the schema. */ + $id: '/lisk/react/ccReactParams', + type: 'object', + /** The required parameters for the command. */ + required: [ + 'reactionType', + 'helloMessageID', + 'receivingChainID', + 'data', + 'messageFee', + 'messageFeeTokenID', + ], + /** A list describing the available parameters for the command. */ + properties: { + reactionType: { + dataType: 'uint32', + fieldNumber: 1, + }, + /** + * ID of the message. + */ + helloMessageID: { + dataType: 'string', + fieldNumber: 2, + }, + /** + * The chain ID of the receiving chain. + * + * `maxLength` and `minLength` are equal to 4. + */ + receivingChainID: { + dataType: 'bytes', + fieldNumber: 3, + minLength: 4, + maxLength: 4, + }, + /** Optional field for data / messages. */ + data: { + dataType: 'string', + fieldNumber: 4, + minLength: 0, + maxLength: 64, + }, + messageFee: { + dataType: 'uint64', + fieldNumber: 5, + }, + messageFeeTokenID: { + dataType: 'bytes', + fieldNumber: 6, + minLength: 8, + maxLength: 8, + }, + }, +}; + +export const crossChainReactMessageSchema = { + /** The unique identifier of the schema. */ + $id: '/lisk/ccReactMessage', + type: 'object', + /** The required parameters for the command. */ + required: ['reactionType', 'helloMessageID', 'data'], + /** A list describing the available parameters for the command. */ + properties: { + reactionType: { + dataType: 'uint32', + fieldNumber: 1, + }, + /** + * ID of the message. 
+ */ + helloMessageID: { + dataType: 'string', + fieldNumber: 2, + }, + /** Optional field for data / messages. */ + data: { + dataType: 'string', + fieldNumber: 3, + minLength: 0, + maxLength: 64, + }, + }, +}; + +export interface CCReactMessageParams { + reactionType: number; + helloMessageID: string; + data: string; +} diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/stores/.gitkeep b/examples/interop/pos-sidechain-example-two/src/app/modules/react/stores/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/examples/interop/pos-sidechain-example-two/src/app/modules/react/types.ts b/examples/interop/pos-sidechain-example-two/src/app/modules/react/types.ts new file mode 100644 index 00000000000..279823de2fe --- /dev/null +++ b/examples/interop/pos-sidechain-example-two/src/app/modules/react/types.ts @@ -0,0 +1,28 @@ +import { + MethodContext, + ImmutableMethodContext, + CCMsg, + ChannelData, + OwnChainAccount, +} from 'lisk-sdk'; + +export type TokenID = Buffer; + +export interface InteroperabilityMethod { + getOwnChainAccount(methodContext: ImmutableMethodContext): Promise; + send( + methodContext: MethodContext, + feeAddress: Buffer, + module: string, + crossChainCommand: string, + receivingChainID: Buffer, + fee: bigint, + parameters: Buffer, + timestamp?: number, + ): Promise; + error(methodContext: MethodContext, ccm: CCMsg, code: number): Promise; + terminateChain(methodContext: MethodContext, chainID: Buffer): Promise; + getChannel(methodContext: MethodContext, chainID: Buffer): Promise; + getMessageFeeTokenID(methodContext: ImmutableMethodContext, chainID: Buffer): Promise; + getMessageFeeTokenIDFromCCM(methodContext: ImmutableMethodContext, ccm: CCMsg): Promise; +} diff --git a/examples/interop/run_sidechains.json b/examples/interop/run_sidechains.json index b96ef8e9360..ea28f89c62e 100644 --- a/examples/interop/run_sidechains.json +++ b/examples/interop/run_sidechains.json @@ -3,12 +3,26 @@ { "name": 
"pos-sidechain-1", "script": "pos-sidechain-example-one/bin/run", - "args": ["start", "--api-ipc", "--api-http", "--enable-chain-connector-plugin"] + "args": [ + "start", + "--api-ipc", + "--api-http", + "--api-ws", + "--enable-dashboard-plugin", + "--enable-chain-connector-plugin" + ] }, { "name": "pos-sidechain-2", "script": "pos-sidechain-example-two/bin/run", - "args": ["start", "--api-ipc", "--api-http", "--enable-chain-connector-plugin"] + "args": [ + "start", + "--api-ipc", + "--api-http", + "--api-ws", + "--enable-dashboard-plugin", + "--enable-chain-connector-plugin" + ] } ] } diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/src/certificate_generation.ts b/framework-plugins/lisk-framework-chain-connector-plugin/src/certificate_generation.ts index 5d477d19298..da342b1a535 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/src/certificate_generation.ts +++ b/framework-plugins/lisk-framework-chain-connector-plugin/src/certificate_generation.ts @@ -32,7 +32,9 @@ export const getCertificateFromAggregateCommit = ( const blockHeader = blockHeaders.find(header => header.height === aggregateCommit.height); if (!blockHeader) { - throw new Error('No block header found for the given aggregate height.'); + throw new Error( + `No block header found for the given aggregate height ${aggregateCommit.height} when calling getCertificateFromAggregateCommit.`, + ); } return { @@ -55,7 +57,11 @@ export const checkChainOfTrust = ( ): boolean => { const blockHeader = blockHeaders.find(header => header.height === aggregateCommit.height - 1); if (!blockHeader) { - throw new Error('No block header found for the given aggregate height.'); + throw new Error( + `No block header found for the given the previous height ${ + aggregateCommit.height - 1 + } of aggregate commit at height ${aggregateCommit.height} when calling checkChainOfTrust.`, + ); } // Certificate signers and certificate threshold for aggregateCommit are those authenticated by the 
last certificate @@ -68,7 +74,11 @@ export const checkChainOfTrust = ( data.validatorsHash.equals(blockHeader.validatorsHash), ); if (!validatorData) { - throw new Error('No validators data found for the given validatorsHash.'); + throw new Error( + `No validators data found for the given validatorsHash ${blockHeader.validatorsHash.toString( + 'hex', + )}.`, + ); } for (let i = 0; i < validatorData.validators.length; i += 1) { @@ -100,14 +110,20 @@ export const getNextCertificateFromAggregateCommits = ( header => header.height === lastCertificate.height, ); if (!blockHeaderAtLastCertifiedHeight) { - throw new Error('No block header found for the last certified height.'); + throw new Error( + `No block header found for the last certified height ${lastCertificate.height}.`, + ); } const validatorDataAtLastCertifiedHeight = validatorsHashPreimage.find(data => data.validatorsHash.equals(blockHeaderAtLastCertifiedHeight?.validatorsHash), ); if (!validatorDataAtLastCertifiedHeight) { - throw new Error('No validatorsHash preimage data present for the given validatorsHash.'); + throw new Error( + `No validatorsHash preimage data present for the given validatorsHash ${blockHeaderAtLastCertifiedHeight?.validatorsHash.toString( + 'hex', + )}.`, + ); } const blsKeyToBFTWeight: Record = {}; diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/src/chain_connector_plugin.ts b/framework-plugins/lisk-framework-chain-connector-plugin/src/chain_connector_plugin.ts index d88aeda2f43..284eb49c3cb 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/src/chain_connector_plugin.ts +++ b/framework-plugins/lisk-framework-chain-connector-plugin/src/chain_connector_plugin.ts @@ -212,6 +212,7 @@ export class ChainConnectorPlugin extends BasePlugin let chainAccountJSON: ChainAccountJSON; // Save blockHeader, aggregateCommit, validatorsData and cross chain messages if any. 
try { + const nodeInfo = await this._sendingChainClient.node.getNodeInfo(); // Fetch last certificate from the receiving chain and update the _lastCertificate try { chainAccountJSON = await this._receivingChainClient.invoke( @@ -232,7 +233,7 @@ export class ChainConnectorPlugin extends BasePlugin await this._saveDataOnNewBlock(newBlockHeader); await this._initializeReceivingChainClient(); this.logger.error( - error, + { err: error as Error }, 'Error occurred while accessing receivingChainAPIClient but all data is saved on newBlock.', ); @@ -243,34 +244,41 @@ export class ChainConnectorPlugin extends BasePlugin const { aggregateCommits, blockHeaders, validatorsHashPreimage, crossChainMessages } = await this._saveDataOnNewBlock(newBlockHeader); - // When all the relevant data is saved successfully then try to create CCU - if (this._ccuFrequency <= newBlockHeader.height - this._lastCertificate.height) { - const computedCCUParams = await this._computeCCUParams( - blockHeaders, - aggregateCommits, - validatorsHashPreimage, - crossChainMessages, + const numOfBlocksSinceLastCertificate = newBlockHeader.height - this._lastCertificate.height; + if (nodeInfo.syncing || this._ccuFrequency > numOfBlocksSinceLastCertificate) { + this.logger.debug( + { + syncing: nodeInfo.syncing, + ccuFrequency: this._ccuFrequency, + nextPossibleCCUHeight: this._ccuFrequency - numOfBlocksSinceLastCertificate, + }, + 'No attempt to create CCU either due to ccuFrequency or the node is syncing', ); - if (computedCCUParams) { - try { - await this._submitCCU(codec.encode(ccuParamsSchema, computedCCUParams.ccuParams)); - // If CCU was sent successfully then save the lastSentCCM if any - // TODO: Add function to check on the receiving chain whether last sent CCM was accepted or not - if (computedCCUParams.lastCCMToBeSent) { - await this._chainConnectorStore.setLastSentCCM(computedCCUParams.lastCCMToBeSent); - } - } catch (error) { - this.logger.info( - { err: error }, - `Error occured while submitting 
CCU for the blockHeader at height: ${newBlockHeader.height}`, - ); - return; + return; + } + // When all the relevant data is saved successfully then try to create CCU + const computedCCUParams = await this._computeCCUParams( + blockHeaders, + aggregateCommits, + validatorsHashPreimage, + crossChainMessages, + ); + + if (computedCCUParams) { + try { + await this._submitCCU(codec.encode(ccuParamsSchema, computedCCUParams.ccuParams)); + // If CCU was sent successfully then save the lastSentCCM if any + // TODO: Add function to check on the receiving chain whether last sent CCM was accepted or not + if (computedCCUParams.lastCCMToBeSent) { + await this._chainConnectorStore.setLastSentCCM(computedCCUParams.lastCCMToBeSent); } - } else { + } catch (error) { this.logger.info( - `No valid CCU can be generated for the height: ${newBlockHeader.height}`, + { err: error }, + `Error occured while submitting CCU for the blockHeader at height: ${newBlockHeader.height}`, ); + return; } } } catch (error) { @@ -453,7 +461,11 @@ export class ChainConnectorPlugin extends BasePlugin if (this._lastCertificate.height === 0) { for (const aggregateCommit of aggregateCommits) { - if (aggregateCommit.height < this._registrationHeight) { + // If blockHeader corresponding to aggregateCommit height does not exist then try with the next aggregCommit. 
+ const blockHeaderExist = blockHeaders.find( + header => header.height === aggregateCommit.height, + ); + if (!blockHeaderExist || aggregateCommit.height < this._registrationHeight) { continue; } diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/certificate_generation.spec.ts b/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/certificate_generation.spec.ts index f14bb2cdf40..60c6f7ec4a9 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/certificate_generation.spec.ts +++ b/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/certificate_generation.spec.ts @@ -151,7 +151,9 @@ describe('certificate generation', () => { [lastCertifiedBlock], [validatorsDataAtLastCertifiedHeight], ), - ).toThrow('No block header found for the given aggregate height'); + ).toThrow( + 'No block header found for the given the previous height 5 of aggregate commit at height 6 when calling checkChainOfTrust.', + ); }); it('should throw error when there is no validatorsData at {aggregateCommit.height - 1}', () => { diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/plugin.spec.ts b/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/plugin.spec.ts index 291a2570e93..3e4c1f6f711 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/plugin.spec.ts +++ b/framework-plugins/lisk-framework-chain-connector-plugin/test/unit/plugin.spec.ts @@ -157,6 +157,7 @@ describe('ChainConnectorPlugin', () => { invoke: jest.fn(), subscribe: jest.fn(), connect: jest.fn(), + node: { getNodeInfo: jest.fn() }, }); const chainConnectorStoreMock = { @@ -228,6 +229,10 @@ describe('ChainConnectorPlugin', () => { chainID: ownChainID.toString('hex'), }); + jest.spyOn(sendingChainAPIClientMock.node, 'getNodeInfo').mockResolvedValue({ + syncing: false, + } as never); + when(receivingChainAPIClientMock.invoke) .calledWith('interoperability_getOwnChainAccount') 
.mockResolvedValue({ @@ -677,6 +682,47 @@ describe('ChainConnectorPlugin', () => { expect(chainConnectorPlugin['_submitCCU']).toHaveBeenCalled(); }); + it('should not computeCCUParams if node is syncing', async () => { + jest + .spyOn(certificateGenerationUtil, 'getNextCertificateFromAggregateCommits') + .mockReturnValue(sampleNextCertificate); + + jest.spyOn(testing.mocks.loggerMock, 'debug'); + await initChainConnectorPlugin(chainConnectorPlugin, defaultConfig); + chainConnectorPlugin['_apiClient'] = sendingChainAPIClientMock; + await chainConnectorPlugin.load(); + + const saveDataOnNewBlockMock = jest.fn(); + chainConnectorPlugin['_saveDataOnNewBlock'] = saveDataOnNewBlockMock; + saveDataOnNewBlockMock.mockResolvedValue({ + aggregateCommits: [], + blockHeaders: [], + validatorsHashPreimage: [], + crossChainMessages: [], + }); + jest.spyOn(sendingChainAPIClientMock.node, 'getNodeInfo').mockResolvedValue({ + syncing: true, + } as never); + await chainConnectorPlugin['_newBlockHandler']({ + blockHeader: block.header.toJSON(), + }); + + const numOfBlocksSinceLastCertificate = + block.header.height - chainConnectorPlugin['_lastCertificate'].height; + expect(chainConnectorPlugin['logger'].debug).toHaveBeenCalledWith( + { + syncing: true, + ccuFrequency: chainConnectorPlugin['_ccuFrequency'], + nextPossibleCCUHeight: + chainConnectorPlugin['_ccuFrequency'] - numOfBlocksSinceLastCertificate, + }, + 'No attempt to create CCU either due to ccuFrequency or the node is syncing', + ); + // For chain_newBlock and chain_deleteBlock + expect(sendingChainAPIClientMock.subscribe).toHaveBeenCalledTimes(2); + expect(chainConnectorPlugin['_submitCCU']).not.toHaveBeenCalled(); + }); + it('should invoke "chain_getEvents" on _sendingChainClient', async () => { jest .spyOn(certificateGenerationUtil, 'getNextCertificateFromAggregateCommits') @@ -830,6 +876,7 @@ describe('ChainConnectorPlugin', () => { * 5. 
consensus_getBFTHeights */ expect(sendingChainAPIClientMock.invoke).toHaveBeenCalledTimes(5); + expect(sendingChainAPIClientMock.node.getNodeInfo).toHaveBeenCalledTimes(1); /** * Two calls to below RPC through receivingChainAPIClient * 1. interoperability_getChainAccount: in load() function diff --git a/framework-plugins/lisk-framework-monitor-plugin/src/controllers/transactions.ts b/framework-plugins/lisk-framework-monitor-plugin/src/controllers/transactions.ts index 7facc6aaaf7..af3f211320b 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/src/controllers/transactions.ts +++ b/framework-plugins/lisk-framework-monitor-plugin/src/controllers/transactions.ts @@ -37,6 +37,7 @@ export const getTransactionStats = async ( state: SharedState, ): Promise => ({ transactions: state.transactions, - connectedPeers: (await client.invoke>('app_getConnectedPeers')).length, + connectedPeers: (await client.invoke>('network_getConnectedPeers')) + .length, averageReceivedTransactions: getAverage(state.transactions), }); diff --git a/framework/src/application.ts b/framework/src/application.ts index f9266997838..56eb4f86044 100644 --- a/framework/src/application.ts +++ b/framework/src/application.ts @@ -61,7 +61,7 @@ import { BaseInteroperableModule, MODULE_NAME_INTEROPERABILITY, } from './modules/interoperability'; -import { DynamicRewardMethod, DynamicRewardModule } from './modules/dynamic_rewards'; +import { DynamicRewardMethod, DynamicRewardModule } from './modules/dynamic_reward'; import { Engine } from './engine'; import { BaseInteroperabilityModule } from './modules/interoperability/base_interoperability_module'; diff --git a/framework/src/controller/constants.ts b/framework/src/controller/constants.ts index 524cdfd8660..3b6ed5d2b59 100644 --- a/framework/src/controller/constants.ts +++ b/framework/src/controller/constants.ts @@ -15,6 +15,6 @@ export const IPC_EVENTS = { RPC_EVENT: 'invoke', REGISTER_CHANNEL: 'registerChannel', - RPC_REQUEST_TIMEOUT: 2000, + 
RPC_REQUEST_TIMEOUT: 10000, }; -export const IPC_CONNECTION_TIME_OUT = 2000; +export const IPC_CONNECTION_TIMEOUT = 5000; diff --git a/framework/src/controller/controller.ts b/framework/src/controller/controller.ts index 6df0c21927c..6f4462b7ec6 100644 --- a/framework/src/controller/controller.ts +++ b/framework/src/controller/controller.ts @@ -30,6 +30,7 @@ import { import { Bus } from './bus'; import { BaseChannel, InMemoryChannel } from './channels'; import { IPCServer } from './ipc/ipc_server'; +import { IPC_EVENTS } from './constants'; export interface ControllerOptions { readonly appConfig: ApplicationConfigForPlugin; @@ -332,7 +333,7 @@ export class Controller { new Promise((_, reject) => { setTimeout(() => { reject(new Error('Child process plugin loading timeout')); - }, 2000); + }, IPC_EVENTS.RPC_REQUEST_TIMEOUT); }), ]); } @@ -366,6 +367,7 @@ export class Controller { 'message', ({ action, err }: { action: string; err?: Error }) => { if (action !== 'unloaded' && action !== 'unloadedWithError') { + resolve(); return; } delete this._childProcesses[name]; @@ -385,7 +387,7 @@ export class Controller { this._childProcesses[name].kill('SIGTERM'); delete this._childProcesses[name]; reject(new Error('Child process plugin unload timeout')); - }, 2000); + }, IPC_EVENTS.RPC_REQUEST_TIMEOUT); }), ]); } diff --git a/framework/src/controller/ipc/ipc_client.ts b/framework/src/controller/ipc/ipc_client.ts index f6243295cc3..1305bb0a2c9 100644 --- a/framework/src/controller/ipc/ipc_client.ts +++ b/framework/src/controller/ipc/ipc_client.ts @@ -14,7 +14,7 @@ import { Dealer, Publisher, Subscriber } from 'zeromq'; import { IPCSocket } from './ipc_socket'; -import { IPC_CONNECTION_TIME_OUT } from '../constants'; +import { IPC_CONNECTION_TIMEOUT } from '../constants'; interface ClientSocketPaths { readonly pub: string; @@ -58,7 +58,7 @@ export class IPCClient extends IPCSocket { 'IPC Pub Socket client connection timeout. 
Please check if IPC server is running.', ), ); - }, IPC_CONNECTION_TIME_OUT); + }, IPC_CONNECTION_TIMEOUT); this.pubSocket.events.on('bind:error', err => { reject(err); @@ -77,7 +77,7 @@ export class IPCClient extends IPCSocket { 'IPC Sub Socket client connection timeout. Please check if IPC server is running.', ), ); - }, IPC_CONNECTION_TIME_OUT); + }, IPC_CONNECTION_TIMEOUT); this.subSocket.events.on('bind:error', err => { reject(err); @@ -95,7 +95,7 @@ export class IPCClient extends IPCSocket { 'IPC Sub Socket client connection timeout. Please check if IPC server is running.', ), ); - }, IPC_CONNECTION_TIME_OUT); + }, IPC_CONNECTION_TIMEOUT); this.rpcClient.events.on('bind:error', err => { reject(err); diff --git a/framework/src/engine/bft/method.ts b/framework/src/engine/bft/method.ts index 1e9add09108..e91203210ce 100644 --- a/framework/src/engine/bft/method.ts +++ b/framework/src/engine/bft/method.ts @@ -218,13 +218,15 @@ export class BFTMethod { throw new Error('Invalid certificateThreshold input.'); } - sortValidatorsByBLSKey(validators); - const validatorsHash = computeValidatorsHash( - validators - .filter(v => v.bftWeight > BigInt(0)) - .map(v => ({ bftWeight: v.bftWeight, blsKey: v.blsKey })), - certificateThreshold, - ); + // Prepare a separate sorted list of validators for computing validatorsHash + // without modifying the existing validators array + const validatorsWithBFTWeight = validators + .filter(validator => validator.bftWeight > BigInt(0)) + .map(validator => ({ bftWeight: validator.bftWeight, blsKey: validator.blsKey })); + sortValidatorsByBLSKey(validatorsWithBFTWeight); + + const validatorsHash = computeValidatorsHash(validatorsWithBFTWeight, certificateThreshold); + const bftParams: BFTParameters = { prevoteThreshold: (BigInt(2) * aggregateBFTWeight) / BigInt(3) + BigInt(1), precommitThreshold, diff --git a/framework/src/engine/consensus/certificate_generation/commit_list.ts 
b/framework/src/engine/consensus/certificate_generation/commit_list.ts index d7f9ab30085..447fee63eb6 100644 --- a/framework/src/engine/consensus/certificate_generation/commit_list.ts +++ b/framework/src/engine/consensus/certificate_generation/commit_list.ts @@ -71,6 +71,10 @@ export class CommitList { const index = commitList.findIndex( c => c.blockID.equals(commit.blockID) && c.validatorAddress.equals(commit.validatorAddress), ); + // if the commit does not exist, do nothing + if (index < 0) { + return; + } commitList.splice(index, 1); diff --git a/framework/src/engine/consensus/certificate_generation/commit_pool.ts b/framework/src/engine/consensus/certificate_generation/commit_pool.ts index 434c8655b24..6ad181c0f09 100644 --- a/framework/src/engine/consensus/certificate_generation/commit_pool.ts +++ b/framework/src/engine/consensus/certificate_generation/commit_pool.ts @@ -104,7 +104,7 @@ export class CommitPool { } // Validation Step 2 - const maxRemovalHeight = await this._getMaxRemovalHeight(); + const maxRemovalHeight = await this.getMaxRemovalHeight(); if (commit.height <= maxRemovalHeight) { return false; } @@ -307,6 +307,13 @@ export class CommitPool { }; } + public async getMaxRemovalHeight() { + const blockHeader = await this._chain.dataAccess.getBlockHeaderByHeight( + this._chain.finalizedHeight, + ); + return Math.max(blockHeader.aggregateCommit.height, this._minCertifyHeight - 1); + } + private async _selectAggregateCommit(methodContext: StateStore): Promise { const { maxHeightCertified, maxHeightPrecommitted } = await this._bftMethod.getBFTHeights( methodContext, @@ -368,7 +375,7 @@ export class CommitPool { } private async _job(methodContext: StateStore): Promise { - const removalHeight = await this._getMaxRemovalHeight(); + const removalHeight = await this.getMaxRemovalHeight(); const currentHeight = this._chain.lastBlock.header.height; const { maxHeightPrecommitted } = await this._bftMethod.getBFTHeights(methodContext); @@ -515,13 +522,6 @@ 
export class CommitPool { return deleteHeights; } - private async _getMaxRemovalHeight() { - const blockHeader = await this._chain.dataAccess.getBlockHeaderByHeight( - this._chain.finalizedHeight, - ); - return blockHeader.aggregateCommit.height; - } - private _getAllCommits(): SingleCommit[] { // Flattened list of all the single commits from both gossiped and non gossiped list sorted by ascending order of height return [ diff --git a/framework/src/engine/consensus/consensus.ts b/framework/src/engine/consensus/consensus.ts index 41ac71952e8..31646c73d6c 100644 --- a/framework/src/engine/consensus/consensus.ts +++ b/framework/src/engine/consensus/consensus.ts @@ -396,10 +396,7 @@ export class Consensus { } public async getMaxRemovalHeight(): Promise { - const finalizedBlockHeader = await this._chain.dataAccess.getBlockHeaderByHeight( - this._chain.finalizedHeight, - ); - return finalizedBlockHeader.aggregateCommit.height; + return this._commitPool.getMaxRemovalHeight(); } private async _execute(block: Block, peerID: string): Promise { diff --git a/framework/src/engine/engine.ts b/framework/src/engine/engine.ts index aa427d092aa..72349538789 100644 --- a/framework/src/engine/engine.ts +++ b/framework/src/engine/engine.ts @@ -137,6 +137,7 @@ export class Engine { await this._network.stop(); await this._generator.stop(); await this._consensus.stop(); + this._legacyChainHandler.stop(); this._rpcServer.stop(); this._closeDB(); this._logger.info('Engine cleanup completed'); @@ -228,6 +229,7 @@ export class Engine { const legacyEndpoint = new LegacyEndpoint({ db: this._legacyDB, + legacyConfig: this._config.legacy, }); const chainEndpoint = new ChainEndpoint({ diff --git a/framework/src/engine/generator/endpoint.ts b/framework/src/engine/generator/endpoint.ts index 77ffe2de699..a31225ee6df 100644 --- a/framework/src/engine/generator/endpoint.ts +++ b/framework/src/engine/generator/endpoint.ts @@ -53,6 +53,7 @@ import { RequestContext } from '../rpc/rpc_server'; import { 
ABI } from '../../abi'; import { JSONObject } from '../../types'; import { NotFoundError } from './errors'; +import { SingleCommitHandler } from './single_commit_handler'; interface EndpointArgs { keypair: dataStructures.BufferMap; @@ -64,6 +65,7 @@ interface EndpointArgs { interface EndpointInit { generatorDB: Database; + singleCommitHandler: SingleCommitHandler; genesisHeight: number; } @@ -77,6 +79,7 @@ export class Endpoint { private _generatorDB!: Database; private _genesisHeight!: number; + private _singleCommitHandler!: SingleCommitHandler; public constructor(args: EndpointArgs) { this._keypairs = args.keypair; @@ -89,6 +92,7 @@ export class Endpoint { public init(args: EndpointInit) { this._generatorDB = args.generatorDB; this._genesisHeight = args.genesisHeight; + this._singleCommitHandler = args.singleCommitHandler; } public async getStatus(_ctx: RequestContext): Promise { @@ -96,8 +100,11 @@ export class Endpoint { const list = await getGeneratedInfo(generatorStore); const status = []; for (const info of list) { + const keys = this._keypairs.get(info.address); status.push({ ...info, + generatorKey: keys?.publicKey.toString('hex') ?? '', + blsKey: keys?.blsPublicKey.toString('hex') ?? 
'', address: cryptoAddress.getLisk32AddressFromAddress(info.address), enabled: this._keypairs.has(info.address), }); @@ -196,6 +203,8 @@ export class Endpoint { ); } + await this._singleCommitHandler.initSingleCommits(address); + ctx.logger.info(`Block generation enabled on address: ${req.address}`); return { diff --git a/framework/src/engine/generator/generator.ts b/framework/src/engine/generator/generator.ts index c27dcd1914c..931971a16af 100644 --- a/framework/src/engine/generator/generator.ts +++ b/framework/src/engine/generator/generator.ts @@ -74,6 +74,7 @@ import { BFTModule } from '../bft'; import { isEmptyConsensusUpdate } from '../consensus'; import { getPathFromDataPath } from '../../utils/path'; import { defaultMetrics } from '../metrics/metrics'; +import { SingleCommitHandler } from './single_commit_handler'; interface GeneratorArgs { config: EngineConfig; @@ -111,7 +112,6 @@ export class Generator { private readonly _forgingStrategy: HighFeeGenerationStrategy; private readonly _blockTime: number; private readonly _metrics = { - signedCommits: defaultMetrics.counter('generator_signedCommits'), blockGeneration: defaultMetrics.counter('generator_blockGeneration'), }; @@ -119,6 +119,7 @@ export class Generator { private _generatorDB!: Database; private _blockchainDB!: Database; private _genesisHeight!: number; + private _singleCommitHandler!: SingleCommitHandler; public constructor(args: GeneratorArgs) { this._abi = args.abi; @@ -174,12 +175,22 @@ export class Generator { this._blockchainDB = args.blockchainDB; this._genesisHeight = args.genesisHeight; + this._singleCommitHandler = new SingleCommitHandler( + this._logger, + this._chain, + this._consensus, + this._bft, + this._keypairs, + this._blockchainDB, + ); + this._broadcaster.init({ logger: this._logger, }); this._endpoint.init({ generatorDB: this._generatorDB, genesisHeight: this._genesisHeight, + singleCommitHandler: this._singleCommitHandler, }); this._networkEndpoint.init({ logger: this._logger, 
@@ -209,14 +220,7 @@ export class Generator { this.events.emit(GENERATOR_EVENT_NEW_TRANSACTION, e); }); - const stateStore = new StateStore(this._blockchainDB); - - // On node start, it re generates certificate from maxRemovalHeight to maxHeightPrecommitted. - // in the _handleFinalizedHeightChanged, it loops between maxRemovalHeight + 1 and maxHeightPrecommitted. - // @see https://github.com/LiskHQ/lips/blob/main/proposals/lip-0061.md#initial-single-commit-creation - const maxRemovalHeight = await this._consensus.getMaxRemovalHeight(); - const { maxHeightPrecommitted } = await this._bft.method.getBFTHeights(stateStore); - await Promise.all(this._handleFinalizedHeightChanged(maxRemovalHeight, maxHeightPrecommitted)); + await this._singleCommitHandler.initAllSingleCommits(); } public get endpoint(): Endpoint { @@ -258,9 +262,9 @@ export class Generator { this._consensus.events.on( CONSENSUS_EVENT_FINALIZED_HEIGHT_CHANGED, ({ from, to }: { from: number; to: number }) => { - Promise.all(this._handleFinalizedHeightChanged(from, to)).catch((err: Error) => - this._logger.error({ err }, 'Fail to certify single commit'), - ); + this._singleCommitHandler + .handleFinalizedHeightChanged(from, to) + .catch((err: Error) => this._logger.error({ err }, 'Fail to certify single commit')); }, ); } @@ -645,82 +649,6 @@ export class Generator { return generatedBlock; } - private _handleFinalizedHeightChanged(from: number, to: number): Promise[] { - if (from >= to) { - return []; - } - const promises = []; - const stateStore = new StateStore(this._blockchainDB); - for (const [address, pairs] of this._keypairs.entries()) { - for (let height = from + 1; height < to; height += 1) { - promises.push( - this._certifySingleCommitForChangedHeight( - stateStore, - height, - address, - pairs.blsPublicKey, - pairs.blsSecretKey, - ), - ); - } - promises.push( - this._certifySingleCommit(stateStore, to, address, pairs.blsPublicKey, pairs.blsSecretKey), - ); - } - return promises; - } - - private 
async _certifySingleCommitForChangedHeight( - stateStore: StateStore, - height: number, - generatorAddress: Buffer, - blsPK: Buffer, - blsSK: Buffer, - ): Promise { - const paramExist = await this._bft.method.existBFTParameters(stateStore, height + 1); - if (!paramExist) { - return; - } - await this._certifySingleCommit(stateStore, height, generatorAddress, blsPK, blsSK); - } - - private async _certifySingleCommit( - stateStore: StateStore, - height: number, - generatorAddress: Buffer, - blsPK: Buffer, - blsSK: Buffer, - ): Promise { - const params = await this._bft.method.getBFTParametersActiveValidators(stateStore, height); - const registeredValidator = params.validators.find(v => v.address.equals(generatorAddress)); - if (!registeredValidator) { - return; - } - if (!registeredValidator.blsKey.equals(blsPK)) { - this._logger.warn( - { address: addressUtil.getLisk32AddressFromAddress(generatorAddress) }, - 'Validator does not have registered BLS key', - ); - return; - } - - const blockHeader = await this._chain.dataAccess.getBlockHeaderByHeight(height); - const validatorInfo = { - address: generatorAddress, - blsPublicKey: blsPK, - blsSecretKey: blsSK, - }; - this._consensus.certifySingleCommit(blockHeader, validatorInfo); - this._logger.debug( - { - height, - generator: addressUtil.getLisk32AddressFromAddress(generatorAddress), - }, - 'Certified single commit', - ); - this._metrics.signedCommits.inc(1); - } - private async _executeTransactions( contextID: Buffer, header: BlockHeader, diff --git a/framework/src/engine/generator/schemas.ts b/framework/src/engine/generator/schemas.ts index 661686af644..cd638db1865 100644 --- a/framework/src/engine/generator/schemas.ts +++ b/framework/src/engine/generator/schemas.ts @@ -85,6 +85,8 @@ export interface GetStatusResponse { height: number; maxHeightPrevoted: number; maxHeightGenerated: number; + blsKey: string; + generatorKey: string; enabled: boolean; }[]; } diff --git 
a/framework/src/engine/generator/single_commit_handler.ts b/framework/src/engine/generator/single_commit_handler.ts new file mode 100644 index 00000000000..64288d1f4cc --- /dev/null +++ b/framework/src/engine/generator/single_commit_handler.ts @@ -0,0 +1,166 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { Chain, StateStore } from '@liskhq/lisk-chain'; +import { dataStructures } from '@liskhq/lisk-utils'; +import { address as addressUtil } from '@liskhq/lisk-cryptography'; +import { Database } from '@liskhq/lisk-db'; +import { BFTModule } from '../bft'; +import { Consensus, Keypair } from './types'; +import { Logger } from '../../logger'; +import { defaultMetrics } from '../metrics/metrics'; + +export class SingleCommitHandler { + private readonly _logger: Logger; + private readonly _bft: BFTModule; + private readonly _chain: Chain; + private readonly _consensus: Consensus; + private readonly _keypairs: dataStructures.BufferMap; + private readonly _blockchainDB: Database; + + private readonly _metrics = { + signedCommits: defaultMetrics.counter('generator_signedCommits'), + }; + + public constructor( + logger: Logger, + chain: Chain, + consensus: Consensus, + bft: BFTModule, + keypairs: dataStructures.BufferMap, + blockchainDB: Database, + ) { + this._logger = logger; + this._chain = chain; + this._consensus = consensus; + this._bft = bft; + this._keypairs = keypairs; + this._blockchainDB = blockchainDB; + } + + // On node start, it re generates certificate from maxRemovalHeight to maxHeightPrecommitted. 
+ // in the _handleFinalizedHeightChanged, it loops between maxRemovalHeight + 1 and maxHeightPrecommitted. + // @see https://github.com/LiskHQ/lips/blob/main/proposals/lip-0061.md#initial-single-commit-creation + public async initAllSingleCommits() { + for (const [address] of this._keypairs.entries()) { + await this.initSingleCommits(address); + } + } + + public async initSingleCommits(address: Buffer) { + const maxRemovalHeight = await this._consensus.getMaxRemovalHeight(); + const stateStore = new StateStore(this._blockchainDB); + const { maxHeightPrecommitted } = await this._bft.method.getBFTHeights(stateStore); + await Promise.all( + this._handleFinalizedHeightChanged(address, maxRemovalHeight, maxHeightPrecommitted), + ); + } + + public async handleFinalizedHeightChanged(from: number, to: number): Promise { + const maxRemovalHeight = await this._consensus.getMaxRemovalHeight(); + const cappedFrom = Math.max(maxRemovalHeight, from); + if (cappedFrom >= to) { + return; + } + for (const [address] of this._keypairs.entries()) { + await Promise.all(this._handleFinalizedHeightChanged(address, cappedFrom, to)); + } + } + + private _handleFinalizedHeightChanged( + address: Buffer, + from: number, + to: number, + ): Promise[] { + if (from >= to) { + return []; + } + const promises = []; + const stateStore = new StateStore(this._blockchainDB); + const pairs = this._keypairs.get(address); + if (!pairs) { + this._logger.warn( + { address: addressUtil.getLisk32AddressFromAddress(address) }, + 'Validator does not have registered BLS key on this node', + ); + return []; + } + for (let height = from + 1; height < to; height += 1) { + promises.push( + this._certifySingleCommitForChangedHeight( + stateStore, + height, + address, + pairs.blsPublicKey, + pairs.blsSecretKey, + ), + ); + } + promises.push( + this._certifySingleCommit(stateStore, to, address, pairs.blsPublicKey, pairs.blsSecretKey), + ); + return promises; + } + + private async _certifySingleCommitForChangedHeight( 
+ stateStore: StateStore, + height: number, + generatorAddress: Buffer, + blsPK: Buffer, + blsSK: Buffer, + ): Promise { + const paramExist = await this._bft.method.existBFTParameters(stateStore, height + 1); + if (!paramExist) { + return; + } + await this._certifySingleCommit(stateStore, height, generatorAddress, blsPK, blsSK); + } + + private async _certifySingleCommit( + stateStore: StateStore, + height: number, + generatorAddress: Buffer, + blsPK: Buffer, + blsSK: Buffer, + ): Promise { + const params = await this._bft.method.getBFTParametersActiveValidators(stateStore, height); + const registeredValidator = params.validators.find(v => v.address.equals(generatorAddress)); + if (!registeredValidator) { + return; + } + if (!registeredValidator.blsKey.equals(blsPK)) { + this._logger.warn( + { address: addressUtil.getLisk32AddressFromAddress(generatorAddress) }, + 'Validator does not have registered BLS key', + ); + return; + } + + const blockHeader = await this._chain.dataAccess.getBlockHeaderByHeight(height); + const validatorInfo = { + address: generatorAddress, + blsPublicKey: blsPK, + blsSecretKey: blsSK, + }; + this._consensus.certifySingleCommit(blockHeader, validatorInfo); + this._logger.debug( + { + height, + generator: addressUtil.getLisk32AddressFromAddress(generatorAddress), + }, + 'Certified single commit', + ); + this._metrics.signedCommits.inc(1); + } +} diff --git a/framework/src/engine/legacy/codec.ts b/framework/src/engine/legacy/codec.ts index 0aa0e24e7d9..b1f41f5f60f 100644 --- a/framework/src/engine/legacy/codec.ts +++ b/framework/src/engine/legacy/codec.ts @@ -29,6 +29,7 @@ import { LegacyBlockHeaderWithID, LegacyTransaction, LegacyTransactionJSON, + LegacyBlockHeader, } from './types'; interface LegacyBlockSchema { @@ -43,6 +44,14 @@ export const blockSchemaMap: Record = { }, }; +export const getBlockSchema = (version: number) => { + const blockSchema = blockSchemaMap[version]; + if (!blockSchema) { + throw new Error(`Legacy block version 
${version} is not registered.`); + } + return blockSchema; +}; + // Implement read version logic when adding more versions const readVersion = (): number => 2; @@ -50,10 +59,7 @@ export const decodeBlock = ( data: Buffer, ): { block: LegacyBlockWithID; schema: LegacyBlockSchema } => { const version = readVersion(); - const blockSchema = blockSchemaMap[version]; - if (!blockSchema) { - throw new Error(`Legacy block version ${version} is not registered.`); - } + const blockSchema = getBlockSchema(version); const rawBlock = codec.decode(blockSchema.block, data); const id = utils.hash(rawBlock.header); return { @@ -68,6 +74,17 @@ export const decodeBlock = ( }; }; +export const decodeBlockHeader = (blockHeader: Buffer): LegacyBlockHeaderWithID => { + const version = readVersion(); + const blockSchema = getBlockSchema(version); + const id = utils.hash(blockHeader); + + return { + ...codec.decode(blockSchema.header, blockHeader), + id, + }; +}; + export const decodeBlockJSON = ( data: Buffer, ): { block: LegacyBlockJSON; schema: LegacyBlockSchema } => { @@ -100,10 +117,7 @@ export const getLegacyTransactionJSONWithSchema = ( }; export const encodeBlock = (data: LegacyBlock): Buffer => { - const blockSchema = blockSchemaMap[data.header.version]; - if (!blockSchema) { - throw new Error(`Legacy block version ${data.header.version} is not registered.`); - } + const blockSchema = getBlockSchema(data.header.version); const headerBytes = codec.encode(blockSchema.header, data.header); return codec.encode(blockSchema.block, { @@ -112,5 +126,13 @@ export const encodeBlock = (data: LegacyBlock): Buffer => { }); }; +export const encodeBlockHeader = (blockHeader: LegacyBlockHeader): Buffer => { + const blockSchema = getBlockSchema(blockHeader.version); + return codec.encode(blockSchema.header, blockHeader); +}; + export const encodeLegacyChainBracketInfo = (data: LegacyChainBracketInfo): Buffer => codec.encode(legacyChainBracketInfoSchema, data); + +export const 
decodeLegacyChainBracketInfo = (data: Buffer): LegacyChainBracketInfo => + codec.decode(legacyChainBracketInfoSchema, data); diff --git a/framework/src/engine/legacy/constants.ts b/framework/src/engine/legacy/constants.ts index 128aae0ae95..2367fe60562 100644 --- a/framework/src/engine/legacy/constants.ts +++ b/framework/src/engine/legacy/constants.ts @@ -12,8 +12,16 @@ * Removal or modification of this copyright notice is prohibited. */ -export const DB_KEY_BLOCKS_ID = Buffer.from('blocks:id'); -export const DB_KEY_BLOCKS_HEIGHT = Buffer.from('blocks:height'); -export const DB_KEY_TRANSACTIONS_BLOCK_ID = Buffer.from('transactions:blockID'); -export const DB_KEY_TRANSACTIONS_ID = Buffer.from('transactions:id'); +export const DB_KEY_BLOCKS_ID = 'blocks:id'; +export const DB_KEY_BLOCKS_HEIGHT = 'blocks:height'; +export const DB_KEY_TRANSACTIONS_BLOCK_ID = 'transactions:blockID'; +export const DB_KEY_TRANSACTIONS_ID = 'transactions:id'; export const DB_KEY_LEGACY_BRACKET = Buffer.from([2]); + +// When no peer was found then resyncing after 12 seconds, 1000 * 12 ms +export const FAILED_SYNC_RETRY_TIMEOUT = 12000; +// To avoid syncing with the same peer frequently and get banned due to RPC limit, resync after 5 seconds, 5 * 1000 ms +export const SUCCESS_SYNC_RETRY_TIMEOUT = 5000; +export const MAX_FAILED_ATTEMPTS = 10; +export const ENGINE_LEGACY_MODULE_NAME = 'legacy'; +export const LOG_OBJECT_ENGINE_LEGACY_MODULE = { engineModule: ENGINE_LEGACY_MODULE_NAME }; diff --git a/framework/src/engine/legacy/endpoint.ts b/framework/src/engine/legacy/endpoint.ts index 8caf10c4512..153d06cedef 100644 --- a/framework/src/engine/legacy/endpoint.ts +++ b/framework/src/engine/legacy/endpoint.ts @@ -15,21 +15,29 @@ import { Database } from '@liskhq/lisk-db'; import { isHexString } from '@liskhq/lisk-validator'; import { RequestContext } from '../rpc/rpc_server'; -import { LegacyBlockJSON, LegacyTransactionJSON } from './types'; +import { + LegacyBlockJSON, + 
LegacyChainBracketInfoWithSnapshotBlockID, + LegacyTransactionJSON, +} from './types'; import { Storage } from './storage'; import { decodeBlockJSON, getLegacyTransactionJSONWithSchema } from './codec'; +import { LegacyConfig } from '../../types'; interface EndpointArgs { db: Database; + legacyConfig: LegacyConfig; } export class LegacyEndpoint { [key: string]: unknown; public readonly storage: Storage; + private readonly _legacyConfig: LegacyConfig; public constructor(args: EndpointArgs) { this.storage = new Storage(args.db); + this._legacyConfig = args.legacyConfig; } public async getTransactionByID(context: RequestContext): Promise { @@ -77,4 +85,21 @@ export class LegacyEndpoint { return decodeBlockJSON(await this.storage.getBlockByHeight(height)).block; } + + public async getLegacyBrackets( + _context: RequestContext, + ): Promise { + return Promise.all( + this._legacyConfig.brackets.map(async bracket => { + const bracketInfo = await this.storage.getBracketInfo( + Buffer.from(bracket.snapshotBlockID, 'hex'), + ); + + return { + ...bracketInfo, + snapshotBlockID: bracket.snapshotBlockID, + }; + }), + ); + } } diff --git a/framework/src/engine/legacy/errors.ts b/framework/src/engine/legacy/errors.ts index 5fac9084991..856777e97aa 100644 --- a/framework/src/engine/legacy/errors.ts +++ b/framework/src/engine/legacy/errors.ts @@ -12,9 +12,11 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -export class PeerNotFoundWithLegacyInfo extends Error { +import { FAILED_SYNC_RETRY_TIMEOUT } from './constants'; + +export class FailSyncError extends Error { public constructor(message: string) { - super(message); + super(`${message}: Attempting to sync again after ${FAILED_SYNC_RETRY_TIMEOUT} ms`); this.name = this.constructor.name; } } diff --git a/framework/src/engine/legacy/legacy_chain_handler.ts b/framework/src/engine/legacy/legacy_chain_handler.ts index 4ebbe2d66e3..80aae02904e 100644 --- a/framework/src/engine/legacy/legacy_chain_handler.ts +++ b/framework/src/engine/legacy/legacy_chain_handler.ts @@ -19,12 +19,19 @@ import { LegacyConfig } from '../../types'; import { Network } from '../network'; import { getBlocksFromIdResponseSchema } from '../consensus/schema'; import { Storage } from './storage'; -import { LegacyBlock, LegacyBlockBracket, Peer, LegacyChainBracketInfo } from './types'; -import { decodeBlock, encodeBlock } from './codec'; -import { PeerNotFoundWithLegacyInfo } from './errors'; +import { LegacyBlock, LegacyBlockBracket, Peer } from './types'; +import { decodeBlock, encodeBlockHeader } from './codec'; +import { FailSyncError } from './errors'; import { validateLegacyBlock } from './validate'; -import { legacyChainBracketInfoSchema } from './schemas'; import { Logger } from '../../logger'; +import { + FAILED_SYNC_RETRY_TIMEOUT, + LOG_OBJECT_ENGINE_LEGACY_MODULE, + MAX_FAILED_ATTEMPTS, + SUCCESS_SYNC_RETRY_TIMEOUT, +} from './constants'; +import { getLegacyBlocksFromIdRequestSchema } from './schemas'; +import { NETWORK_LEGACY_GET_BLOCKS_FROM_ID } from '../consensus/constants'; interface LegacyChainHandlerArgs { legacyConfig: LegacyConfig; @@ -36,11 +43,19 @@ interface LegacyHandlerInitArgs { db: Database; } +const wait = async (duration: number): Promise => + new Promise(resolve => { + const timeout = setTimeout(() => { + resolve(timeout); + }, duration); + }); + export class LegacyChainHandler { private readonly _network: Network; 
private _storage!: Storage; private readonly _legacyConfig: LegacyConfig; private readonly _logger: Logger; + private readonly _syncedBrackets: Buffer[] = []; private _syncTimeout!: NodeJS.Timeout; public constructor(args: LegacyChainHandlerArgs) { @@ -52,67 +67,105 @@ export class LegacyChainHandler { public async init(args: LegacyHandlerInitArgs): Promise { this._storage = new Storage(args.db); - for (const bracket of this._legacyConfig.brackets) { + for (const bracketInfo of this._legacyConfig.brackets) { try { - await this._storage.getLegacyChainBracketInfo(Buffer.from(bracket.snapshotBlockID, 'hex')); - } catch (err) { - if (!(err instanceof NotFoundError)) { - throw err; + const bracketStorageKey = Buffer.from(bracketInfo.snapshotBlockID, 'hex'); + const bracketExists = await this._storage.hasBracketInfo(bracketStorageKey); + + if (!bracketExists) { + await this._storage.setBracketInfo(bracketStorageKey, { + startHeight: bracketInfo.startHeight, + snapshotBlockHeight: bracketInfo.snapshotHeight, + // if start block already exists then assign to lastBlockHeight + lastBlockHeight: bracketInfo.snapshotHeight, + }); + continue; } - // Save config brackets in advance, these will be used in next step (`sync`) - await this._storage.setLegacyChainBracketInfo(Buffer.from(bracket.snapshotBlockID), { - startHeight: bracket.startHeight, - snapshotBlockHeight: bracket.snapshotHeight, - lastBlockHeight: bracket.snapshotHeight, + + const storedBracketInfo = await this._storage.getBracketInfo(bracketStorageKey); + const startBlock = await this._storage.getBlockByHeight(bracketInfo.startHeight); + + // In case a user wants to indirectly update the bracketInfo stored in legacyDB + await this._storage.setBracketInfo(bracketStorageKey, { + ...storedBracketInfo, + startHeight: bracketInfo.startHeight, + snapshotBlockHeight: bracketInfo.snapshotHeight, + // if start block already exists then assign to lastBlockHeight + lastBlockHeight: startBlock ? 
bracketInfo.startHeight : bracketInfo.snapshotHeight, }); + } catch (error) { + if (!(error instanceof NotFoundError)) { + throw error; + } } } } + public stop() { + clearTimeout(this._syncTimeout); + } + public async sync() { for (const bracket of this._legacyConfig.brackets) { - const encodedBracketInfo = await this._storage.getLegacyChainBracketInfo( + const bracketInfo = await this._storage.getBracketInfo( Buffer.from(bracket.snapshotBlockID, 'hex'), ); - const bracketInfo = codec.decode( - legacyChainBracketInfoSchema, - encodedBracketInfo, - ); // means this bracket is already synced/parsed (in next `syncBlocks` step) if (bracket.startHeight === bracketInfo.lastBlockHeight) { + this._syncedBrackets.push(Buffer.from(bracket.snapshotBlockID, 'hex')); + this._network.applyNodeInfo({ + legacy: [...this._syncedBrackets], + }); continue; } - const lastBlock = decodeBlock( - await this._storage.getBlockByHeight(bracketInfo.lastBlockHeight), - ).block; + let lastBlockID; + try { + const lastBlock = decodeBlock( + await this._storage.getBlockByHeight(bracketInfo.lastBlockHeight), + ).block; + lastBlockID = lastBlock.header.id; + } catch (error) { + if (!(error instanceof NotFoundError)) { + throw error; + } + // If lastBlock does not exist then sync from the beginning + lastBlockID = Buffer.from(bracket.snapshotBlockID, 'hex'); + } + this._logger.info( + LOG_OBJECT_ENGINE_LEGACY_MODULE, + `Started syncing legacy blocks for bracket with snapshotBlockID ${bracket.snapshotBlockID}`, + ); // start parsing bracket from `lastBlock` height` - await this._trySyncBlocks(bracket, lastBlock); + this._trySyncBlocks(bracket, lastBlockID).catch((err: Error) => + this._logger.error({ err }, 'Failed to sync block with error'), + ); } - - // when ALL brackets are synced/parsed, finally update node with it's `legacy` property - this._network.applyNodeInfo({ - legacy: this._legacyConfig.brackets.map(bracket => - Buffer.from(bracket.snapshotBlockID, 'hex'), - ), - }); - - 
clearTimeout(this._syncTimeout); } - private async _trySyncBlocks(bracket: LegacyBlockBracket, lastBlock: LegacyBlock) { + private async _trySyncBlocks( + bracket: LegacyBlockBracket, + lastBlockID: Buffer, + syncRetryCounter = 0, + ) { try { - await this.syncBlocks(bracket, lastBlock); - } catch (err) { - if (err instanceof PeerNotFoundWithLegacyInfo) { - // eslint-disable-next-line @typescript-eslint/no-misused-promises - this._syncTimeout = setTimeout(async () => { - await this._trySyncBlocks(bracket, lastBlock); - }, 120000); // 2 mints = (60 * 2) * 1000 + await this._syncBlocks(bracket, lastBlockID, syncRetryCounter); + } catch (error) { + if (error instanceof FailSyncError) { + this._logger.debug( + LOG_OBJECT_ENGINE_LEGACY_MODULE, + `Retrying syncing legacy blocks for bracket with snapshotBlockID ${bracket.snapshotBlockID}`, + ); + clearTimeout(this._syncTimeout); + this._syncTimeout = await wait(FAILED_SYNC_RETRY_TIMEOUT); } else { - throw err; + this._logger.debug( + { ...LOG_OBJECT_ENGINE_LEGACY_MODULE, error: (error as Error).message }, + `Retrying syncing legacy blocks for bracket with snapshotBlockID ${bracket.snapshotBlockID}`, + ); } + await this._trySyncBlocks(bracket, lastBlockID); } } @@ -132,89 +185,134 @@ export class LegacyChainHandler { * If last block height equals bracket.startHeight, simply save bracket with `lastBlockHeight: lastBlock?.header.height` */ // eslint-disable-next-line @typescript-eslint/member-ordering - public async syncBlocks(bracket: LegacyBlockBracket, legacyBlock: LegacyBlock): Promise { + private async _syncBlocks( + bracket: LegacyBlockBracket, + lastBlockID: Buffer, + failedAttempts = 0, + ): Promise { const connectedPeers = this._network.getConnectedPeers() as unknown as Peer[]; const peersWithLegacyInfo = connectedPeers.filter( peer => - !!(peer.options as { legacy: Buffer[] }).legacy.find(snapshotBlockID => - snapshotBlockID.equals(Buffer.from(bracket.snapshotBlockID, 'hex')), + !!(peer.options as { legacy: string[] 
}).legacy.find( + snapshotBlockID => snapshotBlockID === bracket.snapshotBlockID, ), ); - if (!peersWithLegacyInfo) { - throw new PeerNotFoundWithLegacyInfo('No peer found with legacy info.'); + if (peersWithLegacyInfo.length === 0) { + const errorMessage = 'No peer found with legacy info.'; + this._logger.warn({ ...LOG_OBJECT_ENGINE_LEGACY_MODULE, method: 'syncBlocks' }, errorMessage); + throw new FailSyncError(errorMessage); } const randomPeerIndex = Math.trunc(Math.random() * peersWithLegacyInfo.length - 1); const { peerId } = peersWithLegacyInfo[randomPeerIndex]; + const requestData = codec.encode(getLegacyBlocksFromIdRequestSchema, { + blockID: lastBlockID, + snapshotBlockID: Buffer.from(bracket.snapshotBlockID, 'hex'), + }); const p2PRequestPacket: P2PRequestPacket = { - procedure: 'getLegacyBlocksFromId', - data: legacyBlock.header.id, + procedure: NETWORK_LEGACY_GET_BLOCKS_FROM_ID, + data: requestData, }; - const response = await this._network.requestFromPeer({ ...p2PRequestPacket, peerId }); + let syncRetryCounter = failedAttempts; + let response; + try { + response = await this._network.requestFromPeer({ ...p2PRequestPacket, peerId }); + // Reset counter on success + syncRetryCounter = 0; + } catch (error) { + // eslint-disable-next-line no-param-reassign + syncRetryCounter += 1; + if (syncRetryCounter > MAX_FAILED_ATTEMPTS) { + const errorMessage = `Failed ${MAX_FAILED_ATTEMPTS} times to request from peer.`; + this._logger.warn( + { ...LOG_OBJECT_ENGINE_LEGACY_MODULE, peerId, method: 'requestFromPeer' }, + errorMessage, + ); + + throw new FailSyncError(errorMessage); + } + return this._trySyncBlocks(bracket, lastBlockID, syncRetryCounter); + } // `data` is expected to hold blocks in DESC order - const { data } = response; + const { data } = response as { data: Buffer }; let legacyBlocks: LegacyBlock[]; - const applyPenaltyAndRepeat = async (msg: string) => { - this._logger.warn({ peerId }, `${msg} Applying a penalty to the peer`); - 
this._network.applyPenaltyOnPeer({ peerId, penalty: 100 }); - await this.syncBlocks(bracket, legacyBlock); - }; - try { // this part is needed to make sure `data` returns ONLY `{ blocks: Buffer[] }` & not any extra field(s) - const { blocks } = codec.decode<{ blocks: Buffer[] }>( - getBlocksFromIdResponseSchema, - data as Buffer, - ); + const { blocks } = codec.decode<{ blocks: Buffer[] }>(getBlocksFromIdResponseSchema, data); if (blocks.length === 0) { - await applyPenaltyAndRepeat('Received empty response'); + this.applyPenaltyOnSyncFailure('Received empty response', peerId); + + return this._trySyncBlocks(bracket, lastBlockID, syncRetryCounter); } this._applyValidation(blocks); legacyBlocks = blocks.map(block => decodeBlock(block).block); - if (legacyBlocks.length === 0) { - await applyPenaltyAndRepeat('received empty blocks'); - } } catch (err) { - await applyPenaltyAndRepeat((err as Error).message); // catch validation error + this.applyPenaltyOnSyncFailure((err as Error).message, peerId); + + return this._trySyncBlocks(bracket, lastBlockID, syncRetryCounter); } - // @ts-expect-error Variable 'legacyBlocks' is used before being assigned. for (const block of legacyBlocks) { - if (block.header.height > bracket.startHeight) { + if (block.header.height >= bracket.startHeight) { const payload = block.payload.length ? block.payload : []; await this._storage.saveBlock( block.header.id as Buffer, block.header.height, - encodeBlock(block), + encodeBlockHeader(block.header), payload, ); } } - // @ts-expect-error Variable 'legacyBlocks' is used before being assigned. 
const lastBlock = legacyBlocks[legacyBlocks.length - 1]; if (lastBlock && lastBlock.header.height > bracket.startHeight) { + this._logger.debug( + LOG_OBJECT_ENGINE_LEGACY_MODULE, + `Saved blocks from ${legacyBlocks[0].header.height} to ${lastBlock.header.height}`, + ); await this._updateBracketInfo(lastBlock, bracket); - await this.syncBlocks(bracket, lastBlock); + clearTimeout(this._syncTimeout); + this._syncTimeout = await wait(SUCCESS_SYNC_RETRY_TIMEOUT); + await this._trySyncBlocks(bracket, lastBlock.header.id as Buffer, syncRetryCounter); + } else { + // Syncing is finished + this._logger.info( + LOG_OBJECT_ENGINE_LEGACY_MODULE, + `Finished syncing legacy blocks for bracket with snapshotBlockID ${bracket.snapshotBlockID}`, + ); + + // After successful sync of a bracket, communicate to the network + this._syncedBrackets.push(Buffer.from(bracket.snapshotBlockID, 'hex')); + this._network.applyNodeInfo({ + legacy: [...this._syncedBrackets], + }); } - await this._updateBracketInfo(lastBlock, bracket); + return this._updateBracketInfo(lastBlock, bracket); } private async _updateBracketInfo(lastBlock: LegacyBlock, bracket: LegacyBlockBracket) { - await this._storage.setLegacyChainBracketInfo(Buffer.from(bracket.snapshotBlockID, 'hex'), { + await this._storage.setBracketInfo(Buffer.from(bracket.snapshotBlockID, 'hex'), { startHeight: bracket.startHeight, lastBlockHeight: lastBlock?.header.height, snapshotBlockHeight: bracket.snapshotHeight, }); } + private applyPenaltyOnSyncFailure(msg: string, peerId: string) { + this._logger.warn( + { ...LOG_OBJECT_ENGINE_LEGACY_MODULE, peerId }, + `${msg}: Applying a penalty to the peer`, + ); + this._network.applyPenaltyOnPeer({ peerId, penalty: 100 }); + } + private _applyValidation(blocks: Buffer[]) { const sortedBlocks = []; for (let i = blocks.length - 1; i >= 0; i -= 1) { diff --git a/framework/src/engine/legacy/network_endpoint.ts b/framework/src/engine/legacy/network_endpoint.ts index c127182fe22..985b023cee0 100644 --- 
a/framework/src/engine/legacy/network_endpoint.ts +++ b/framework/src/engine/legacy/network_endpoint.ts @@ -12,20 +12,18 @@ * Removal or modification of this copyright notice is prohibited. */ -import { Database } from '@liskhq/lisk-db'; +import { Database, NotFoundError } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; import { validator } from '@liskhq/lisk-validator'; import { Logger } from '../../logger'; import { Network } from '../network'; import { BaseNetworkEndpoint } from '../network/base_network_endpoint'; import { NETWORK_LEGACY_GET_BLOCKS_FROM_ID } from '../consensus/constants'; -import { - getBlocksFromIdRequestSchema, - getBlocksFromIdResponseSchema, - RPCBlocksByIdData, -} from '../consensus/schema'; +import { getBlocksFromIdResponseSchema } from '../consensus/schema'; import { Storage } from './storage'; import { decodeBlock } from './codec'; +import { getLegacyBlocksFromIdRequestSchema } from './schemas'; +import { RPCLegacyBlocksByIdData } from './types'; const LEGACY_BLOCKS_FROM_IDS_RATE_LIMIT_FREQUENCY = 100; @@ -49,17 +47,17 @@ export class LegacyNetworkEndpoint extends BaseNetworkEndpoint { // return 100 blocks desc starting from the id // eslint-disable-next-line @typescript-eslint/require-await - public async handleRPCGetLegacyBlocksFromID(data: unknown, peerID: string): Promise { + public async handleRPCGetLegacyBlocksFromID(data: unknown, peerId: string): Promise { this.addRateLimit( NETWORK_LEGACY_GET_BLOCKS_FROM_ID, - peerID, + peerId, LEGACY_BLOCKS_FROM_IDS_RATE_LIMIT_FREQUENCY, ); - let rpcBlocksByIdData: RPCBlocksByIdData; + let rpcBlocksByIdData: RPCLegacyBlocksByIdData; try { - rpcBlocksByIdData = codec.decode( - getBlocksFromIdRequestSchema, + rpcBlocksByIdData = codec.decode( + getLegacyBlocksFromIdRequestSchema, data as never, ); } catch (error) { @@ -67,50 +65,87 @@ export class LegacyNetworkEndpoint extends BaseNetworkEndpoint { { err: error as Error, req: data, - peerID, + peerId, }, 
`${NETWORK_LEGACY_GET_BLOCKS_FROM_ID} response failed on decoding. Applying a penalty to the peer`, ); this._network.applyPenaltyOnPeer({ - peerId: peerID, + peerId, penalty: 100, }); throw error; } try { - validator.validate(getBlocksFromIdRequestSchema, rpcBlocksByIdData); + validator.validate(getLegacyBlocksFromIdRequestSchema, rpcBlocksByIdData); } catch (error) { this._logger.warn( { err: error as Error, req: data, - peerID, + peerId, }, `${NETWORK_LEGACY_GET_BLOCKS_FROM_ID} response failed on validation. Applying a penalty to the peer`, ); this._network.applyPenaltyOnPeer({ - peerId: peerID, + peerId, penalty: 100, }); throw error; } - const { blockId } = rpcBlocksByIdData; + const { blockID: lastBlockID, snapshotBlockID } = rpcBlocksByIdData; - let lastBlockHeader; + let bracketInfo; try { - const block = await this._storage.getBlockByID(blockId); - lastBlockHeader = decodeBlock(block).block.header; + bracketInfo = await this._storage.getBracketInfo(snapshotBlockID); + } catch (error) { + if (!(error instanceof NotFoundError)) { + throw error; + } + // Peer should be banned if the request is coming for invalid snapshotBlockID which does not exist + // Peers should always choose peers with snapshotBlockID present in their nodeInfo + this._logger.warn( + { peerId }, + `Received invalid snapshotBlockID: Applying a penalty to the peer`, + ); + this._network.applyPenaltyOnPeer({ peerId, penalty: 100 }); + + throw error; + } + + let fromBlockHeight; + try { + // if the requested blockID is the same as snapshotBlockID then start from a block before snapshotBlock + if (snapshotBlockID.equals(lastBlockID)) { + fromBlockHeight = bracketInfo.snapshotBlockHeight; + } else { + const { + block: { + header: { height }, + }, + } = decodeBlock(await this._storage.getBlockByID(lastBlockID)); + fromBlockHeight = height; + } } catch (errors) { return codec.encode(getBlocksFromIdResponseSchema, { blocks: [] }); } - const lastBlockHeight = lastBlockHeader.height; - const 
fetchUntilHeight = lastBlockHeight + 100; + // we have to sync backwards so if lastBlockHeight is 171, then node responds with blocks from [71, 170] + // so lastBlockHeight = 170 and fetchFromHeight should be (lastBlockHeight - 99) = 71 + // where blocks at 71 and 170 are inclusive so in total 100 blocks + const lastBlockHeight = fromBlockHeight - 1; + const fetchFromHeight = + bracketInfo.startHeight >= lastBlockHeight - 99 + ? bracketInfo.startHeight + : lastBlockHeight - 100; + this._logger.debug( + { peerId, engineModule: 'legacy' }, + `Responding to "${NETWORK_LEGACY_GET_BLOCKS_FROM_ID}" with blocks from height ${fetchFromHeight} to ${lastBlockHeight}`, + ); const encodedBlocks = await this._storage.getBlocksByHeightBetween( + fetchFromHeight, lastBlockHeight, - fetchUntilHeight, ); return codec.encode(getBlocksFromIdResponseSchema, { blocks: encodedBlocks }); diff --git a/framework/src/engine/legacy/schemas.ts b/framework/src/engine/legacy/schemas.ts index f7867317a63..b00d2bc874d 100644 --- a/framework/src/engine/legacy/schemas.ts +++ b/framework/src/engine/legacy/schemas.ts @@ -135,3 +135,24 @@ export const legacyChainBracketInfoSchema = { }, required: ['startHeight', 'snapshotBlockHeight', 'lastBlockHeight'], }; + +export const getLegacyBlocksFromIdRequestSchema = { + $id: '/legacy/getBlocksFromIdRequest', + title: 'Get Blocks From Id Request', + type: 'object', + required: ['blockID', 'snapshotBlockID'], + properties: { + blockID: { + fieldNumber: 1, + dataType: 'bytes', + minLength: 32, + maxLength: 32, + }, + snapshotBlockID: { + fieldNumber: 2, + dataType: 'bytes', + minLength: 32, + maxLength: 32, + }, + }, +}; diff --git a/framework/src/engine/legacy/storage.ts b/framework/src/engine/legacy/storage.ts index 36d42e09e4f..8ce020d4154 100644 --- a/framework/src/engine/legacy/storage.ts +++ b/framework/src/engine/legacy/storage.ts @@ -12,9 +12,10 @@ * Removal or modification of this copyright notice is prohibited. 
*/ -import { Batch, Database } from '@liskhq/lisk-db'; +import { Batch, Database, NotFoundError } from '@liskhq/lisk-db'; import { utils } from '@liskhq/lisk-cryptography'; -import { encodeLegacyChainBracketInfo } from './codec'; +import { codec } from '@liskhq/lisk-codec'; +import { decodeLegacyChainBracketInfo, encodeLegacyChainBracketInfo } from './codec'; import { LegacyChainBracketInfo } from './types'; import { buildBlockIDDbKey, @@ -23,6 +24,7 @@ import { buildLegacyBracketDBKey, buildTxsBlockIDDbKey, } from './utils'; +import { blockSchemaV2 } from './schemas'; export class Storage { private readonly _db: Database; @@ -37,7 +39,20 @@ export class Storage { } public async getBlockByID(id: Buffer): Promise { - return this._db.get(buildBlockIDDbKey(id)); + const blockHeader = await this._db.get(buildBlockIDDbKey(id)); + let payload: Buffer[] = []; + try { + payload = await this.getTransactionsByBlockID(id); + } catch (error) { + if (!(error instanceof NotFoundError)) { + throw error; + } + } + + return codec.encode(blockSchemaV2, { + header: blockHeader, + payload, + }); } public async getBlockByHeight(height: number): Promise { @@ -70,7 +85,7 @@ export class Storage { // each txID is hashed value of 32 length const idLength = 32; for (let i = 0; i < txIdsBuffer.length; i += idLength) { - const txId = txIdsBuffer.subarray(i, (i += idLength)); + const txId = txIdsBuffer.subarray(i, i + idLength); txIds.push(txId); } @@ -102,11 +117,13 @@ export class Storage { await this._db.write(batch); } - public async getLegacyChainBracketInfo(snapshotBlockID: Buffer): Promise { - return this._db.get(buildLegacyBracketDBKey(snapshotBlockID)); + public async getBracketInfo(snapshotBlockID: Buffer): Promise { + const encodedBracketInfo = await this._db.get(buildLegacyBracketDBKey(snapshotBlockID)); + + return decodeLegacyChainBracketInfo(encodedBracketInfo); } - public async setLegacyChainBracketInfo( + public async setBracketInfo( snapshotBlockID: Buffer, bracketInfo: 
LegacyChainBracketInfo, ): Promise { @@ -116,6 +133,20 @@ export class Storage { ); } + public async hasBracketInfo(snapshotBlockID: Buffer): Promise { + try { + const bracketInfo = await this.getBracketInfo(snapshotBlockID); + + return !!bracketInfo; + } catch (error) { + if (!(error instanceof NotFoundError)) { + throw error; + } + + return false; + } + } + private async _getBlockIDsBetweenHeights( fromHeight: number, toHeight: number, diff --git a/framework/src/engine/legacy/types.ts b/framework/src/engine/legacy/types.ts index 1e7d7d99e51..a6070c47ad3 100644 --- a/framework/src/engine/legacy/types.ts +++ b/framework/src/engine/legacy/types.ts @@ -73,9 +73,18 @@ export interface LegacyChainBracketInfo { lastBlockHeight: number; } +export interface LegacyChainBracketInfoWithSnapshotBlockID extends LegacyChainBracketInfo { + snapshotBlockID: string; +} + export interface Peer { readonly peerId: string; readonly options: { - readonly legacy: Buffer[]; + readonly legacy: string[]; }; } + +export interface RPCLegacyBlocksByIdData { + readonly blockID: Buffer; + readonly snapshotBlockID: Buffer; +} diff --git a/framework/src/engine/legacy/utils.ts b/framework/src/engine/legacy/utils.ts index 55507225777..adf61e67a02 100644 --- a/framework/src/engine/legacy/utils.ts +++ b/framework/src/engine/legacy/utils.ts @@ -21,15 +21,17 @@ import { } from './constants'; // INFO: Here ID refers to hashed value of 32 length -export const buildTxIDDbKey = (id: Buffer): Buffer => Buffer.concat([DB_KEY_TRANSACTIONS_ID, id]); +export const buildTxIDDbKey = (id: Buffer): Buffer => + Buffer.from(`${DB_KEY_TRANSACTIONS_ID}:${id.toString('binary')}`); + +export const buildBlockIDDbKey = (id: Buffer): Buffer => + Buffer.from(`${DB_KEY_BLOCKS_ID}:${id.toString('binary')}`); -export const buildBlockIDDbKey = (id: Buffer): Buffer => Buffer.concat([DB_KEY_BLOCKS_ID, id]); export const buildTxsBlockIDDbKey = (id: Buffer): Buffer => - Buffer.concat([DB_KEY_TRANSACTIONS_BLOCK_ID, id]); + 
Buffer.from(`${DB_KEY_TRANSACTIONS_BLOCK_ID}:${id.toString('binary')}`); -// INFO: Generated Buffer is further used as `ID` for ```getBlockByID (ID:Buffer)``` export const buildBlockHeightDbKey = (height: number): Buffer => - Buffer.concat([DB_KEY_BLOCKS_HEIGHT, utils.intToBuffer(height, 4)]); + Buffer.from(`${DB_KEY_BLOCKS_HEIGHT}:${utils.intToBuffer(height, 4).toString('binary')}`); export const buildLegacyBracketDBKey = (snapshotBlockID: Buffer): Buffer => Buffer.concat([DB_KEY_LEGACY_BRACKET, snapshotBlockID]); diff --git a/framework/src/genesis_block.ts b/framework/src/genesis_block.ts index afebea4d925..f698d4a2524 100644 --- a/framework/src/genesis_block.ts +++ b/framework/src/genesis_block.ts @@ -68,7 +68,7 @@ export const generateGenesisBlock = async ( impliesMaxPrevotes: true, assetRoot, aggregateCommit: { - height: 0, + height, aggregationBits: EMPTY_BUFFER, certificateSignature: EMPTY_BUFFER, }, diff --git a/framework/src/index.ts b/framework/src/index.ts index d9c98e9af1e..653c3574842 100644 --- a/framework/src/index.ts +++ b/framework/src/index.ts @@ -141,7 +141,7 @@ export { RecoverContext, } from './modules/interoperability'; export { RewardMethod, RewardModule } from './modules/reward'; -export { DynamicRewardMethod, DynamicRewardModule } from './modules/dynamic_rewards'; +export { DynamicRewardMethod, DynamicRewardModule } from './modules/dynamic_reward'; export { FeeMethod, FeeModule } from './modules/fee'; export { RandomMethod, RandomModule } from './modules/random'; export { PoAModule, PoAMethod } from './modules/poa'; diff --git a/framework/src/modules/dynamic_rewards/constants.ts b/framework/src/modules/dynamic_reward/constants.ts similarity index 100% rename from framework/src/modules/dynamic_rewards/constants.ts rename to framework/src/modules/dynamic_reward/constants.ts diff --git a/framework/src/modules/dynamic_rewards/endpoint.ts b/framework/src/modules/dynamic_reward/endpoint.ts similarity index 96% rename from 
framework/src/modules/dynamic_rewards/endpoint.ts rename to framework/src/modules/dynamic_reward/endpoint.ts index e8e59d3bb52..c6c817b05d7 100644 --- a/framework/src/modules/dynamic_rewards/endpoint.ts +++ b/framework/src/modules/dynamic_reward/endpoint.ts @@ -70,9 +70,9 @@ export class DynamicRewardEndpoint extends RewardEndpoint { this._config, defaultReward, ); - const stakeRewardActiveValidators = getStakeRewardActiveValidators( + const stakeRewardActiveValidators = await getStakeRewardActiveValidators( context, - this._posMethod, + this._validatorMethod, defaultReward, minimalRewardActiveValidators, ); diff --git a/framework/src/modules/dynamic_rewards/index.ts b/framework/src/modules/dynamic_reward/index.ts similarity index 100% rename from framework/src/modules/dynamic_rewards/index.ts rename to framework/src/modules/dynamic_reward/index.ts diff --git a/framework/src/modules/dynamic_rewards/method.ts b/framework/src/modules/dynamic_reward/method.ts similarity index 100% rename from framework/src/modules/dynamic_rewards/method.ts rename to framework/src/modules/dynamic_reward/method.ts diff --git a/framework/src/modules/dynamic_rewards/module.ts b/framework/src/modules/dynamic_reward/module.ts similarity index 96% rename from framework/src/modules/dynamic_rewards/module.ts rename to framework/src/modules/dynamic_reward/module.ts index ded25bb000f..0f327db7e39 100644 --- a/framework/src/modules/dynamic_rewards/module.ts +++ b/framework/src/modules/dynamic_reward/module.ts @@ -44,7 +44,11 @@ import { } from '../../state_machine'; import { DynamicRewardMethod } from './method'; import { DynamicRewardEndpoint } from './endpoint'; -import { configSchema } from './schemas'; +import { + configSchema, + getExpectedValidatorRewardsRequestSchema, + getExpectedValidatorRewardsResponseSchema, +} from './schemas'; import { RewardMintedEvent } from '../reward/events/reward_minted'; import { EndOfRoundTimestampStore } from './stores/end_of_round_timestamp'; import { @@ 
-61,10 +65,6 @@ import { getAnnualInflationRequestSchema, } from '../reward/schemas'; import { getMinimalRewardActiveValidators, getStakeRewardActiveValidators } from './utils'; -import { - getExpectedSharedRewardsRequestSchema, - getExpectedSharedRewardsResponseSchema, -} from '../pos/schemas'; export class DynamicRewardModule extends BaseModule { public method = new DynamicRewardMethod(this.stores, this.events); @@ -116,8 +116,8 @@ export class DynamicRewardModule extends BaseModule { }, { name: this.endpoint.getExpectedValidatorRewards.name, - request: getExpectedSharedRewardsRequestSchema, - response: getExpectedSharedRewardsResponseSchema, + request: getExpectedValidatorRewardsRequestSchema, + response: getExpectedValidatorRewardsResponseSchema, }, ], }; @@ -280,9 +280,9 @@ export class DynamicRewardModule extends BaseModule { if (!bftValidator) { throw new Error('Invalid generator. Validator params does not include the validator.'); } - const stakeRewardActiveValidators = getStakeRewardActiveValidators( + const stakeRewardActiveValidators = await getStakeRewardActiveValidators( context, - this._posMethod, + this._validatorMethod, defaultReward, minimalRewardActiveValidators, ); diff --git a/framework/src/modules/dynamic_reward/schemas.ts b/framework/src/modules/dynamic_reward/schemas.ts new file mode 100644 index 00000000000..e52e2229e95 --- /dev/null +++ b/framework/src/modules/dynamic_reward/schemas.ts @@ -0,0 +1,59 @@ +/* + * Copyright © 2021 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { configSchema as rewardConfigSchema } from '../reward/schemas'; + +export const configSchema = { + $id: '/dynamicReward/config', + type: 'object', + properties: { + ...rewardConfigSchema.properties, + factorMinimumRewardActiveValidators: { + type: 'integer', + minimum: 1, + }, + }, + required: [...rewardConfigSchema.required, 'factorMinimumRewardActiveValidators'], +}; + +export const getExpectedValidatorRewardsRequestSchema = { + $id: '/dynamicReward/endpoint/getExpectedValidatorRewardsRequestSchema', + type: 'object', + required: ['validatorAddress'], + properties: { + validatorAddress: { + type: 'string', + format: 'lisk32', + }, + }, +}; + +export const getExpectedValidatorRewardsResponseSchema = { + $id: '/dynamicReward/endpoint/getExpectedValidatorRewardsResponseSchema', + type: 'object', + required: ['blockReward', 'dailyReward', 'monthlyReward', 'yearlyReward'], + properties: { + blockReward: { + type: 'string', + }, + dailyReward: { + type: 'string', + }, + monthlyReward: { + type: 'string', + }, + yearlyReward: { + type: 'string', + }, + }, +}; diff --git a/framework/src/modules/dynamic_rewards/stores/end_of_round_timestamp.ts b/framework/src/modules/dynamic_reward/stores/end_of_round_timestamp.ts similarity index 100% rename from framework/src/modules/dynamic_rewards/stores/end_of_round_timestamp.ts rename to framework/src/modules/dynamic_reward/stores/end_of_round_timestamp.ts diff --git a/framework/src/modules/dynamic_rewards/types.ts b/framework/src/modules/dynamic_reward/types.ts similarity index 96% rename from framework/src/modules/dynamic_rewards/types.ts rename to framework/src/modules/dynamic_reward/types.ts index 830eee8ab6e..95af518c4f7 100644 --- a/framework/src/modules/dynamic_rewards/types.ts +++ b/framework/src/modules/dynamic_reward/types.ts @@ -55,7 +55,6 @@ export interface ValidatorsMethod { export interface PoSMethod { getRoundLength(methodContext: ImmutableMethodContext): number; - 
getNumberOfActiveValidators(methodContext: ImmutableMethodContext): number; updateSharedRewards( methodContext: MethodContext, generatorAddress: Buffer, diff --git a/framework/src/modules/dynamic_rewards/utils.ts b/framework/src/modules/dynamic_reward/utils.ts similarity index 67% rename from framework/src/modules/dynamic_rewards/utils.ts rename to framework/src/modules/dynamic_reward/utils.ts index 39060d48776..8f03466b41e 100644 --- a/framework/src/modules/dynamic_rewards/utils.ts +++ b/framework/src/modules/dynamic_reward/utils.ts @@ -14,7 +14,7 @@ import { ImmutableMethodContext } from '../../state_machine'; import { DECIMAL_PERCENT_FACTOR } from './constants'; -import { ModuleConfig, PoSMethod } from './types'; +import { ModuleConfig, ValidatorsMethod } from './types'; export const getMinimalRewardActiveValidators = ( moduleConfig: ModuleConfig, @@ -23,15 +23,16 @@ export const getMinimalRewardActiveValidators = ( (defaultReward * BigInt(moduleConfig.factorMinimumRewardActiveValidators)) / DECIMAL_PERCENT_FACTOR; -export const getStakeRewardActiveValidators = ( +export const getStakeRewardActiveValidators = async ( context: ImmutableMethodContext, - posMethod: PoSMethod, + validatorMethod: ValidatorsMethod, defaultReward: bigint, minimalRewardActiveValidators: bigint, ) => { - const numberOfActiveValidators = posMethod.getNumberOfActiveValidators(context); - const totalRewardActiveValidators = defaultReward * BigInt(numberOfActiveValidators); - return ( - totalRewardActiveValidators - BigInt(numberOfActiveValidators) * minimalRewardActiveValidators + const { validators } = await validatorMethod.getValidatorsParams(context); + const numberOfActiveValidators = validators.reduce( + (prev, curr) => (curr.bftWeight > BigInt(0) ? 
prev + 1 : prev), + 0, ); + return BigInt(numberOfActiveValidators) * (defaultReward - minimalRewardActiveValidators); }; diff --git a/framework/src/modules/interoperability/base_cross_chain_update_command.ts b/framework/src/modules/interoperability/base_cross_chain_update_command.ts index 9899ecc71c5..0b9059efec5 100644 --- a/framework/src/modules/interoperability/base_cross_chain_update_command.ts +++ b/framework/src/modules/interoperability/base_cross_chain_update_command.ts @@ -19,7 +19,7 @@ import { CommandExecuteContext, CommandVerifyContext } from '../../state_machine import { BaseInteroperabilityCommand } from './base_interoperability_command'; import { BaseInteroperabilityInternalMethod } from './base_interoperability_internal_methods'; import { BaseInteroperabilityMethod } from './base_interoperability_method'; -import { CCMStatusCode, EMPTY_BYTES, EmptyCCM } from './constants'; +import { CCMStatusCode, EMPTY_BYTES, EVENT_TOPIC_CCM_EXECUTION, EmptyCCM } from './constants'; import { CCMProcessedCode, CcmProcessedEvent, CCMProcessedResult } from './events/ccm_processed'; import { CcmSendSuccessEvent } from './events/ccm_send_success'; import { ccmSchema, crossChainUpdateTransactionParams } from './schemas'; @@ -34,6 +34,7 @@ import { ChainAccountStore, ChainStatus } from './stores/chain_account'; import { emptyActiveValidatorsUpdate, getEncodedCCMAndID, + getIDFromCCMBytes, getMainchainID, isInboxUpdateEmpty, validateFormat, @@ -126,52 +127,34 @@ export abstract class BaseCrossChainUpdateCommand< context: CommandExecuteContext, isMainchain: boolean, ): Promise<[CCMsg[], boolean]> { - const { params, transaction } = context; + const { params } = context; const { inboxUpdate } = params; - // Verify certificate signature. We do it here because if it fails, the transaction fails rather than being invalid. - await this.internalMethod.verifyCertificateSignature(context, params); - - if (!isInboxUpdateEmpty(inboxUpdate)) { - // This check is expensive. 
Therefore, it is done in the execute step instead of the verify - // step. Otherwise, a malicious relayer could spam the transaction pool with computationally - // costly CCU verifications without paying fees. - try { - await this.internalMethod.verifyPartnerChainOutboxRoot(context, params); - } catch (error) { - return [[], false]; - } - - // Initialize the relayer account for the message fee token. - // This is necessary to ensure that the relayer can receive the CCM fees - // If the account already exists, nothing is done. - const messageFeeTokenID = await this._interopsMethod.getMessageFeeTokenID( - context, - params.sendingChainID, - ); - await this._tokenMethod.initializeUserAccount( - context, - transaction.senderAddress, - messageFeeTokenID, - ); - } - const ccms: CCMsg[] = []; let ccm: CCMsg; // Process cross-chain messages in inbox update. // First process basic checks for all CCMs. for (const ccmBytes of inboxUpdate.crossChainMessages) { + const ccmID = getIDFromCCMBytes(ccmBytes); + const ccmContext = { + ...context, + eventQueue: context.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ), + }; try { // Verify general format. Past this point, we can access ccm root properties. ccm = codec.decode(ccmSchema, ccmBytes); } catch (error) { - await this.internalMethod.terminateChainInternal(context, params.sendingChainID); - this.events.get(CcmProcessedEvent).log(context, params.sendingChainID, context.chainID, { - ccm: EmptyCCM, - result: CCMProcessedResult.DISCARDED, - code: CCMProcessedCode.INVALID_CCM_DECODING_EXCEPTION, - }); + await this.internalMethod.terminateChainInternal(ccmContext, params.sendingChainID); + this.events + .get(CcmProcessedEvent) + .log(ccmContext, params.sendingChainID, ccmContext.chainID, { + ccm: EmptyCCM, + result: CCMProcessedResult.DISCARDED, + code: CCMProcessedCode.INVALID_CCM_DECODING_EXCEPTION, + }); // In this case, we do not even update the chain account with the new certificate. 
return [[], false]; } @@ -179,11 +162,11 @@ export abstract class BaseCrossChainUpdateCommand< try { validateFormat(ccm); } catch (error) { - await this.internalMethod.terminateChainInternal(context, params.sendingChainID); + await this.internalMethod.terminateChainInternal(ccmContext, params.sendingChainID); ccm = { ...ccm, params: EMPTY_BYTES }; this.events .get(CcmProcessedEvent) - .log(context, params.sendingChainID, ccm.receivingChainID, { + .log(ccmContext, params.sendingChainID, ccm.receivingChainID, { ccm, result: CCMProcessedResult.DISCARDED, code: CCMProcessedCode.INVALID_CCM_VALIDATION_EXCEPTION, @@ -193,15 +176,13 @@ export abstract class BaseCrossChainUpdateCommand< } try { - // Verify whether the CCM respects the routing rules, - // which differ on mainchain and sidechains. - this._verifyRoutingRules(context, isMainchain, ccm); + this.verifyRoutingRules(ccm, params, ccmContext.chainID, isMainchain); ccms.push(ccm); } catch (error) { - await this.internalMethod.terminateChainInternal(context, params.sendingChainID); + await this.internalMethod.terminateChainInternal(ccmContext, params.sendingChainID); this.events .get(CcmProcessedEvent) - .log(context, params.sendingChainID, ccm.receivingChainID, { + .log(ccmContext, params.sendingChainID, ccm.receivingChainID, { ccm, result: CCMProcessedResult.DISCARDED, code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, @@ -214,36 +195,31 @@ export abstract class BaseCrossChainUpdateCommand< return [ccms, true]; } - // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0053.md#verifyroutingrules - private _verifyRoutingRules( - context: CommandExecuteContext, - isMainchain: boolean, + protected verifyRoutingRules( ccm: CCMsg, + ccuParams: CrossChainUpdateTransactionParams, + ownChainID: Buffer, + isMainchain: boolean, ) { - // Sending and receiving chains must differ. 
- if (ccm.receivingChainID.equals(ccm.sendingChainID)) { - throw new Error('Sending and receiving chains must differ.'); - } - - // Processing on the mainchain + // The CCM must come from the sending chain. if (isMainchain) { - // The CCM must come from the sending chain. - if (!ccm.sendingChainID.equals(context.params.sendingChainID)) { + if (!ccm.sendingChainID.equals(ccuParams.sendingChainID)) { throw new Error('CCM is not from the sending chain.'); } if (ccm.status === CCMStatusCode.CHANNEL_UNAVAILABLE) { throw new Error('CCM status channel unavailable can only be set on the mainchain.'); } - } else { - // The CCM must come be directed to the sidechain, unless it was bounced on the mainchain. - // eslint-disable-next-line no-lonely-if - if (!context.chainID.equals(ccm.receivingChainID)) { - throw new Error('CCM is not directed to the sidechain.'); - } + } else if (!ownChainID.equals(ccm.receivingChainID)) { + // The CCM must be directed to the sidechain. + throw new Error('CCM is not directed to the sidechain.'); + } + // Sending and receiving chains must differ. + if (ccm.receivingChainID.equals(ccm.sendingChainID)) { + throw new Error('Sending and receiving chains must differ.'); } } - protected async afterCrossChainMessagesExecution( + protected async afterCrossChainMessagesExecute( context: CommandExecuteContext, ) { const { params } = context; @@ -620,4 +596,33 @@ export abstract class BaseCrossChainUpdateCommand< return false; } } + + // verifyCertificateSignature and verifyPartnerChainOutboxRoot checks are expensive. Therefore, it is done in the execute step instead of the verify + // step. Otherwise, a malicious relayer could spam the transaction pool with computationally + // costly CCU verifications without paying fees. + protected async verifyCertificateSignatureAndPartnerChainOutboxRoot( + context: CommandExecuteContext, + ) { + const { params, transaction } = context; + const { inboxUpdate } = params; + // Verify certificate signature. 
We do it here because if it fails, the transaction fails rather than being invalid. + await this.internalMethod.verifyCertificateSignature(context, params); + + if (!isInboxUpdateEmpty(inboxUpdate)) { + await this.internalMethod.verifyPartnerChainOutboxRoot(context, params); + + // Initialize the relayer account for the message fee token. + // This is necessary to ensure that the relayer can receive the CCM fees + // If the account already exists, nothing is done. + const messageFeeTokenID = await this._interopsMethod.getMessageFeeTokenID( + context, + params.sendingChainID, + ); + await this._tokenMethod.initializeUserAccount( + context, + transaction.senderAddress, + messageFeeTokenID, + ); + } + } } diff --git a/framework/src/modules/interoperability/base_interoperability_internal_methods.ts b/framework/src/modules/interoperability/base_interoperability_internal_methods.ts index a5cdb9fca63..5230db5d31e 100644 --- a/framework/src/modules/interoperability/base_interoperability_internal_methods.ts +++ b/framework/src/modules/interoperability/base_interoperability_internal_methods.ts @@ -26,6 +26,7 @@ import { MODULE_NAME_INTEROPERABILITY, EMPTY_HASH, MAX_NUM_VALIDATORS, + MAX_UINT64, } from './constants'; import { ccmSchema } from './schemas'; import { CCMsg, CrossChainUpdateTransactionParams, ChainAccount, ChainValidators } from './types'; @@ -47,7 +48,12 @@ import { TerminatedOutboxAccount, TerminatedOutboxStore } from './stores/termina import { ChainAccountUpdatedEvent } from './events/chain_account_updated'; import { TerminatedStateCreatedEvent } from './events/terminated_state_created'; import { BaseInternalMethod } from '../BaseInternalMethod'; -import { MethodContext, ImmutableMethodContext, NotFoundError } from '../../state_machine'; +import { + MethodContext, + ImmutableMethodContext, + NotFoundError, + CommandExecuteContext, +} from '../../state_machine'; import { ChainValidatorsStore } from './stores/chain_validators'; import { certificateSchema } from 
'../../engine/consensus/certificate_generation/schema'; import { Certificate } from '../../engine/consensus/certificate_generation/types'; @@ -59,6 +65,8 @@ import { TerminatedOutboxCreatedEvent } from './events/terminated_outbox_created import { BaseCCMethod } from './base_cc_method'; import { verifyAggregateCertificateSignature } from '../../engine/consensus/certificate_generation/utils'; import { InvalidCertificateSignatureEvent } from './events/invalid_certificate_signature'; +import { InvalidSMTVerificationEvent } from './events/invalid_smt_verification'; +import { InvalidOutboxRootVerificationEvent } from './events/invalid_outbox_root_verification'; export abstract class BaseInteroperabilityInternalMethod extends BaseInternalMethod { protected readonly interoperableModuleMethods = new Map(); @@ -369,9 +377,28 @@ export abstract class BaseInteroperabilityInternalMethod extends BaseInternalMet ); } + let totalWeight = BigInt(0); + for (const currentValidator of newActiveValidators) { + if (currentValidator.bftWeight === BigInt(0)) { + throw new Error('Validator bft weight must be positive integer.'); + } + totalWeight += currentValidator.bftWeight; + if (totalWeight > MAX_UINT64) { + throw new Error('Total BFT weight exceeds maximum value.'); + } + } const certificate = codec.decode(certificateSchema, ccu.certificate); validator.validate(certificateSchema, certificate); + const { certificateThreshold } = ccu; + + if (certificateThreshold < totalWeight / BigInt(3) + BigInt(1)) { + throw new Error('Certificate threshold is too small.'); + } + if (certificateThreshold > totalWeight) { + throw new Error('Certificate threshold is too large.'); + } + const newValidatorsHash = computeValidatorsHash(newActiveValidators, ccu.certificateThreshold); if (!certificate.validatorsHash.equals(newValidatorsHash)) { throw new Error('ValidatorsHash in certificate and the computed values do not match.'); @@ -645,7 +672,7 @@ export abstract class BaseInteroperabilityInternalMethod 
extends BaseInternalMet * @see https://github.com/LiskHQ/lips/blob/main/proposals/lip-0053.md#verifypartnerchainoutboxroot */ public async verifyPartnerChainOutboxRoot( - context: ImmutableMethodContext, + context: CommandExecuteContext, params: CrossChainUpdateTransactionParams, ): Promise { const channel = await this.stores.get(ChannelDataStore).get(context, params.sendingChainID); @@ -669,6 +696,10 @@ export abstract class BaseInteroperabilityInternalMethod extends BaseInternalMet if (params.certificate.length === 0) { if (!newInboxRoot.equals(channel.partnerChainOutboxRoot)) { + this.events.get(InvalidOutboxRootVerificationEvent).error(context, params.sendingChainID, { + inboxRoot: newInboxRoot, + partnerChainOutboxRoot: channel.partnerChainOutboxRoot, + }); throw new Error('Inbox root does not match partner chain outbox root.'); } return; @@ -693,6 +724,7 @@ export abstract class BaseInteroperabilityInternalMethod extends BaseInternalMet const smt = new SparseMerkleTree(); const valid = await smt.verifyInclusionProof(certificate.stateRoot, [outboxKey], proof); if (!valid) { + this.events.get(InvalidSMTVerificationEvent).error(context); throw new Error('Invalid inclusion proof for inbox update.'); } } diff --git a/framework/src/modules/interoperability/base_interoperability_module.ts b/framework/src/modules/interoperability/base_interoperability_module.ts index cb29d3b7313..c5dfbabb6d4 100644 --- a/framework/src/modules/interoperability/base_interoperability_module.ts +++ b/framework/src/modules/interoperability/base_interoperability_module.ts @@ -35,12 +35,7 @@ import { OwnChainAccountStore } from './stores/own_chain_account'; import { RegisteredNamesStore } from './stores/registered_names'; import { TerminatedOutboxStore } from './stores/terminated_outbox'; import { TerminatedStateStore } from './stores/terminated_state'; -import { - ChainInfo, - GenesisInteroperability, - OwnChainAccount, - TerminatedStateAccountWithChainID, -} from './types'; +import { 
ChainInfo, GenesisInteroperability, OwnChainAccount } from './types'; import { computeValidatorsHash, getTokenIDLSK } from './utils'; import { genesisInteroperabilitySchema } from './schemas'; import { CcmProcessedEvent } from './events/ccm_processed'; @@ -51,8 +46,9 @@ import { InvalidCertificateSignatureEvent } from './events/invalid_certificate_s import { InvalidRegistrationSignatureEvent } from './events/invalid_registration_signature'; import { TerminatedOutboxCreatedEvent } from './events/terminated_outbox_created'; import { TerminatedStateCreatedEvent } from './events/terminated_state_created'; -import { InvalidSMTVerification } from './events/invalid_smt_verification'; -import { InvalidRMTVerification } from './events/invalid_rmt_verification'; +import { InvalidSMTVerificationEvent } from './events/invalid_smt_verification'; +import { InvalidRMTVerificationEvent } from './events/invalid_rmt_verification'; +import { InvalidOutboxRootVerificationEvent } from './events/invalid_outbox_root_verification'; export abstract class BaseInteroperabilityModule extends BaseInteroperableModule { protected interoperableCCCommands = new Map(); @@ -82,12 +78,16 @@ export abstract class BaseInteroperabilityModule extends BaseInteroperableModule ); this.events.register(TerminatedStateCreatedEvent, new TerminatedStateCreatedEvent(this.name)); this.events.register(TerminatedOutboxCreatedEvent, new TerminatedOutboxCreatedEvent(this.name)); - this.events.register(InvalidSMTVerification, new InvalidSMTVerification(this.name)); - this.events.register(InvalidRMTVerification, new InvalidRMTVerification(this.name)); + this.events.register(InvalidSMTVerificationEvent, new InvalidSMTVerificationEvent(this.name)); + this.events.register(InvalidRMTVerificationEvent, new InvalidRMTVerificationEvent(this.name)); this.events.register( InvalidCertificateSignatureEvent, new InvalidCertificateSignatureEvent(this.name), ); + this.events.register( + InvalidOutboxRootVerificationEvent, + new 
InvalidOutboxRootVerificationEvent(this.name), + ); } // Common name for mainchain/sidechain interoperability module @@ -133,12 +133,12 @@ export abstract class BaseInteroperabilityModule extends BaseInteroperableModule } // activeValidators must be ordered lexicographically by blsKey property - if (!objectUtils.isBufferArrayOrdered(activeValidators.map(v => v.blsKey))) { + const blsKeys = activeValidators.map(v => v.blsKey); + if (!objectUtils.isBufferArrayOrdered(blsKeys)) { throw new Error('activeValidators must be ordered lexicographically by blsKey property.'); } // all blsKey properties must be pairwise distinct - const blsKeys = activeValidators.map(v => v.blsKey); if (!objectUtils.bufferArrayUniqueItems(blsKeys)) { throw new Error(`All blsKey properties must be pairwise distinct.`); } @@ -186,28 +186,16 @@ export abstract class BaseInteroperabilityModule extends BaseInteroperableModule } } - protected _verifyTerminatedStateAccountsCommon( - terminatedStateAccounts: TerminatedStateAccountWithChainID[], - mainchainID: Buffer, - ) { + protected _verifyTerminatedStateAccountsIDs(chainIDs: Buffer[]) { // Each entry stateAccount in terminatedStateAccounts has a unique stateAccount.chainID - const chainIDs = terminatedStateAccounts.map(a => a.chainID); if (!objectUtils.bufferArrayUniqueItems(chainIDs)) { throw new Error(`terminatedStateAccounts don't hold unique chainID.`); } // terminatedStateAccounts is ordered lexicographically by stateAccount.chainID - if ( - !objectUtils.isBufferArrayOrdered( - terminatedStateAccounts.map(accountWithChainID => accountWithChainID.chainID), - ) - ) { + if (!objectUtils.isBufferArrayOrdered(chainIDs)) { throw new Error('terminatedStateAccounts must be ordered lexicographically by chainID.'); } - - for (const stateAccountWithChainID of terminatedStateAccounts) { - this._verifyChainID(stateAccountWithChainID.chainID, mainchainID, 'stateAccount.'); - } } // 
https://github.com/LiskHQ/lips/blob/main/proposals/lip-0045.md#genesis-state-processing diff --git a/framework/src/modules/interoperability/base_state_recovery.ts b/framework/src/modules/interoperability/base_state_recovery.ts index 17e9279fee4..0cd116393d8 100644 --- a/framework/src/modules/interoperability/base_state_recovery.ts +++ b/framework/src/modules/interoperability/base_state_recovery.ts @@ -29,7 +29,7 @@ import { TerminatedStateStore } from './stores/terminated_state'; import { computeStorePrefix } from '../base_store'; import { BaseCCMethod } from './base_cc_method'; import { BaseInteroperabilityInternalMethod } from './base_interoperability_internal_methods'; -import { InvalidSMTVerification } from './events/invalid_smt_verification'; +import { InvalidSMTVerificationEvent } from './events/invalid_smt_verification'; // LIP: https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#state-recovery-command export class BaseStateRecoveryCommand< @@ -138,7 +138,7 @@ export class BaseStateRecoveryCommand< ); if (!smtVerified) { - this.events.get(InvalidSMTVerification).error(context); + this.events.get(InvalidSMTVerificationEvent).error(context); throw new Error('State recovery proof of inclusion is not valid.'); } diff --git a/framework/src/modules/interoperability/certificates.ts b/framework/src/modules/interoperability/certificates.ts index 8bab338cb17..784970b96e9 100644 --- a/framework/src/modules/interoperability/certificates.ts +++ b/framework/src/modules/interoperability/certificates.ts @@ -1,6 +1,6 @@ /** * TODO: Now that we have `certificates.ts`, these methods could be moved there - * (checkCertificateTimestamp, checkCertificateValidity, checkValidatorsHashWithCertificate, + * (checkCertificateTimestamp, * isCertificateEmpty, verifyCertificateSignature) */ import { LastCertificate, LastCertificateJSON } from './types'; diff --git a/framework/src/modules/interoperability/constants.ts b/framework/src/modules/interoperability/constants.ts index 
b188019e39d..ecc787c3cc9 100644 --- a/framework/src/modules/interoperability/constants.ts +++ b/framework/src/modules/interoperability/constants.ts @@ -90,8 +90,10 @@ export const EVENT_NAME_CHAIN_ACCOUNT_UPDATED = 'chainAccountUpdated'; export const EVENT_NAME_CCM_PROCESSED = 'ccmProcessed'; export const EVENT_NAME_CCM_SEND_SUCCESS = 'ccmSendSucess'; export const EVENT_NAME_INVALID_CERTIFICATE_SIGNATURE = 'invalidCertificateSignature'; +export const EVENT_NAME_INVALID_OUTBOX_ROOT_VERIFICATION = 'invalidOutboxRootVerification'; export const CONTEXT_STORE_KEY_CCM_PROCESSING = 'CONTEXT_STORE_KEY_CCM_PROCESSING'; +export const EVENT_TOPIC_CCM_EXECUTION = Buffer.from([5]); // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0045.md#empty-cross-chain-message export const EmptyCCM = { diff --git a/framework/src/modules/interoperability/events/invalid_outbox_root_verification.ts b/framework/src/modules/interoperability/events/invalid_outbox_root_verification.ts new file mode 100644 index 00000000000..8b6b8bf5945 --- /dev/null +++ b/framework/src/modules/interoperability/events/invalid_outbox_root_verification.ts @@ -0,0 +1,48 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { BaseEvent, EventQueuer } from '../../base_event'; +import { HASH_LENGTH } from '../constants'; + +export interface InvalidOutboxRootVerificationData { + inboxRoot: Buffer; + partnerChainOutboxRoot: Buffer; +} + +export const invalidOutboxRootVerificationSchema = { + $id: '/interoperability/events/invalidOutboxRootVerification', + type: 'object', + required: ['inboxRoot', 'partnerChainOutboxRoot'], + properties: { + inboxRoot: { + dataType: 'bytes', + fieldNumber: 1, + minLength: HASH_LENGTH, + maxLength: HASH_LENGTH, + }, + partnerChainOutboxRoot: { + dataType: 'bytes', + fieldNumber: 2, + minLength: HASH_LENGTH, + maxLength: HASH_LENGTH, + }, + }, +}; + +export class InvalidOutboxRootVerificationEvent extends BaseEvent { + public schema = invalidOutboxRootVerificationSchema; + + public error(ctx: EventQueuer, chainID: Buffer, data: InvalidOutboxRootVerificationData): void { + this.add(ctx, data, [chainID], true); + } +} diff --git a/framework/src/modules/interoperability/events/invalid_rmt_verification.ts b/framework/src/modules/interoperability/events/invalid_rmt_verification.ts index 8dbaddef5d7..c50903692d7 100644 --- a/framework/src/modules/interoperability/events/invalid_rmt_verification.ts +++ b/framework/src/modules/interoperability/events/invalid_rmt_verification.ts @@ -13,7 +13,7 @@ */ import { BaseEvent, EventQueuer } from '../../base_event'; -export class InvalidRMTVerification extends BaseEvent { +export class InvalidRMTVerificationEvent extends BaseEvent { public error(ctx: EventQueuer): void { this.add(ctx, undefined); } diff --git a/framework/src/modules/interoperability/events/invalid_smt_verification.ts b/framework/src/modules/interoperability/events/invalid_smt_verification.ts index b81f16a0e5a..f3d0c1391bd 100644 --- a/framework/src/modules/interoperability/events/invalid_smt_verification.ts +++ b/framework/src/modules/interoperability/events/invalid_smt_verification.ts @@ -13,7 +13,7 @@ */ import { BaseEvent, EventQueuer } from 
'../../base_event'; -export class InvalidSMTVerification extends BaseEvent { +export class InvalidSMTVerificationEvent extends BaseEvent { public error(ctx: EventQueuer): void { this.add(ctx, undefined); } diff --git a/framework/src/modules/interoperability/mainchain/commands/initialize_message_recovery.ts b/framework/src/modules/interoperability/mainchain/commands/initialize_message_recovery.ts index b729719a5b8..854826eee75 100644 --- a/framework/src/modules/interoperability/mainchain/commands/initialize_message_recovery.ts +++ b/framework/src/modules/interoperability/mainchain/commands/initialize_message_recovery.ts @@ -33,6 +33,7 @@ import { ChainAccountStore } from '../../stores/chain_account'; import { TerminatedStateStore } from '../../stores/terminated_state'; import { ChannelDataStore, channelSchema } from '../../stores/channel_data'; import { TerminatedOutboxStore } from '../../stores/terminated_outbox'; +import { InvalidSMTVerificationEvent } from '../../events/invalid_smt_verification'; export interface MessageRecoveryInitializationParams { chainID: Buffer; @@ -50,8 +51,8 @@ export class InitializeMessageRecoveryCommand extends BaseInteroperabilityComman ): Promise { const { params } = context; + // The command fails if the channel parameter is not a valid serialized channel. const deserializedChannel = codec.decode(channelSchema, params.channel); - validator.validate(channelSchema, deserializedChannel); const ownchainAccount = await this.stores.get(OwnChainAccountStore).get(context, EMPTY_BYTES); @@ -63,6 +64,7 @@ export class InitializeMessageRecoveryCommand extends BaseInteroperabilityComman }; } + // The command fails if the chain is not registered. const chainAccountExist = await this.stores.get(ChainAccountStore).has(context, params.chainID); if (!chainAccountExist) { return { @@ -71,6 +73,7 @@ export class InitializeMessageRecoveryCommand extends BaseInteroperabilityComman }; } + // The command fails if the chain is not terminated. 
const terminatedAccountExists = await this.stores .get(TerminatedStateStore) .has(context, params.chainID); @@ -81,10 +84,7 @@ export class InitializeMessageRecoveryCommand extends BaseInteroperabilityComman }; } - const terminatedAccount = await this.stores - .get(TerminatedStateStore) - .get(context, params.chainID); - + // The command fails if there exist already a terminated outbox account. const terminatedOutboxAccountExists = await this.stores .get(TerminatedOutboxStore) .has(context, params.chainID); @@ -95,18 +95,31 @@ export class InitializeMessageRecoveryCommand extends BaseInteroperabilityComman }; } - const ownChainAccount = await this.stores.get(OwnChainAccountStore).get(context, EMPTY_BYTES); + return { + status: VerifyStatus.OK, + }; + } + + public async execute( + context: CommandExecuteContext, + ): Promise { + const { params } = context; + const terminatedAccount = await this.stores + .get(TerminatedStateStore) + .get(context, params.chainID); + const queryKey = Buffer.concat([ // key contains both module and store key this.stores.get(ChannelDataStore).key, - utils.hash(ownChainAccount.chainID), + utils.hash(context.chainID), ]); const query = { key: queryKey, value: utils.hash(params.channel), bitmap: params.bitmap, }; - + // The SMT verification step is computationally expensive. Therefore, it is done in the + // execution step such that the transaction fee must be paid. 
const smt = new SparseMerkleTree(); const valid = await smt.verifyInclusionProof(terminatedAccount.stateRoot, [queryKey], { siblingHashes: params.siblingHashes, @@ -114,21 +127,9 @@ export class InitializeMessageRecoveryCommand extends BaseInteroperabilityComman }); if (!valid) { - return { - status: VerifyStatus.FAIL, - error: new Error('Message recovery initialization proof of inclusion is not valid.'), - }; + this.events.get(InvalidSMTVerificationEvent).error(context); + throw new Error('Message recovery initialization proof of inclusion is not valid.'); } - - return { - status: VerifyStatus.OK, - }; - } - - public async execute( - context: CommandExecuteContext, - ): Promise { - const { params } = context; const partnerChannel = codec.decode(channelSchema, params.channel); const channel = await this.stores.get(ChannelDataStore).get(context, params.chainID); await this.internalMethod.createTerminatedOutboxAccount( diff --git a/framework/src/modules/interoperability/mainchain/commands/recover_message.ts b/framework/src/modules/interoperability/mainchain/commands/recover_message.ts index 0a2de685bf3..ebcac70eaed 100644 --- a/framework/src/modules/interoperability/mainchain/commands/recover_message.ts +++ b/framework/src/modules/interoperability/mainchain/commands/recover_message.ts @@ -31,7 +31,11 @@ import { getEncodedCCMAndID, getDecodedCCMAndID, } from '../../utils'; -import { CCMStatusCode, CONTEXT_STORE_KEY_CCM_PROCESSING } from '../../constants'; +import { + CCMStatusCode, + CONTEXT_STORE_KEY_CCM_PROCESSING, + EVENT_TOPIC_CCM_EXECUTION, +} from '../../constants'; import { ccmSchema, messageRecoveryParamsSchema } from '../../schemas'; import { TerminatedOutboxAccount, TerminatedOutboxStore } from '../../stores/terminated_outbox'; import { @@ -39,7 +43,7 @@ import { CcmProcessedEvent, CCMProcessedResult, } from '../../events/ccm_processed'; -import { InvalidRMTVerification } from '../../events/invalid_rmt_verification'; +import { InvalidRMTVerificationEvent } 
from '../../events/invalid_rmt_verification'; // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#message-recovery-command export class RecoverMessageCommand extends BaseInteroperabilityCommand { @@ -188,7 +192,7 @@ export class RecoverMessageCommand extends BaseInteroperabilityCommand { @@ -81,11 +83,14 @@ export class SubmitMainchainCrossChainUpdateCommand extends BaseCrossChainUpdate public async execute( context: CommandExecuteContext, ): Promise { + const { params } = context; + + // This call can throw error and fails a transaction + await this.verifyCertificateSignatureAndPartnerChainOutboxRoot(context); const [decodedCCMs, ok] = await this.beforeCrossChainMessagesExecution(context, true); if (!ok) { return; } - const { params } = context; try { // Update the context to indicate that now we start the CCM processing. @@ -98,7 +103,9 @@ export class SubmitMainchainCrossChainUpdateCommand extends BaseCrossChainUpdate const ccmContext = { ...context, ccm, - eventQueue: context.eventQueue.getChildQueue(ccmID), + eventQueue: context.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ), }; // If the receiving chain is the mainchain, apply the CCM @@ -114,12 +121,14 @@ export class SubmitMainchainCrossChainUpdateCommand extends BaseCrossChainUpdate // would refer to an inbox where the message has not been appended yet). await this.internalMethod.appendToInboxTree(context, params.sendingChainID, ccmBytes); } + } catch (error) { + panic(context.logger, error as Error); } finally { // Update the context to indicate that now we stop the CCM processing. 
context.contextStore.delete(CONTEXT_STORE_KEY_CCM_PROCESSING); } - await this.afterCrossChainMessagesExecution(context); + await this.afterCrossChainMessagesExecute(context); } private async _beforeCrossChainMessageForwarding( diff --git a/framework/src/modules/interoperability/mainchain/module.ts b/framework/src/modules/interoperability/mainchain/module.ts index 9c4e9e30558..034dd3db388 100644 --- a/framework/src/modules/interoperability/mainchain/module.ts +++ b/framework/src/modules/interoperability/mainchain/module.ts @@ -261,7 +261,15 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule throw new Error(`ownChainName must be equal to ${CHAIN_NAME_MAINCHAIN}.`); } - this._verifyChainInfos(ctx, chainInfos, ownChainNonce); + // if chainInfos is empty, then ownChainNonce == 0 + // If chainInfos is non-empty, ownChainNonce > 0 + if (chainInfos.length === 0 && ownChainNonce !== BigInt(0)) { + throw new Error(`ownChainNonce must be 0 if chainInfos is empty.`); + } else if (chainInfos.length !== 0 && ownChainNonce <= BigInt(0)) { + throw new Error(`ownChainNonce must be positive if chainInfos is not empty.`); + } + + this._verifyChainInfos(ctx, chainInfos, terminatedStateAccounts); this._verifyTerminatedStateAccounts(chainInfos, terminatedStateAccounts, mainchainID); this._verifyTerminatedOutboxAccounts( chainInfos, @@ -276,16 +284,8 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule private _verifyChainInfos( ctx: GenesisBlockExecuteContext, chainInfos: ChainInfo[], - ownChainNonce: bigint, + terminatedStateAccounts: TerminatedStateAccountWithChainID[], ) { - // if chainInfos is empty, then ownChainNonce == 0 - // If chainInfos is non-empty, ownChainNonce > 0 - if (chainInfos.length === 0 && ownChainNonce !== BigInt(0)) { - throw new Error(`ownChainNonce must be 0 if chainInfos is empty.`); - } else if (chainInfos.length !== 0 && ownChainNonce <= 0) { - throw new Error(`ownChainNonce must be positive if 
chainInfos is not empty.`); - } - // Each entry chainInfo in chainInfos has a unique chainInfo.chainID const chainIDs = chainInfos.map(info => info.chainID); if (!objectUtils.bufferArrayUniqueItems(chainIDs)) { @@ -293,11 +293,8 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule } // chainInfos should be ordered lexicographically by chainInfo.chainID - const sortedByChainID = [...chainInfos].sort((a, b) => a.chainID.compare(b.chainID)); - for (let i = 0; i < chainInfos.length; i += 1) { - if (!chainInfos[i].chainID.equals(sortedByChainID[i].chainID)) { - throw new Error('chainInfos is not ordered lexicographically by chainID.'); - } + if (!objectUtils.isBufferArrayOrdered(chainIDs)) { + throw new Error('chainInfos is not ordered lexicographically by chainID.'); } // The entries chainData.name must be pairwise distinct @@ -311,13 +308,17 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule // verify root level properties for (const chainInfo of chainInfos) { this._verifyChainID(chainInfo.chainID, mainchainID, 'chainInfo.'); - this._verifyChainData(ctx, chainInfo); + this._verifyChainData(ctx, chainInfo, terminatedStateAccounts); this._verifyChannelData(ctx, chainInfo); this._verifyChainValidators(chainInfo); } } - private _verifyChainData(ctx: GenesisBlockExecuteContext, chainInfo: ChainInfo) { + private _verifyChainData( + ctx: GenesisBlockExecuteContext, + chainInfo: ChainInfo, + terminatedStateAccounts: TerminatedStateAccountWithChainID[], + ) { const validStatuses = [ChainStatus.REGISTERED, ChainStatus.ACTIVE, ChainStatus.TERMINATED]; const { chainData } = chainInfo; @@ -336,6 +337,17 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule if (!validStatuses.includes(chainData.status)) { throw new Error(`chainData.status must be one of ${validStatuses.join(', ')}`); } + + if (chainData.status === ChainStatus.TERMINATED) { + const accountWithChainID = 
terminatedStateAccounts.find(accountWithChainIDTemp => + accountWithChainIDTemp.chainID.equals(chainInfo.chainID), + ); + if (!accountWithChainID) { + throw new Error( + 'For each chainInfo with status terminated there should be a corresponding entry in terminatedStateAccounts.', + ); + } + } } // https://github.com/LiskHQ/lips/blob/main/proposals/lip-0045.md#mainchain @@ -344,10 +356,14 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule terminatedStateAccounts: TerminatedStateAccountWithChainID[], mainchainID: Buffer, ) { + this._verifyTerminatedStateAccountsIDs(terminatedStateAccounts.map(a => a.chainID)); + // Sanity check to fulfill if-and-only-if situation - for (const terminatedStateAccount of terminatedStateAccounts) { + for (const terminatedStateAccountWithChainID of terminatedStateAccounts) { + this._verifyChainID(terminatedStateAccountWithChainID.chainID, mainchainID, 'stateAccount.'); + const correspondingChainInfo = chainInfos.find(chainInfo => - chainInfo.chainID.equals(terminatedStateAccount.chainID), + chainInfo.chainID.equals(terminatedStateAccountWithChainID.chainID), ); if ( !correspondingChainInfo || @@ -357,59 +373,30 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state.', ); } - } - for (const chainInfo of chainInfos) { - // For each entry chainInfo in chainInfos, chainInfo.chainData.status == CHAIN_STATUS_TERMINATED - // if and only if a corresponding entry (i.e., with chainID == chainInfo.chainID) exists in terminatedStateAccounts. 
- if (chainInfo.chainData.status === ChainStatus.TERMINATED) { - const correspondingTerminatedAccount = terminatedStateAccounts.find( - terminatedStateAccount => terminatedStateAccount.chainID.equals(chainInfo.chainID), - ); - if (!correspondingTerminatedAccount) { - throw new Error( - 'For each chainInfo with status terminated there should be a corresponding entry in terminatedStateAccounts.', - ); - } - } - } - - this._verifyTerminatedStateAccountsCommon(terminatedStateAccounts, mainchainID); - - /** - * For each entry stateAccount in terminatedStateAccounts holds - * stateAccount.terminatedStateAccount.mainchainStateRoot == EMPTY_HASH, and stateAccount.terminatedStateAccount.initialized == True. - * - * Moreover, let chainInfo be the corresponding entry in chainInfos (i.e., with chainInfo.chainID == stateAccount.chainID); then it holds that - * stateAccount.terminatedStateAccount.stateRoot == chainInfo.chainData.lastCertificate.stateRoot. - */ - for (const terminatedStateAccountWithChainID of terminatedStateAccounts) { + const stateAccount = terminatedStateAccountWithChainID.terminatedStateAccount; + // For each entry stateAccount in terminatedStateAccounts holds + // stateAccount.stateRoot == chainData.lastCertificate.stateRoot, + // stateAccount.mainchainStateRoot == EMPTY_HASH, and + // stateAccount.initialized == True. + // Here chainData is the corresponding entry (i.e., with chainID == stateAccount.chainID) in chainInfos. 
if ( - !terminatedStateAccountWithChainID.terminatedStateAccount.mainchainStateRoot.equals( - EMPTY_HASH, - ) + !stateAccount.stateRoot.equals(correspondingChainInfo.chainData.lastCertificate.stateRoot) ) { throw new Error( - `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, + "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", ); } - if (!terminatedStateAccountWithChainID.terminatedStateAccount.initialized) { - throw new Error('stateAccount is not initialized.'); - } - - const correspondingChainInfo = chainInfos.find(chainInfo => - chainInfo.chainID.equals(terminatedStateAccountWithChainID.chainID), - ) as ChainInfo; // at this point, it's not undefined, since similar check already applied above - if ( - !terminatedStateAccountWithChainID.terminatedStateAccount.stateRoot.equals( - correspondingChainInfo.chainData.lastCertificate.stateRoot, - ) - ) { + if (!stateAccount.mainchainStateRoot.equals(EMPTY_HASH)) { throw new Error( - "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", + `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, ); } + + if (!stateAccount.initialized) { + throw new Error('stateAccount is not initialized.'); + } } } @@ -426,13 +413,8 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule } // terminatedOutboxAccounts is ordered lexicographically by outboxAccount.chainID - const sortedByChainID = [...terminatedOutboxAccounts].sort((a, b) => - a.chainID.compare(b.chainID), - ); - for (let i = 0; i < terminatedOutboxAccounts.length; i += 1) { - if (!terminatedOutboxAccounts[i].chainID.equals(sortedByChainID[i].chainID)) { - throw new Error('terminatedOutboxAccounts must be ordered lexicographically by chainID.'); - } + if (!objectUtils.isBufferArrayOrdered(chainIDs)) { + throw new Error('terminatedOutboxAccounts must be ordered lexicographically by chainID.'); } // Furthermore, an entry 
outboxAccount in terminatedOutboxAccounts must have a corresponding entry diff --git a/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts b/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts index 02a144988f9..c4319d01ce4 100644 --- a/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts +++ b/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts @@ -31,7 +31,7 @@ import { TerminatedStateAccount, TerminatedStateStore } from '../../stores/termi import { ChainAccount, StateRecoveryInitParams } from '../../types'; import { getMainchainID } from '../../utils'; import { SidechainInteroperabilityInternalMethod } from '../internal_method'; -import { InvalidSMTVerification } from '../../events/invalid_smt_verification'; +import { InvalidSMTVerificationEvent } from '../../events/invalid_smt_verification'; export class InitializeStateRecoveryCommand extends BaseInteroperabilityCommand { public schema = stateRecoveryInitParamsSchema; @@ -117,7 +117,7 @@ export class InitializeStateRecoveryCommand extends BaseInteroperabilityCommand< const verified = await smt.verifyInclusionProof(stateRoot, [queryKey], proofOfInclusion); if (!verified) { - this.events.get(InvalidSMTVerification).error(context); + this.events.get(InvalidSMTVerificationEvent).error(context); throw new Error('State recovery initialization proof of inclusion is not valid.'); } diff --git a/framework/src/modules/interoperability/sidechain/commands/register_mainchain.ts b/framework/src/modules/interoperability/sidechain/commands/register_mainchain.ts index 2e69ad3cd61..b924bcf9150 100644 --- a/framework/src/modules/interoperability/sidechain/commands/register_mainchain.ts +++ b/framework/src/modules/interoperability/sidechain/commands/register_mainchain.ts @@ -112,7 +112,7 @@ export class RegisterMainchainCommand extends BaseInteroperabilityCommand, ): Promise { + const { params 
} = context; + + // This call can throw error and fails a transaction + await this.verifyCertificateSignatureAndPartnerChainOutboxRoot(context); + const [decodedCCMs, ok] = await this.beforeCrossChainMessagesExecution(context, false); if (!ok) { return; } - const { params } = context; try { // Update the context to indicate that now we start the CCM processing. @@ -57,7 +62,9 @@ export class SubmitSidechainCrossChainUpdateCommand extends BaseCrossChainUpdate const ccmContext = { ...context, ccm, - eventQueue: context.eventQueue.getChildQueue(ccmID), + eventQueue: context.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ), }; await this.apply(ccmContext); @@ -67,11 +74,13 @@ export class SubmitSidechainCrossChainUpdateCommand extends BaseCrossChainUpdate // would refer to an inbox where the message has not been appended yet). await this.internalMethod.appendToInboxTree(context, params.sendingChainID, ccmBytes); } + } catch (error) { + panic(context.logger, error as Error); } finally { // Update the context to indicate that now we stop the CCM processing. 
context.contextStore.delete(CONTEXT_STORE_KEY_CCM_PROCESSING); } - await this.afterCrossChainMessagesExecution(context); + await this.afterCrossChainMessagesExecute(context); } } diff --git a/framework/src/modules/interoperability/sidechain/module.ts b/framework/src/modules/interoperability/sidechain/module.ts index bd554eccce8..08796d0c185 100644 --- a/framework/src/modules/interoperability/sidechain/module.ts +++ b/framework/src/modules/interoperability/sidechain/module.ts @@ -328,9 +328,11 @@ export class SidechainInteroperabilityModule extends BaseInteroperabilityModule terminatedStateAccounts: TerminatedStateAccountWithChainID[], mainchainID: Buffer, ) { - this._verifyTerminatedStateAccountsCommon(terminatedStateAccounts, mainchainID); + this._verifyTerminatedStateAccountsIDs(terminatedStateAccounts.map(a => a.chainID)); for (const stateAccount of terminatedStateAccounts) { + this._verifyChainID(stateAccount.chainID, mainchainID, 'stateAccount.'); + // and stateAccount.chainID != OWN_CHAIN_ID. 
if (stateAccount.chainID.equals(ctx.chainID)) { throw new Error(`stateAccount.chainID must not be equal to OWN_CHAIN_ID.`); diff --git a/framework/src/modules/interoperability/utils.ts b/framework/src/modules/interoperability/utils.ts index 478fba37db1..b4a43d7951f 100644 --- a/framework/src/modules/interoperability/utils.ts +++ b/framework/src/modules/interoperability/utils.ts @@ -22,7 +22,6 @@ import { CCMsg, ChainAccount, CrossChainUpdateTransactionParams, - ChainValidators, InboxUpdate, OutboxRootWitness, ActiveValidatorsUpdate, @@ -119,7 +118,7 @@ export const isInboxUpdateEmpty = (inboxUpdate: InboxUpdate) => export const isOutboxRootWitnessEmpty = (outboxRootWitness: OutboxRootWitness) => outboxRootWitness.siblingHashes.length === 0 || outboxRootWitness.bitmap.length === 0; -export const checkLivenessRequirementFirstCCU = ( +export const checkLivenessRequirement = ( partnerChainAccount: ChainAccount, txParams: CrossChainUpdateTransactionParams, ): VerificationResult => { @@ -142,39 +141,6 @@ export const checkLivenessRequirementFirstCCU = ( }; }; -export const checkCertificateValidity = ( - partnerChainAccount: ChainAccount, - encodedCertificate: Buffer, -): VerificationResult => { - if (encodedCertificate.equals(EMPTY_BYTES)) { - return { - status: VerifyStatus.OK, - }; - } - - const certificate = codec.decode(certificateSchema, encodedCertificate); - try { - validator.validate(certificateSchema, certificate); - } catch (err) { - return { - status: VerifyStatus.FAIL, - error: new Error('Certificate is missing required values.'), - }; - } - - // Last certificate height should be less than new certificate height - if (partnerChainAccount.lastCertificate.height >= certificate.height) { - return { - status: VerifyStatus.FAIL, - error: new Error('Certificate height should be greater than last certificate height.'), - }; - } - - return { - status: VerifyStatus.OK, - }; -}; - export const checkCertificateTimestamp = ( txParams: CrossChainUpdateTransactionParams, 
certificate: Certificate, @@ -190,60 +156,6 @@ export const checkCertificateTimestamp = ( } }; -export const checkValidatorsHashWithCertificate = ( - txParams: CrossChainUpdateTransactionParams, - partnerValidators: ChainValidators, -): VerificationResult => { - if ( - !emptyActiveValidatorsUpdate(txParams.activeValidatorsUpdate) || - txParams.certificateThreshold > BigInt(0) - ) { - if (txParams.certificate.equals(EMPTY_BYTES)) { - return { - status: VerifyStatus.FAIL, - error: new Error( - 'Certificate cannot be empty when activeValidatorsUpdate or certificateThreshold has a non-empty value.', - ), - }; - } - let certificate: Certificate; - try { - certificate = codec.decode(certificateSchema, txParams.certificate); - validator.validate(certificateSchema, certificate); - } catch (error) { - return { - status: VerifyStatus.FAIL, - error: new Error( - 'Certificate should have all required values when activeValidatorsUpdate or certificateThreshold has a non-empty value.', - ), - }; - } - - const newActiveValidators = calculateNewActiveValidators( - partnerValidators.activeValidators, - txParams.activeValidatorsUpdate.blsKeysUpdate, - txParams.activeValidatorsUpdate.bftWeightsUpdate, - txParams.activeValidatorsUpdate.bftWeightsUpdateBitmap, - ); - - const validatorsHash = computeValidatorsHash( - newActiveValidators, - txParams.certificateThreshold || partnerValidators.certificateThreshold, - ); - - if (!certificate.validatorsHash.equals(validatorsHash)) { - return { - status: VerifyStatus.FAIL, - error: new Error('Validators hash given in the certificate is incorrect.'), - }; - } - } - - return { - status: VerifyStatus.OK, - }; -}; - export const chainAccountToJSON = (chainAccount: ChainAccount) => { const { lastCertificate, name, status } = chainAccount; diff --git a/framework/src/modules/pos/constants.ts b/framework/src/modules/pos/constants.ts index 783f0c3795d..6ec10218510 100644 --- a/framework/src/modules/pos/constants.ts +++ 
b/framework/src/modules/pos/constants.ts @@ -15,10 +15,10 @@ export const MODULE_NAME_POS = 'pos'; export const COMMAND_NAME_VALIDATOR_REGISTRATION = 'registerValidator'; -export const LOCKING_PERIOD_STAKING = 26_000; -export const LOCKING_PERIOD_SELF_STAKING = 260_000; -export const PUNISHMENT_WINDOW_STAKING = 241_920; -export const PUNISHMENT_WINDOW_SELF_STAKING = 725_760; +export const LOCKING_PERIOD_STAKING = 25_920; // (3 * 24 * 3600 // BLOCK_TIME) +export const LOCKING_PERIOD_SELF_STAKING = 241_920; // (28 * 24 * 3600 // BLOCK_TIME) +export const PUNISHMENT_WINDOW_STAKING = 241_920; // (28 * 24 * 3600 // BLOCK_TIME) +export const PUNISHMENT_WINDOW_SELF_STAKING = 725_760; // (725,760 = 3 * PUNISHMENT_WINDOW_STAKING) export const MAX_LENGTH_NAME = 20; export const BASE_STAKE_AMOUNT = BigInt(10) * BigInt(10) ** BigInt(8); export const MAX_NUMBER_SENT_STAKES = 10; @@ -35,11 +35,11 @@ export const LOCAL_ID_LENGTH = 4; export const TOKEN_ID_LENGTH = CHAIN_ID_LENGTH + LOCAL_ID_LENGTH; export const MAX_NUMBER_BYTES_Q96 = 24; export const COMMISSION = 10000; -export const COMMISSION_INCREASE_PERIOD = 260_000; +export const COMMISSION_INCREASE_PERIOD = 241_920; // (28 * 24 * 3600 // BLOCK_TIME) export const MAX_COMMISSION_INCREASE_RATE = 500; // MAX_COMMISSION_INCREASE in LIP-0063 export const FACTOR_SELF_STAKES = 10; // FACTOR_SELF_STAKING in LIP-0063 const FAIL_SAFE_MISSED_BLOCKS = 50; -const FAIL_SAFE_INACTIVE_WINDOW = 260_000; // 120960 in LIP-0063 +const FAIL_SAFE_INACTIVE_WINDOW = 120_960; // (14 * 24 * 3600 // BLOCK_TIME) const MAX_BFT_WEIGHT_CAP = 500; // 1000 in LIP-0063 const USE_INVALID_BLS_KEY = false; // true in LIP-0063 const NUMBER_ACTIVE_VALIDATORS = 101; diff --git a/framework/src/modules/pos/endpoint.ts b/framework/src/modules/pos/endpoint.ts index 30ffa60ce54..3a9d95793ea 100644 --- a/framework/src/modules/pos/endpoint.ts +++ b/framework/src/modules/pos/endpoint.ts @@ -36,16 +36,16 @@ import { GetUnlockHeightResponse, GetValidatorsByStakeRequest, 
ModuleConfig, - ModuleConfigJSON, TokenMethod, StakerData, StakerDataJSON, GetExpectedSharedRewardsRequest, PunishmentLockingPeriods, + GetConstantsResponse, } from './types'; import { getPunishTime, getWaitTime, isCertificateGenerated, calculateStakeRewards } from './utils'; import { GenesisDataStore } from './stores/genesis'; -import { EMPTY_KEY } from './constants'; +import { COMMISSION, EMPTY_KEY } from './constants'; import { EligibleValidator, EligibleValidatorsStore } from './stores/eligible_validators'; import { getClaimableRewardsRequestSchema, @@ -154,7 +154,7 @@ export class PoSEndpoint extends BaseEndpoint { } // eslint-disable-next-line @typescript-eslint/require-await - public async getConstants(): Promise { + public async getConstants(): Promise { return { factorSelfStakes: this._moduleConfig.factorSelfStakes, maxLengthName: this._moduleConfig.maxLengthName, @@ -180,6 +180,7 @@ export class PoSEndpoint extends BaseEndpoint { reportMisbehaviorReward: this._moduleConfig.reportMisbehaviorReward.toString(), reportMisbehaviorLimitBanned: this._moduleConfig.reportMisbehaviorLimitBanned, weightScaleFactor: this._moduleConfig.weightScaleFactor.toString(), + defaultCommission: COMMISSION, }; } diff --git a/framework/src/modules/pos/module.ts b/framework/src/modules/pos/module.ts index 70c6c16eb82..3feb7bdd462 100644 --- a/framework/src/modules/pos/module.ts +++ b/framework/src/modules/pos/module.ts @@ -55,6 +55,7 @@ import { getRegistrationFeeResponseSchema, getExpectedSharedRewardsRequestSchema, getExpectedSharedRewardsResponseSchema, + getConstantsResponseSchema, } from './schemas'; import { RandomMethod, @@ -206,7 +207,7 @@ export class PoSModule extends BaseModule { }, { name: this.endpoint.getConstants.name, - response: configSchema, + response: getConstantsResponseSchema, }, { name: this.endpoint.getPoSTokenID.name, diff --git a/framework/src/modules/pos/schemas.ts b/framework/src/modules/pos/schemas.ts index 04c556b5e44..38622d3c589 100644 --- 
a/framework/src/modules/pos/schemas.ts +++ b/framework/src/modules/pos/schemas.ts @@ -545,6 +545,19 @@ export const getValidatorResponseSchema = { ...validatorJSONSchema, }; +export const getConstantsResponseSchema = { + $id: 'modules/pos/endpoint/getConstantsResponseSchema', + type: 'object', + properties: { + ...configSchema.properties, + defaultCommission: { + type: 'integer', + format: 'uint32', + }, + }, + required: [...configSchema.required, 'defaultCommission'], +}; + export const getStakerRequestSchema = getValidatorRequestSchema; export const getStakerResponseSchema = { diff --git a/framework/src/modules/pos/types.ts b/framework/src/modules/pos/types.ts index 596238e19c8..68166637ba2 100644 --- a/framework/src/modules/pos/types.ts +++ b/framework/src/modules/pos/types.ts @@ -50,6 +50,8 @@ export interface ModuleConfig { export type ModuleConfigJSON = JSONObject; +export type GetConstantsResponse = JSONObject; + export interface PunishmentLockingPeriods { punishmentWindowStaking: number; punishmentWindowSelfStaking: number; diff --git a/framework/src/modules/token/cc_method.ts b/framework/src/modules/token/cc_method.ts index 5ceff2c38ef..ae48543c884 100644 --- a/framework/src/modules/token/cc_method.ts +++ b/framework/src/modules/token/cc_method.ts @@ -12,6 +12,7 @@ * Removal or modification of this copyright notice is prohibited. 
*/ +import { validator } from '@liskhq/lisk-validator'; import { codec } from '@liskhq/lisk-codec'; import { BaseCCMethod } from '../interoperability/base_cc_method'; import { @@ -200,6 +201,7 @@ export class TokenInteroperableMethod extends BaseCCMethod { try { account = codec.decode(userStoreSchema, ctx.storeValue); + validator.validate(userStoreSchema, account); } catch (error) { this.events .get(RecoverEvent) diff --git a/framework/src/modules/token/module.ts b/framework/src/modules/token/module.ts index 2cab61f7681..2912177ae3d 100644 --- a/framework/src/modules/token/module.ts +++ b/framework/src/modules/token/module.ts @@ -286,12 +286,6 @@ export class TokenModule extends BaseInteroperableModule { )} has duplicate module in locked balances.`, ); } - // Validate userSubstore not to be empty - if (userData.lockedBalances.length === 0 && userData.availableBalance === BigInt(0)) { - throw new Error( - `Address ${address.getLisk32AddressFromAddress(userData.address)} has empty data.`, - ); - } await userStore.save(context, userData.address, userData.tokenID, userData); } diff --git a/framework/src/state_machine/constants.ts b/framework/src/state_machine/constants.ts index 854eca30ab6..4604e7da35a 100644 --- a/framework/src/state_machine/constants.ts +++ b/framework/src/state_machine/constants.ts @@ -19,3 +19,4 @@ export const EVENT_INDEX_BEFORE_TRANSACTIONS = Buffer.from([2]); export const EVENT_INDEX_AFTER_TRANSACTIONS = Buffer.from([3]); export const EVENT_TRANSACTION_NAME = 'commandExecutionResult'; +export const EVENT_TOPIC_TRANSACTION_EXECUTION = Buffer.from([4]); diff --git a/framework/src/state_machine/event_queue.ts b/framework/src/state_machine/event_queue.ts index a498fb07576..2e3408307c6 100644 --- a/framework/src/state_machine/event_queue.ts +++ b/framework/src/state_machine/event_queue.ts @@ -74,8 +74,7 @@ export class EventQueue { } public getChildQueue(topicID: Buffer): EventQueue { - const allTopics = [...this._defaultTopics, topicID]; - return 
new EventQueue(this._height, this._events, allTopics); + return new EventQueue(this._height, this._events, [topicID]); } public createSnapshot(): number { diff --git a/framework/src/state_machine/state_machine.ts b/framework/src/state_machine/state_machine.ts index a91ad8f6709..d8a5b7350c8 100644 --- a/framework/src/state_machine/state_machine.ts +++ b/framework/src/state_machine/state_machine.ts @@ -24,7 +24,7 @@ import { GenerationContext } from './generator_context'; import { GenesisBlockContext } from './genesis_block_context'; import { TransactionContext } from './transaction_context'; import { VerifyStatus, VerificationResult } from './types'; -import { EVENT_TRANSACTION_NAME } from './constants'; +import { EVENT_TOPIC_TRANSACTION_EXECUTION, EVENT_TRANSACTION_NAME } from './constants'; export class StateMachine { private readonly _modules: BaseModule[] = []; @@ -218,7 +218,7 @@ export class StateMachine { ctx.transaction.module, EVENT_TRANSACTION_NAME, codec.encode(standardEventDataSchema, { success: status === TransactionExecutionResult.OK }), - [ctx.transaction.id], + [Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, ctx.transaction.id])], ); return status; diff --git a/framework/src/state_machine/transaction_context.ts b/framework/src/state_machine/transaction_context.ts index 8b69a08f610..2f888e81dbb 100644 --- a/framework/src/state_machine/transaction_context.ts +++ b/framework/src/state_machine/transaction_context.ts @@ -26,6 +26,7 @@ import { BlockHeader, BlockAssets, } from './types'; +import { EVENT_TOPIC_TRANSACTION_EXECUTION } from './constants'; interface ContextParams { chainID: Buffer; @@ -77,7 +78,9 @@ export class TransactionContext { if (!this._assets) { throw new Error('Transaction Execution requires block assets in the context.'); } - const childQueue = this._eventQueue.getChildQueue(this._transaction.id); + const childQueue = this._eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, this._transaction.id]), + ); 
return { logger: this._logger, chainID: this._chainID, @@ -132,7 +135,9 @@ export class TransactionContext { if (!this._assets) { throw new Error('Transaction Execution requires block assets in the context.'); } - const childQueue = this._eventQueue.getChildQueue(this._transaction.id); + const childQueue = this._eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, this._transaction.id]), + ); return { logger: this._logger, chainID: this._chainID, diff --git a/framework/src/testing/block_processing_env.ts b/framework/src/testing/block_processing_env.ts index 51ba1033202..3e2530a4522 100644 --- a/framework/src/testing/block_processing_env.ts +++ b/framework/src/testing/block_processing_env.ts @@ -60,7 +60,7 @@ import { systemDirs } from '../system_dirs'; import { PrefixedStateReadWriter } from '../state_machine/prefixed_state_read_writer'; import { createLogger } from '../logger'; import { MainchainInteroperabilityModule } from '../modules/interoperability'; -import { DynamicRewardModule } from '../modules/dynamic_rewards'; +import { DynamicRewardModule } from '../modules/dynamic_reward'; type Options = { genesis?: GenesisConfig; diff --git a/framework/src/utils/panic.ts b/framework/src/utils/panic.ts new file mode 100644 index 00000000000..6162ac8bc12 --- /dev/null +++ b/framework/src/utils/panic.ts @@ -0,0 +1,23 @@ +/* + * Copyright © 2023 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. + */ + +import { Logger } from '../logger'; + +export const panic = (logger: Logger, error?: Error): void => { + logger.fatal( + { error: error ?? 
new Error('Something unexpected happened') }, + 'Raising panic and shutting down the application', + ); + process.exit(1); +}; diff --git a/framework/test/integration/node/processor/pos.spec.ts b/framework/test/integration/node/processor/pos.spec.ts index 499fc84a7c0..6b29fbfef6b 100644 --- a/framework/test/integration/node/processor/pos.spec.ts +++ b/framework/test/integration/node/processor/pos.spec.ts @@ -26,6 +26,7 @@ import * as testing from '../../../../src/testing'; import { defaultConfig } from '../../../../src/modules/token/constants'; import { ValidatorAccountJSON } from '../../../../src/modules/pos/stores/validator'; import { StakerDataJSON } from '../../../../src/modules/pos/types'; +import { EVENT_TOPIC_TRANSACTION_EXECUTION } from '../../../../src/state_machine/constants'; describe('PoS and reward', () => { let processEnv: testing.BlockProcessingEnv; @@ -96,10 +97,10 @@ describe('PoS and reward', () => { const events = await processEnv.getEvents(newBlock.header.height); expect(events.find(e => e.name === 'generatorKeyRegistration')?.topics[0]).toEqual( - registrationTx.id.toString('hex'), + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, registrationTx.id]).toString('hex'), ); expect(events.find(e => e.name === 'blsKeyRegistration')?.topics[0]).toEqual( - registrationTx.id.toString('hex'), + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, registrationTx.id]).toString('hex'), ); expect(events.find(e => e.name === 'commandExecutionResult')).toHaveProperty('data', '0801'); @@ -140,10 +141,10 @@ describe('PoS and reward', () => { const events = await processEnv.getEvents(newBlock.header.height); expect(events.find(e => e.name === 'generatorKeyRegistration')?.topics[0]).toEqual( - registrationTx.id.toString('hex'), + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, registrationTx.id]).toString('hex'), ); expect(events.find(e => e.name === 'blsKeyRegistration')?.topics[0]).toEqual( - registrationTx.id.toString('hex'), + 
Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, registrationTx.id]).toString('hex'), ); expect(events.find(e => e.name === 'commandExecutionResult')).toHaveProperty('data', '0801'); diff --git a/framework/test/unit/controller/controller.spec.ts b/framework/test/unit/controller/controller.spec.ts index 5f7ec21af28..c9161dfa3dd 100644 --- a/framework/test/unit/controller/controller.spec.ts +++ b/framework/test/unit/controller/controller.spec.ts @@ -235,6 +235,12 @@ describe('Controller Class', () => { jest.spyOn(InMemoryChannel.prototype, 'publish'); await controller.start(); childProcessMock.connected = true; + childProcessMock.on.mockImplementationOnce((_name, cb) => { + cb({ action: 'loaded' }); + }); + childProcessMock.on.mockImplementationOnce((_name, cb) => { + cb({ action: 'unloaded' }); + }); }); describe('unload in-memory plugins', () => { diff --git a/framework/test/unit/engine/bft/method.spec.ts b/framework/test/unit/engine/bft/method.spec.ts index fccde45f312..f67ac46b7f2 100644 --- a/framework/test/unit/engine/bft/method.spec.ts +++ b/framework/test/unit/engine/bft/method.spec.ts @@ -706,10 +706,32 @@ describe('BFT Method', () => { ], validatorsHash: utils.getRandomBytes(32), }); + const generatorAddress = utils.getRandomBytes(20); const params20 = createParam(); const params30 = createParam(); + const validators = [ + { + address: generatorAddress, + bftWeight: BigInt(50), + blsKey: utils.getRandomBytes(48), + generatorKey: utils.getRandomBytes(32), + }, + { + address: utils.getRandomBytes(20), + bftWeight: BigInt(50), + blsKey: utils.getRandomBytes(48), + generatorKey: utils.getRandomBytes(32), + }, + { + address: utils.getRandomBytes(20), + bftWeight: BigInt(3), + blsKey: utils.getRandomBytes(48), + generatorKey: utils.getRandomBytes(32), + }, + ]; + beforeEach(async () => { validatorsMethod.getValidatorKeys.mockResolvedValue({ blsKey: utils.getRandomBytes(32) }); stateStore = new StateStore(new InMemoryDatabase()); @@ -789,45 +811,45 @@ 
describe('BFT Method', () => { }); it('should throw when validator addresses are not unique', async () => { - const validators = new Array(bftMethod['_batchSize']).fill(0).map(() => ({ + const validatorsAddressNotUnique = new Array(bftMethod['_batchSize']).fill(0).map(() => ({ address: utils.getRandomBytes(20), bftWeight: BigInt(1), blsKey: utils.getRandomBytes(48), generatorKey: utils.getRandomBytes(32), })); - validators[8].address = validators[12].address; + validatorsAddressNotUnique[8].address = validatorsAddressNotUnique[12].address; await expect( - bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validators), + bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validatorsAddressNotUnique), ).rejects.toThrow('Provided validator addresses are not unique.'); }); it('should throw when validator BLS keys are not unique', async () => { - const validators = new Array(bftMethod['_batchSize']).fill(0).map(() => ({ + const validatorsBLSKeysNotUnique = new Array(bftMethod['_batchSize']).fill(0).map(() => ({ address: utils.getRandomBytes(20), bftWeight: BigInt(1), blsKey: utils.getRandomBytes(48), generatorKey: utils.getRandomBytes(32), })); - validators[13].blsKey = validators[7].blsKey; + validatorsBLSKeysNotUnique[13].blsKey = validatorsBLSKeysNotUnique[7].blsKey; await expect( - bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validators), + bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validatorsBLSKeysNotUnique), ).rejects.toThrow('Provided validator BLS keys are not unique.'); }); it('should not throw when validator BLS keys are not unique only with invalid keys', async () => { - const validators = new Array(bftMethod['_batchSize']).fill(0).map(() => ({ + const validatorsInvalidBLSKeys = new Array(bftMethod['_batchSize']).fill(0).map(() => ({ address: utils.getRandomBytes(20), bftWeight: BigInt(1), blsKey: utils.getRandomBytes(48), generatorKey: utils.getRandomBytes(32), })); - validators[7].blsKey = 
Buffer.alloc(48, 0); - validators[13].blsKey = Buffer.alloc(48, 0); + validatorsInvalidBLSKeys[7].blsKey = Buffer.alloc(48, 0); + validatorsInvalidBLSKeys[13].blsKey = Buffer.alloc(48, 0); await expect( - bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validators), + bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validatorsInvalidBLSKeys), ).not.toReject(); }); @@ -849,147 +871,60 @@ describe('BFT Method', () => { it('should throw when less than 1/3 of aggregateBFTWeight for precommitThreshold is given', async () => { await expect( - bftMethod.setBFTParameters(stateStore, BigInt(34), BigInt(68), [ - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(3), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - ]), + bftMethod.setBFTParameters(stateStore, BigInt(34), BigInt(68), validators), ).rejects.toThrow('Invalid precommitThreshold input.'); }); it('should throw when precommitThreshold is given is greater than aggregateBFTWeight', async () => { await expect( - bftMethod.setBFTParameters(stateStore, BigInt(104), BigInt(68), [ - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(3), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - ]), + bftMethod.setBFTParameters(stateStore, BigInt(104), BigInt(68), validators), ).rejects.toThrow('Invalid precommitThreshold input.'); }); 
it('should throw when less than 1/3 of aggregateBFTWeight for certificateThreshold is given', async () => { await expect( - bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(34), [ - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(3), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - ]), + bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(34), validators), ).rejects.toThrow('Invalid certificateThreshold input.'); }); it('should throw when certificateThreshold is given is greater than aggregateBFTWeight', async () => { await expect( - bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(104), [ - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(3), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - ]), + bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(104), validators), ).rejects.toThrow('Invalid certificateThreshold input.'); }); describe('when setBFTParameters is successful', () => { - const validators = [ - { - address: generatorAddress, - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(50), - blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(3), - 
blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), - }, - ]; - beforeEach(async () => { - await bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), validators); + const precommitThreshold = BigInt(68); + const certificateThreshold = BigInt(68); + + let bftParamsStore: StateStore; + let votesStore: StateStore; + + beforeEach(() => { + bftParamsStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_PARAMETERS); + votesStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_VOTES); }); it('should store validators in order of the input', async () => { - const paramsStore = stateStore.getStore( - MODULE_STORE_PREFIX_BFT, - STORE_PREFIX_BFT_PARAMETERS, + const shuffledValidators = [...validators]; + + await bftMethod.setBFTParameters( + stateStore, + precommitThreshold, + certificateThreshold, + validators, ); - const params = await paramsStore.getWithSchema( + + const bftParams = await bftParamsStore.getWithSchema( utils.intToBuffer(104, 4), bftParametersSchema, ); - expect(params.validators).toHaveLength(3); - expect(params.validators[0].address).toEqual(validators[0].address); - expect(params.validators[1].address).toEqual(validators[1].address); - expect(params.validators[2].address).toEqual(validators[2].address); + expect(bftParams.validators).toHaveLength(3); + expect(bftParams.validators).toEqual(shuffledValidators); }); it('should store BFT parameters with height maxHeightPrevoted + 1 if blockBFTInfo does not exist', async () => { - const votesStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_VOTES); await votesStore.setWithSchema( EMPTY_KEY, { @@ -1008,60 +943,60 @@ describe('BFT Method', () => { bftVotesSchema, ); - await bftMethod.setBFTParameters(stateStore, BigInt(68), BigInt(68), [ - { - address: generatorAddress, - bftWeight: BigInt(50), - generatorKey: utils.getRandomBytes(32), - blsKey: utils.getRandomBytes(48), - }, - { - address: utils.getRandomBytes(20), - bftWeight: 
BigInt(50), - generatorKey: utils.getRandomBytes(32), - blsKey: utils.getRandomBytes(48), - }, - { - address: utils.getRandomBytes(20), - bftWeight: BigInt(3), - generatorKey: utils.getRandomBytes(32), - blsKey: utils.getRandomBytes(48), - }, - ]); - - const paramsStore = stateStore.getStore( - MODULE_STORE_PREFIX_BFT, - STORE_PREFIX_BFT_PARAMETERS, + await bftMethod.setBFTParameters( + stateStore, + precommitThreshold, + certificateThreshold, + validators, ); + await expect( - paramsStore.getWithSchema(utils.intToBuffer(11, 4), bftParametersSchema), + bftParamsStore.getWithSchema( + utils.intToBuffer(11, 4), + bftParametersSchema, + ), ).toResolve(); }); it('should store BFT parameters with height latest blockBFTInfo + 1', async () => { - const paramsStore = stateStore.getStore( - MODULE_STORE_PREFIX_BFT, - STORE_PREFIX_BFT_PARAMETERS, + await bftMethod.setBFTParameters( + stateStore, + precommitThreshold, + certificateThreshold, + validators, ); + await expect( - paramsStore.getWithSchema(utils.intToBuffer(104, 4), bftParametersSchema), + bftParamsStore.getWithSchema( + utils.intToBuffer(104, 4), + bftParametersSchema, + ), ).toResolve(); }); it('should store new validators hash', async () => { - const paramsStore = stateStore.getStore( - MODULE_STORE_PREFIX_BFT, - STORE_PREFIX_BFT_PARAMETERS, + await bftMethod.setBFTParameters( + stateStore, + precommitThreshold, + certificateThreshold, + validators, ); - const params = await paramsStore.getWithSchema( + + const bftParams = await bftParamsStore.getWithSchema( utils.intToBuffer(104, 4), bftParametersSchema, ); - expect(params.validatorsHash).not.toEqual(params30.validatorsHash); + expect(bftParams.validatorsHash).not.toEqual(params30.validatorsHash); }); it('should not update existing validators on bft votes', async () => { - const votesStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_VOTES); + await bftMethod.setBFTParameters( + stateStore, + precommitThreshold, + certificateThreshold, + 
validators, + ); + const voteState = await votesStore.getWithSchema(EMPTY_KEY, bftVotesSchema); expect( voteState.activeValidatorsVoteInfo.find(v => v.address.equals(generatorAddress)), @@ -1073,7 +1008,13 @@ describe('BFT Method', () => { }); it('should insert new validators into active validators with initial values', async () => { - const votesStore = stateStore.getStore(MODULE_STORE_PREFIX_BFT, STORE_PREFIX_BFT_VOTES); + await bftMethod.setBFTParameters( + stateStore, + precommitThreshold, + certificateThreshold, + validators, + ); + const voteState = await votesStore.getWithSchema(EMPTY_KEY, bftVotesSchema); expect(voteState.activeValidatorsVoteInfo).toHaveLength(3); expect( @@ -1087,45 +1028,25 @@ describe('BFT Method', () => { }); }); - describe('validatorsHash', () => { - it('should sort validators ordered lexicographically by blsKey and include certificateThreshold', () => { - const accounts = [ + describe('computeValidatorsHash', () => { + it('should calculate correct validators hash', () => { + const activeValidators = [ { - address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), bftWeight: BigInt(20), }, { - address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48), - generatorKey: utils.getRandomBytes(32), bftWeight: BigInt(20), }, ]; - validatorsMethod.getValidatorKeys.mockImplementation((_, address: Buffer) => { - return { blsKey: accounts.find(k => k.address.equals(address))?.blsKey }; - }); - - const validatorsHash = computeValidatorsHash(accounts, BigInt(99)); + const certificateThreshold = BigInt(99); - const sortedAccounts = [...accounts]; + const validatorsHash = computeValidatorsHash(activeValidators, certificateThreshold); expect(validatorsHash).toEqual( utils.hash( - codec.encode(validatorsHashInputSchema, { - activeValidators: [ - { - blsKey: sortedAccounts[0].blsKey, - bftWeight: sortedAccounts[0].bftWeight, - }, - { - blsKey: sortedAccounts[1].blsKey, - bftWeight: 
sortedAccounts[1].bftWeight, - }, - ], - certificateThreshold: BigInt(99), - }), + codec.encode(validatorsHashInputSchema, { activeValidators, certificateThreshold }), ), ); }); diff --git a/framework/test/unit/engine/consensus/certificate_generation/commit_list.spec.ts b/framework/test/unit/engine/consensus/certificate_generation/commit_list.spec.ts index 2be80a2e41c..003f8253585 100644 --- a/framework/test/unit/engine/consensus/certificate_generation/commit_list.spec.ts +++ b/framework/test/unit/engine/consensus/certificate_generation/commit_list.spec.ts @@ -223,5 +223,25 @@ describe('CommitList', () => { // Assert expect(commitList.exists(singleSampleCommit)).toBeFalse(); }); + + it('should not delete anything if the height does not exist', () => { + const originalSize = commitList.size(); + + commitList.deleteSingle({ + ...singleSampleCommit, + height: 9999, + }); + expect(commitList.size()).toBe(originalSize); + }); + + it('should not delete anything if commit does not exist', () => { + const originalSize = commitList.size(); + + commitList.deleteSingle({ + ...singleSampleCommit, + validatorAddress: utils.getRandomBytes(20), + }); + expect(commitList.size()).toBe(originalSize); + }); }); }); diff --git a/framework/test/unit/engine/consensus/certificate_generation/commit_pool.spec.ts b/framework/test/unit/engine/consensus/certificate_generation/commit_pool.spec.ts index 2b6d26f41ef..130c712ea04 100644 --- a/framework/test/unit/engine/consensus/certificate_generation/commit_pool.spec.ts +++ b/framework/test/unit/engine/consensus/certificate_generation/commit_pool.spec.ts @@ -1062,7 +1062,7 @@ describe('CommitPool', () => { it.todo(''); }); - describe('_getMaxRemovalHeight', () => { + describe('getMaxRemovalHeight', () => { let blockHeader: BlockHeader; const finalizedHeight = 1010; @@ -1085,14 +1085,54 @@ describe('CommitPool', () => { when(getBlockHeaderByHeight).calledWith(finalizedHeight).mockReturnValue(blockHeader); }); it('should return successfully for an 
existing block header at finalizedHeight', async () => { - const maxRemovalHeight = await commitPool['_getMaxRemovalHeight'](); + const maxRemovalHeight = await commitPool.getMaxRemovalHeight(); expect(maxRemovalHeight).toBe(blockHeader.aggregateCommit.height); }); it('should throw an error for non-existent block header at finalizedHeight', async () => { chain.finalizedHeight = finalizedHeight + 1; - await expect(commitPool['_getMaxRemovalHeight']()).rejects.toThrow(NotFoundError); + await expect(commitPool.getMaxRemovalHeight()).rejects.toThrow(NotFoundError); + }); + + it('should return minCertifyHeight -1 if the finalizedBlock.aggregateCommit.height is smaller', async () => { + const finalizedBlockHeader = createFakeBlockHeader({ + height: 25520, + timestamp: finalizedHeight * 10, + aggregateCommit: { + aggregationBits: Buffer.alloc(0), + certificateSignature: Buffer.alloc(0), + height: 500, + }, + }); + when(getBlockHeaderByHeight) + .calledWith(finalizedHeight) + .mockReturnValue(finalizedBlockHeader); + const minimumCertifyHeight = 25519; + (commitPool as any)['_minCertifyHeight'] = minimumCertifyHeight; + + await expect(commitPool.getMaxRemovalHeight()).resolves.toEqual(minimumCertifyHeight - 1); + }); + + it('should return finalizedBlock.aggregateCommit.height if the minCertifyHeight - 1 is smaller', async () => { + const finalizedBlockHeader = createFakeBlockHeader({ + height: 25520, + timestamp: finalizedHeight * 10, + aggregateCommit: { + aggregationBits: Buffer.alloc(0), + certificateSignature: Buffer.alloc(0), + height: 25520, + }, + }); + when(getBlockHeaderByHeight) + .calledWith(finalizedHeight) + .mockReturnValue(finalizedBlockHeader); + const minimumCertifyHeight = 25519; + (commitPool as any)['_minCertifyHeight'] = minimumCertifyHeight; + + await expect(commitPool.getMaxRemovalHeight()).resolves.toEqual( + finalizedBlockHeader.aggregateCommit.height, + ); }); }); diff --git a/framework/test/unit/engine/consensus/consensus.spec.ts 
b/framework/test/unit/engine/consensus/consensus.spec.ts index 698f90e945e..2ca4d8b2331 100644 --- a/framework/test/unit/engine/consensus/consensus.spec.ts +++ b/framework/test/unit/engine/consensus/consensus.spec.ts @@ -73,6 +73,7 @@ describe('consensus', () => { dataAccess: { getBlockHeaderByHeight: jest.fn().mockResolvedValue(lastBlock.header), }, + genesisHeight: 0, } as unknown as Chain; network = { registerEndpoint: jest.fn(), diff --git a/framework/test/unit/engine/generator/endpoint.spec.ts b/framework/test/unit/engine/generator/endpoint.spec.ts index 774c4db0673..36df0b60c1b 100644 --- a/framework/test/unit/engine/generator/endpoint.spec.ts +++ b/framework/test/unit/engine/generator/endpoint.spec.ts @@ -34,6 +34,7 @@ import { previouslyGeneratedInfoSchema, } from '../../../../src/engine/generator/schemas'; import { fakeLogger } from '../../../utils/mocks'; +import { SingleCommitHandler } from '../../../../src/engine/generator/single_commit_handler'; describe('generator endpoint', () => { const logger: Logger = fakeLogger; @@ -104,6 +105,9 @@ describe('generator endpoint', () => { endpoint.init({ generatorDB: db, genesisHeight: 0, + singleCommitHandler: { + initSingleCommits: jest.fn(), + } as unknown as SingleCommitHandler, }); }); @@ -233,6 +237,27 @@ describe('generator endpoint', () => { expect(endpoint['_keypairs'].has(defaultEncryptedKeys.address)).toBeTrue(); }); + it('should create single commits for the address', async () => { + await expect( + endpoint.updateStatus({ + logger, + params: { + address: address.getLisk32AddressFromAddress(defaultEncryptedKeys.address), + enable: true, + password: defaultPassword, + ...bftProps, + }, + chainID, + }), + ).resolves.toEqual({ + address: address.getLisk32AddressFromAddress(defaultEncryptedKeys.address), + enabled: true, + }); + expect(endpoint['_singleCommitHandler'].initSingleCommits).toHaveBeenCalledWith( + defaultEncryptedKeys.address, + ); + }); + it('should accept if BFT properties specified are zero 
and there is no previous values', async () => { await db.del(Buffer.concat([GENERATOR_STORE_INFO_PREFIX, defaultEncryptedKeys.address])); await expect( @@ -403,6 +428,8 @@ describe('generator endpoint', () => { }); expect(resp.status).toHaveLength(2); expect(resp.status[0].address).not.toBeInstanceOf(Buffer); + expect(resp.status[0].blsKey).toBeString(); + expect(resp.status[0].generatorKey).toBeString(); }); }); diff --git a/framework/test/unit/engine/generator/generator.spec.ts b/framework/test/unit/engine/generator/generator.spec.ts index 0f7fc545188..1bf2a70d45d 100644 --- a/framework/test/unit/engine/generator/generator.spec.ts +++ b/framework/test/unit/engine/generator/generator.spec.ts @@ -14,11 +14,9 @@ import * as fs from 'fs'; import { EventEmitter } from 'events'; import { BlockAssets, Chain, Transaction } from '@liskhq/lisk-chain'; -import { bls, utils, address as cryptoAddress, legacy } from '@liskhq/lisk-cryptography'; +import { utils, address as cryptoAddress } from '@liskhq/lisk-cryptography'; import { InMemoryDatabase, Database } from '@liskhq/lisk-db'; import { codec } from '@liskhq/lisk-codec'; -import { when } from 'jest-when'; -import { Mnemonic } from '@liskhq/lisk-passphrase'; import { Generator } from '../../../../src/engine/generator'; import { Consensus } from '../../../../src/engine/generator/types'; import { Network } from '../../../../src/engine/network'; @@ -34,11 +32,12 @@ import { plainGeneratorKeysSchema, } from '../../../../src/engine/generator/schemas'; import { BFTModule } from '../../../../src/engine/bft'; -import { createFakeBlockHeader } from '../../../../src/testing'; import { ABI } from '../../../../src/abi'; import { defaultConfig } from '../../../../src/testing/fixtures'; import { testing } from '../../../../src'; import { GeneratorStore } from '../../../../src/engine/generator/generator_store'; +import { CONSENSUS_EVENT_FINALIZED_HEIGHT_CHANGED } from '../../../../src/engine/consensus/constants'; +import { 
SingleCommitHandler } from '../../../../src/engine/generator/single_commit_handler'; describe('generator', () => { const logger = fakeLogger; @@ -189,6 +188,7 @@ describe('generator', () => { }), ); } + jest.spyOn(SingleCommitHandler.prototype, 'initAllSingleCommits'); }); it('should load all 101 validators', async () => { @@ -202,7 +202,6 @@ describe('generator', () => { }); it('should handle finalized height change between maxRemovalHeight and max height precommitted', async () => { - jest.spyOn(generator, '_handleFinalizedHeightChanged' as any).mockReturnValue([] as never); jest .spyOn(generator['_bft'].method, 'getBFTHeights') .mockResolvedValue({ maxHeightPrecommitted: 515, maxHeightCertified: 313 } as never); @@ -214,7 +213,7 @@ describe('generator', () => { logger, genesisHeight: 0, }); - expect(generator['_handleFinalizedHeightChanged']).toHaveBeenCalledWith(200, 515); + expect(generator['_singleCommitHandler'].initAllSingleCommits).toHaveBeenCalled(); }); }); @@ -640,173 +639,32 @@ describe('generator', () => { }); describe('events CONSENSUS_EVENT_FINALIZED_HEIGHT_CHANGED', () => { - const passphrase = Mnemonic.generateMnemonic(256); - const keys = legacy.getPrivateAndPublicKeyFromPassphrase(passphrase); - const address = cryptoAddress.getAddressFromPublicKey(keys.publicKey); - const blsSecretKey = bls.generatePrivateKey(Buffer.from(passphrase, 'utf-8')); - const keypair = { - ...keys, - blsSecretKey, - blsPublicKey: bls.getPublicKeyFromPrivateKey(blsSecretKey), - }; - const blsKey = bls.getPublicKeyFromPrivateKey(keypair.blsSecretKey); - const blockHeader = createFakeBlockHeader(); - - describe('when generator is a standby validator', () => { - beforeEach(async () => { - generator['_keypairs'].set(address, keypair); - when(generator['_bft'].method.existBFTParameters as jest.Mock) - .calledWith(expect.anything(), 1) - .mockResolvedValue(true as never) - .calledWith(expect.anything(), 12) - .mockResolvedValue(true as never) - .calledWith(expect.anything(), 
21) - .mockResolvedValue(true as never) - .calledWith(expect.anything(), 51) - .mockResolvedValue(false as never) - .calledWith(expect.anything(), 55) - .mockResolvedValue(true as never); - when(generator['_bft'].method.getBFTParametersActiveValidators as jest.Mock) - .calledWith(expect.anything(), 11) - .mockResolvedValue({ - validators: [{ address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48) }], - }) - .calledWith(expect.anything(), 20) - .mockResolvedValue({ - validators: [{ address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48) }], - }) - .calledWith(expect.anything(), 50) - .mockResolvedValue({ - validators: [{ address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48) }], - }) - .calledWith(expect.anything(), 54) - .mockResolvedValue({ validators: [] }); - - jest - .spyOn(generator['_chain'].dataAccess, 'getBlockHeaderByHeight') - .mockResolvedValue(blockHeader as never); - await generator.init({ - blockchainDB, - generatorDB, - logger, - genesisHeight: 0, - }); - await generator.start(); - jest.spyOn(generator['_consensus'], 'certifySingleCommit'); - }); - - afterAll(async () => { - await generator.stop(); - }); - it('should not call certifySingleCommit when standby validator creates block', async () => { - // Act - await Promise.all(generator['_handleFinalizedHeightChanged'](10, 50)); - - // Assert - expect(generator['_consensus'].certifySingleCommit).toHaveBeenCalledTimes(0); + beforeEach(async () => { + jest.spyOn(SingleCommitHandler.prototype, 'handleFinalizedHeightChanged'); + await generator.init({ + blockchainDB, + generatorDB, + logger, + genesisHeight: 0, }); + await generator.start(); }); - describe('when generator is an active validator', () => { - beforeEach(async () => { - generator['_keypairs'].set(address, keypair); - when(generator['_bft'].method.existBFTParameters as jest.Mock) - .calledWith(expect.anything(), 1) - .mockResolvedValue(true as never) - .calledWith(expect.anything(), 12) - 
.mockResolvedValue(true as never) - .calledWith(expect.anything(), 21) - .mockResolvedValue(true as never) - .calledWith(expect.anything(), 51) - .mockResolvedValue(false as never) - .calledWith(expect.anything(), 55) - .mockResolvedValue(true as never); - when(generator['_bft'].method.getBFTParametersActiveValidators as jest.Mock) - .calledWith(expect.anything(), 11) - .mockResolvedValue({ validators: [{ address, blsKey }] }) - .calledWith(expect.anything(), 20) - .mockResolvedValue({ validators: [{ address, blsKey: Buffer.alloc(48) }] }) - .calledWith(expect.anything(), 50) - .mockResolvedValue({ validators: [{ address, blsKey }] }) - .calledWith(expect.anything(), 54) - .mockResolvedValue({ validators: [] }); - - jest - .spyOn(generator['_chain'].dataAccess, 'getBlockHeaderByHeight') - .mockResolvedValue(blockHeader as never); - await generator.init({ - blockchainDB, - generatorDB, - logger, - genesisHeight: 0, - }); - await generator.start(); - jest.spyOn(generator['_consensus'], 'certifySingleCommit'); - }); - - afterAll(async () => { - await generator.stop(); - }); - - it('should call certifySingleCommit for range when params for height + 1 exist', async () => { - // Act - await Promise.all(generator['_handleFinalizedHeightChanged'](10, 50)); - - // Assert - expect(generator['_consensus'].certifySingleCommit).toHaveBeenCalledTimes(2); - expect(generator['_consensus'].certifySingleCommit).toHaveBeenCalledWith(blockHeader, { - address, - blsPublicKey: blsKey, - blsSecretKey: keypair.blsSecretKey, - }); - }); - - it('should not call certifySingleCommit for range when params for height + 1 does not exist', async () => { - // Act - await Promise.all(generator['_handleFinalizedHeightChanged'](51, 54)); - - // Assert - expect(generator['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); - }); - - it('should not call certifySingleCommit for finalized height + 1 when BFT params exist', async () => { - // Act - await 
Promise.all(generator['_handleFinalizedHeightChanged'](53, 54)); - - // Assert - expect(generator['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); - }); - - it('should not call certifySingleCommit for the validator who has not registered bls key', async () => { - // Act - await Promise.all(generator['_handleFinalizedHeightChanged'](20, 21)); - - // Assert - expect(generator['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); - }); - - it('should call certifySingleCommit for finalized height + 1 when BFT params does not exist', async () => { - // For height 50, it should ceritifySingleCommit event though BFTParameter does not exist - await Promise.all(generator['_handleFinalizedHeightChanged'](15, 50)); + afterEach(async () => { + await generator.stop(); + }); - // Assert - expect(generator['_consensus'].certifySingleCommit).toHaveBeenCalledTimes(1); - expect(generator['_consensus'].certifySingleCommit).toHaveBeenCalledWith(blockHeader, { - address, - blsPublicKey: blsKey, - blsSecretKey: keypair.blsSecretKey, - }); + it('should call singleCommitHandler.handleFinalizedHeightChanged', async () => { + generator['_consensus'].events.emit(CONSENSUS_EVENT_FINALIZED_HEIGHT_CHANGED, { + from: 30001, + to: 30003, }); + await Promise.resolve(); - it('should not call certifySingleCommit when validator is not active at the height', async () => { - // height 20 returns existBFTParameters true, but no active validators. 
- // Therefore, it should not certify single commit - // Act - await Promise.all(generator['_handleFinalizedHeightChanged'](15, 54)); - - // Assert - expect(generator['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); - }); + expect(generator['_singleCommitHandler'].handleFinalizedHeightChanged).toHaveBeenCalledWith( + 30001, + 30003, + ); }); }); }); diff --git a/framework/test/unit/engine/generator/single_commit_handler.spec.ts b/framework/test/unit/engine/generator/single_commit_handler.spec.ts new file mode 100644 index 00000000000..bd532d9484f --- /dev/null +++ b/framework/test/unit/engine/generator/single_commit_handler.spec.ts @@ -0,0 +1,301 @@ +/* + * Copyright © 2021 Lisk Foundation + * + * See the LICENSE file at the top-level directory of this distribution + * for licensing information. + * + * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation, + * no part of this software, including this file, may be copied, modified, + * propagated, or distributed except according to the terms contained in the + * LICENSE file. + * + * Removal or modification of this copyright notice is prohibited. 
+ */ +import { EventEmitter } from 'events'; +import { dataStructures } from '@liskhq/lisk-utils'; +import { Chain } from '@liskhq/lisk-chain'; +import { bls, utils, address as cryptoAddress, legacy } from '@liskhq/lisk-cryptography'; +import { InMemoryDatabase, Database } from '@liskhq/lisk-db'; +import { when } from 'jest-when'; +import { Mnemonic } from '@liskhq/lisk-passphrase'; +import { Consensus, Keypair } from '../../../../src/engine/generator/types'; +import { fakeLogger } from '../../../utils/mocks'; +import { BFTModule } from '../../../../src/engine/bft'; +import { createFakeBlockHeader } from '../../../../src/testing'; +import { SingleCommitHandler } from '../../../../src/engine/generator/single_commit_handler'; +import { testing } from '../../../../src'; + +describe('SingleCommitHandler', () => { + const logger = fakeLogger; + + let chain: Chain; + let consensus: Consensus; + let keypairs: dataStructures.BufferMap; + let blockchainDB: Database; + let bft: BFTModule; + let consensusEvent: EventEmitter; + let singleCommitHandler: SingleCommitHandler; + + beforeEach(() => { + blockchainDB = new InMemoryDatabase() as never; + keypairs = new dataStructures.BufferMap(); + for (const key of testing.fixtures.keysList.keys) { + keypairs.set(cryptoAddress.getAddressFromLisk32Address(key.address), { + publicKey: Buffer.from(key.plain.generatorKey, 'hex'), + privateKey: Buffer.from(key.plain.generatorPrivateKey, 'hex'), + blsPublicKey: Buffer.from(key.plain.blsKey, 'hex'), + blsSecretKey: Buffer.from(key.plain.blsPrivateKey, 'hex'), + }); + } + chain = { + chainID: utils.getRandomBytes(32), + lastBlock: { + header: { + id: Buffer.from('6846255774763267134'), + height: 9187702, + timestamp: 93716450, + }, + transactions: [], + }, + finalizedHeight: 100, + dataAccess: { + getBlockHeaderByHeight: jest.fn(), + }, + constants: { + chainID: Buffer.from('chainID'), + }, + } as never; + consensusEvent = new EventEmitter(); + consensus = { + execute: jest.fn(), + syncing: 
jest.fn().mockReturnValue(false), + getAggregateCommit: jest.fn(), + certifySingleCommit: jest.fn(), + getConsensusParams: jest.fn().mockResolvedValue({ + currentValidators: [], + implyMaxPrevote: true, + maxHeightCertified: 0, + }), + getMaxRemovalHeight: jest.fn().mockResolvedValue(0), + events: consensusEvent, + } as never; + bft = { + beforeTransactionsExecute: jest.fn(), + method: { + getBFTHeights: jest.fn().mockResolvedValue({ + maxHeightPrevoted: 0, + maxHeightPrecommitted: 0, + maxHeightCertified: 0, + }), + setBFTParameters: jest.fn(), + getBFTParameters: jest.fn().mockResolvedValue({ validators: [] }), + getBFTParametersActiveValidators: jest.fn().mockResolvedValue({ validators: [] }), + existBFTParameters: jest.fn().mockResolvedValue(false), + getGeneratorAtTimestamp: jest.fn(), + impliesMaximalPrevotes: jest.fn().mockResolvedValue(false), + getSlotNumber: jest.fn(), + getSlotTime: jest.fn(), + }, + } as never; + singleCommitHandler = new SingleCommitHandler( + logger, + chain, + consensus, + bft, + keypairs, + blockchainDB, + ); + }); + + describe('events CONSENSUS_EVENT_FINALIZED_HEIGHT_CHANGED', () => { + const passphrase = Mnemonic.generateMnemonic(256); + const keys = legacy.getPrivateAndPublicKeyFromPassphrase(passphrase); + const address = cryptoAddress.getAddressFromPublicKey(keys.publicKey); + const blsSecretKey = bls.generatePrivateKey(Buffer.from(passphrase, 'utf-8')); + const keypair = { + ...keys, + blsSecretKey, + blsPublicKey: bls.getPublicKeyFromPrivateKey(blsSecretKey), + }; + const blsKey = bls.getPublicKeyFromPrivateKey(keypair.blsSecretKey); + const blockHeader = createFakeBlockHeader(); + + describe('when generator is a standby validator', () => { + beforeEach(() => { + keypairs.set(address, keypair); + when(singleCommitHandler['_bft'].method.existBFTParameters as jest.Mock) + .calledWith(expect.anything(), 1) + .mockResolvedValue(true as never) + .calledWith(expect.anything(), 12) + .mockResolvedValue(true as never) + 
.calledWith(expect.anything(), 21) + .mockResolvedValue(true as never) + .calledWith(expect.anything(), 51) + .mockResolvedValue(false as never) + .calledWith(expect.anything(), 55) + .mockResolvedValue(true as never); + when(singleCommitHandler['_bft'].method.getBFTParametersActiveValidators as jest.Mock) + .calledWith(expect.anything(), 11) + .mockResolvedValue({ + validators: [{ address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48) }], + }) + .calledWith(expect.anything(), 20) + .mockResolvedValue({ + validators: [{ address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48) }], + }) + .calledWith(expect.anything(), 50) + .mockResolvedValue({ + validators: [{ address: utils.getRandomBytes(20), blsKey: utils.getRandomBytes(48) }], + }) + .calledWith(expect.anything(), 54) + .mockResolvedValue({ validators: [] }); + + jest + .spyOn(singleCommitHandler['_chain'].dataAccess, 'getBlockHeaderByHeight') + .mockResolvedValue(blockHeader as never); + jest.spyOn(singleCommitHandler['_consensus'], 'certifySingleCommit'); + }); + + it('should not call certifySingleCommit when standby validator creates block', async () => { + // Act + await singleCommitHandler.handleFinalizedHeightChanged(10, 50); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).toHaveBeenCalledTimes(0); + }); + }); + + describe('when generator is an active validator', () => { + beforeEach(() => { + keypairs.set(address, keypair); + when(singleCommitHandler['_bft'].method.existBFTParameters as jest.Mock) + .calledWith(expect.anything(), 1) + .mockResolvedValue(true as never) + .calledWith(expect.anything(), 12) + .mockResolvedValue(true as never) + .calledWith(expect.anything(), 21) + .mockResolvedValue(true as never) + .calledWith(expect.anything(), 51) + .mockResolvedValue(false as never) + .calledWith(expect.anything(), 55) + .mockResolvedValue(true as never); + when(singleCommitHandler['_bft'].method.getBFTParametersActiveValidators as jest.Mock) + 
.calledWith(expect.anything(), 11) + .mockResolvedValue({ validators: [{ address, blsKey }] }) + .calledWith(expect.anything(), 20) + .mockResolvedValue({ validators: [{ address, blsKey: Buffer.alloc(48) }] }) + .calledWith(expect.anything(), 50) + .mockResolvedValue({ validators: [{ address, blsKey }] }) + .calledWith(expect.anything(), 54) + .mockResolvedValue({ validators: [] }); + + jest + .spyOn(singleCommitHandler['_chain'].dataAccess, 'getBlockHeaderByHeight') + .mockResolvedValue(blockHeader as never); + jest.spyOn(singleCommitHandler['_consensus'], 'certifySingleCommit'); + }); + + it('should call certifySingleCommit for range when params for height + 1 exist', async () => { + // Act + await singleCommitHandler.handleFinalizedHeightChanged(10, 50); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).toHaveBeenCalledTimes(2); + expect(singleCommitHandler['_consensus'].certifySingleCommit).toHaveBeenCalledWith( + blockHeader, + { + address, + blsPublicKey: blsKey, + blsSecretKey: keypair.blsSecretKey, + }, + ); + }); + + it('should not call certifySingleCommit for range when params for height + 1 does not exist', async () => { + // Act + await singleCommitHandler.handleFinalizedHeightChanged(51, 54); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); + }); + + it('should not call certifySingleCommit for finalized height + 1 when BFT params exist', async () => { + // Act + await singleCommitHandler.handleFinalizedHeightChanged(53, 54); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); + }); + + it('should not call certifySingleCommit for the validator who has not registered bls key', async () => { + // Act + await singleCommitHandler.handleFinalizedHeightChanged(20, 21); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); + }); + + it('should call certifySingleCommit for finalized 
height + 1 when BFT params does not exist', async () => { + // For height 50, it should certifySingleCommit even though BFTParameter does not exist + await singleCommitHandler.handleFinalizedHeightChanged(15, 50); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).toHaveBeenCalledTimes(1); + expect(singleCommitHandler['_consensus'].certifySingleCommit).toHaveBeenCalledWith( + blockHeader, + { + address, + blsPublicKey: blsKey, + blsSecretKey: keypair.blsSecretKey, + }, + ); + }); + + it('should not call certifySingleCommit when validator is not active at the height', async () => { + // height 20 returns existBFTParameters true, but no active validators. + // Therefore, it should not certify single commit + // Act + await singleCommitHandler.handleFinalizedHeightChanged(15, 54); + + // Assert + expect(singleCommitHandler['_consensus'].certifySingleCommit).not.toHaveBeenCalled(); + }); + }); + + describe('when previous finalized height change and maxRemovalHeight is non zero', () => { + beforeEach(() => { + jest.spyOn(singleCommitHandler, '_handleFinalizedHeightChanged' as never); + jest.spyOn(singleCommitHandler, '_certifySingleCommitForChangedHeight' as never); + jest.spyOn(singleCommitHandler, '_certifySingleCommit' as never); + }); + + it('should not call certifySingleCommit when getMaxRemovalHeight is higher than next finalized height', async () => { + jest.spyOn(consensus, 'getMaxRemovalHeight').mockResolvedValue(30000); + await singleCommitHandler.handleFinalizedHeightChanged(0, 25520); + + expect(singleCommitHandler['_handleFinalizedHeightChanged']).not.toHaveBeenCalledWith( + address, + 30000, + 25520, + ); + }); + + it('should call certifySingleCommit when getMaxRemovalHeight is lower than next finalized height', async () => { + jest.spyOn(consensus, 'getMaxRemovalHeight').mockResolvedValue(30000); + await singleCommitHandler.handleFinalizedHeightChanged(30001, 30003); + + 
expect(singleCommitHandler['_handleFinalizedHeightChanged']).toHaveBeenCalledWith( + expect.any(Buffer), + 30001, + 30003, + ); + expect(singleCommitHandler['_certifySingleCommitForChangedHeight']).toHaveBeenCalledTimes( + 1 * keypairs.size, + ); + expect(singleCommitHandler['_certifySingleCommit']).toHaveBeenCalledTimes( + 1 * keypairs.size, + ); + }); + }); + }); +}); diff --git a/framework/test/unit/engine/legacy/endpoint.spec.ts b/framework/test/unit/engine/legacy/endpoint.spec.ts index 70ee7189402..0a6af6282e0 100644 --- a/framework/test/unit/engine/legacy/endpoint.spec.ts +++ b/framework/test/unit/engine/legacy/endpoint.spec.ts @@ -24,18 +24,39 @@ import { transactionSchemaV2, } from '../../../../src/engine/legacy/schemas'; import { LegacyBlockJSON, LegacyTransactionJSON } from '../../../../src/engine/legacy/types'; +import { Storage } from '../../../../src/engine/legacy/storage'; const bufferToHex = (b: Buffer) => Buffer.from(b).toString('hex'); +const randomSnapshotBlockID = utils.getRandomBytes(20); +const expectedSnapshotBlockID = utils.getRandomBytes(20); describe('Legacy endpoint', () => { + const { header, payload } = blockFixtures[0]; let encodedBlock: Buffer; let legacyEndpoint: LegacyEndpoint; beforeEach(() => { - legacyEndpoint = new LegacyEndpoint({ db: new InMemoryDatabase() as any }); + legacyEndpoint = new LegacyEndpoint({ + db: new InMemoryDatabase() as any, + legacyConfig: { + sync: true, + brackets: [ + { + startHeight: 0, + snapshotBlockID: randomSnapshotBlockID.toString('hex'), + snapshotHeight: 100, + }, + { + startHeight: 16270306, + snapshotBlockID: expectedSnapshotBlockID.toString('hex'), + snapshotHeight: 16270316, + }, + ], + }, + }); encodedBlock = codec.encode(blockSchemaV2, { - header: codec.encode(blockHeaderSchemaV2, blockFixtures[0].header), - payload: blockFixtures[0].payload, + header: codec.encode(blockHeaderSchemaV2, header), + payload, }); jest.spyOn(legacyEndpoint.storage, 'getBlockByID').mockResolvedValue(encodedBlock); 
@@ -44,25 +65,19 @@ describe('Legacy endpoint', () => { describe('LegacyEndpoint', () => { const matchBlockExpectations = (block: LegacyBlockJSON) => { - expect(block.header.id).toEqual(bufferToHex(blockFixtures[0].header.id)); - expect(block.header.version).toEqual(blockFixtures[0].header.version); - expect(block.header.timestamp).toEqual(blockFixtures[0].header.timestamp); - expect(block.header.height).toEqual(blockFixtures[0].header.height); - expect(block.header.previousBlockID).toEqual( - bufferToHex(blockFixtures[0].header.previousBlockID), - ); - expect(block.header.transactionRoot).toEqual( - bufferToHex(blockFixtures[0].header.transactionRoot), - ); - expect(block.header.generatorPublicKey).toEqual( - bufferToHex(blockFixtures[0].header.generatorPublicKey), - ); - expect(BigInt(block.header.reward as number)).toEqual(blockFixtures[0].header.reward); - expect(block.header.asset).toEqual(bufferToHex(blockFixtures[0].header.asset)); - expect(block.header.signature).toEqual(bufferToHex(blockFixtures[0].header.signature)); - - expect(block.payload).toHaveLength(blockFixtures[0].payload.length); - expect(block.payload[0]).toEqual(bufferToHex(blockFixtures[0].payload[0])); + expect(block.header.id).toEqual(bufferToHex(header.id)); + expect(block.header.version).toEqual(header.version); + expect(block.header.timestamp).toEqual(header.timestamp); + expect(block.header.height).toEqual(header.height); + expect(block.header.previousBlockID).toEqual(bufferToHex(header.previousBlockID)); + expect(block.header.transactionRoot).toEqual(bufferToHex(header.transactionRoot)); + expect(block.header.generatorPublicKey).toEqual(bufferToHex(header.generatorPublicKey)); + expect(BigInt(block.header.reward as number)).toEqual(header.reward); + expect(block.header.asset).toEqual(bufferToHex(header.asset)); + expect(block.header.signature).toEqual(bufferToHex(header.signature)); + + expect(block.payload).toHaveLength(payload.length); + 
expect(block.payload[0]).toEqual(bufferToHex(payload[0])); }; const matchTxExpectations = ( @@ -83,7 +98,7 @@ describe('Legacy endpoint', () => { it('getBlockByID', async () => { const block = await legacyEndpoint.getBlockByID({ - params: { id: bufferToHex(blockFixtures[0].header.id) }, + params: { id: bufferToHex(header.id) }, } as any); matchBlockExpectations(block); @@ -91,14 +106,14 @@ describe('Legacy endpoint', () => { it('getBlockByHeight', async () => { const block = await legacyEndpoint.getBlockByHeight({ - params: { height: blockFixtures[0].header.height }, + params: { height: header.height }, } as any); matchBlockExpectations(block); }); it('getTransactionByID', async () => { - const tx = blockFixtures[0].payload[0]; + const tx = payload[0]; jest.spyOn(legacyEndpoint['storage'], 'getTransactionByID').mockResolvedValue(tx); const txId = utils.hash(tx).toString('hex'); @@ -110,8 +125,8 @@ describe('Legacy endpoint', () => { }); it('getTransactionsByBlockID', async () => { - const blockId = blockFixtures[0].header.id; - const tx = blockFixtures[0].payload[0]; + const blockId = header.id; + const tx = payload[0]; const txId = utils.hash(tx).toString('hex'); jest.spyOn(legacyEndpoint['storage'], 'getTransactionsByBlockID').mockResolvedValue([tx]); @@ -123,5 +138,43 @@ describe('Legacy endpoint', () => { expect(transactions).toBeArray(); matchTxExpectations(transactions[0], tx, txId); }); + + it('getLegacyBrackets', async () => { + const blockId = header.id; + const legacyConfig = { + sync: true, + brackets: [ + { + startHeight: header.height - 200, + snapshotBlockID: blockId.toString('hex'), + snapshotHeight: header.height, + }, + ], + }; + + const legacyStorage = new Storage(new InMemoryDatabase() as any); + await legacyStorage.setBracketInfo(blockId, { + startHeight: header.height - 200, + lastBlockHeight: header.height - 100, + snapshotBlockHeight: header.height, + }); + legacyEndpoint = new LegacyEndpoint({ + db: legacyStorage as any, + legacyConfig, + 
}); + + (legacyEndpoint as any)['storage'] = legacyStorage; + + const brackets = await legacyEndpoint.getLegacyBrackets({} as any); + + expect(brackets).toEqual([ + { + startHeight: legacyConfig.brackets[0].startHeight, + snapshotBlockID: legacyConfig.brackets[0].snapshotBlockID, + snapshotBlockHeight: header.height, + lastBlockHeight: header.height - 100, + }, + ]); + }); }); }); diff --git a/framework/test/unit/engine/legacy/fixtures.ts b/framework/test/unit/engine/legacy/fixtures.ts index 1b5e0baf1ce..5534e25e4b0 100644 --- a/framework/test/unit/engine/legacy/fixtures.ts +++ b/framework/test/unit/engine/legacy/fixtures.ts @@ -14,7 +14,7 @@ import { utils } from '@liskhq/lisk-cryptography'; import { regularMerkleTree } from '@liskhq/lisk-tree'; -import { encodeBlock } from '../../../../src/engine/legacy/codec'; +import { encodeBlock, encodeBlockHeader } from '../../../../src/engine/legacy/codec'; import { LegacyBlockHeader, LegacyBlockWithID } from '../../../../src/engine/legacy/types'; // Version 2 blocks @@ -354,18 +354,23 @@ export const createFakeLegacyBlockHeaderV2 = ( * @params start: Start height of the block range going backwards * @params numberOfBlocks: Number of blocks to be generated with decreasing height */ -export const getLegacyBlocksRangeV2 = (startHeight: number, numberOfBlocks: number): Buffer[] => { - const blocks: LegacyBlockWithID[] = []; +export const getLegacyBlockHeadersRangeV2 = ( + startHeight: number, + numberOfBlocks: number, +): Buffer[] => { + const blockHeaders: LegacyBlockHeader[] = []; for (let i = startHeight; i >= startHeight - numberOfBlocks; i -= 1) { // After the startHeight, all the blocks are generated with previousBlockID as previous height block ID const block = createFakeLegacyBlockHeaderV2({ height: i, previousBlockID: - i === startHeight ? utils.getRandomBytes(32) : blocks[startHeight - i - 1].header.id, + i === startHeight + ? 
utils.getRandomBytes(32) + : (blockHeaders[startHeight - i - 1].id as Buffer), }); - blocks.push(block); + blockHeaders.push(block.header); } - return blocks.map(b => encodeBlock(b)); + return blockHeaders.map(b => encodeBlockHeader(b)); }; diff --git a/framework/test/unit/engine/legacy/legacy_chain_handler.spec.ts b/framework/test/unit/engine/legacy/legacy_chain_handler.spec.ts index 15c52b8c850..c82dcf092de 100644 --- a/framework/test/unit/engine/legacy/legacy_chain_handler.spec.ts +++ b/framework/test/unit/engine/legacy/legacy_chain_handler.spec.ts @@ -15,10 +15,11 @@ import { utils } from '@liskhq/lisk-cryptography'; import { codec } from '@liskhq/lisk-codec'; +import { InMemoryDatabase } from '@liskhq/lisk-db'; import { LegacyConfig } from '../../../../src'; import { LegacyChainHandler } from '../../../../src/engine/legacy/legacy_chain_handler'; import { Network } from '../../../../src/engine/network'; -import { encodeBlock, encodeLegacyChainBracketInfo } from '../../../../src/engine/legacy/codec'; +import { encodeBlock } from '../../../../src/engine/legacy/codec'; import { Peer, LegacyBlock } from '../../../../src/engine/legacy/types'; import { getBlocksFromIdResponseSchema } from '../../../../src/engine/consensus/schema'; import { blockFixtures } from './fixtures'; @@ -27,7 +28,6 @@ import { fakeLogger } from '../../../utils/mocks'; const randomSnapshotBlockID = utils.getRandomBytes(20); const expectedSnapshotBlockID = utils.getRandomBytes(20); -// https://lisk.observer/block/19583716 describe('Legacy Chain Handler', () => { let legacyChainHandler: LegacyChainHandler; let legacyConfig: LegacyConfig; @@ -39,11 +39,6 @@ describe('Legacy Chain Handler', () => { legacyConfig = { sync: true, brackets: [ - { - startHeight: 0, - snapshotBlockID: randomSnapshotBlockID.toString('hex'), - snapshotHeight: 100, - }, { startHeight: 16270306, snapshotBlockID: expectedSnapshotBlockID.toString('hex'), @@ -55,13 +50,13 @@ describe('Legacy Chain Handler', () => { { peerId: 
'peerId-1', options: { - legacy: [expectedSnapshotBlockID], + legacy: [expectedSnapshotBlockID.toString('hex')], }, }, { peerId: 'peerId-2', options: { - legacy: [randomSnapshotBlockID, expectedSnapshotBlockID], + legacy: [randomSnapshotBlockID.toString('hex'), expectedSnapshotBlockID.toString('hex')], }, }, ]; @@ -74,48 +69,16 @@ describe('Legacy Chain Handler', () => { legacyChainHandler = new LegacyChainHandler({ legacyConfig, network, logger: fakeLogger }); await legacyChainHandler.init({ - db: { get: jest.fn(), write: jest.fn(), set: jest.fn() } as never, + db: new InMemoryDatabase() as never, }); jest.spyOn(legacyChainHandler['_network'], 'getConnectedPeers').mockImplementation(() => { return peers as any; }); - jest - .spyOn(legacyChainHandler['_storage'], 'getLegacyChainBracketInfo') - .mockReturnValueOnce( - encodeLegacyChainBracketInfo({ - startHeight: 0, - snapshotBlockHeight: 0, - lastBlockHeight: 0, - }) as any, // this means this bracket is already synced, since it's lastBlockHeight equals bracket's startHeight - ) - .mockReturnValueOnce( - encodeLegacyChainBracketInfo({ - startHeight: 16270306, - snapshotBlockHeight: 16270316, - lastBlockHeight: 16270316, - }) as any, - ); - jest .spyOn(legacyChainHandler['_storage'], 'getBlockByHeight') .mockReturnValueOnce(encodeBlock(legacyBlock16270316) as any); // we want to return blocks from this height ONCE - - // `getLegacyBlocksFromId` should return blocks in DESC order (starting from 16270316 (excluding) till 16270306) - const reversedFixtures = blockFixtures - .slice(0, blockFixtures.length - 1) - .sort((a, b) => b.header.height - a.header.height); - const encodedBlocks = reversedFixtures.map(block => encodeBlock(block)); - - jest - .spyOn(network, 'requestFromPeer') - .mockReturnValueOnce({ - data: codec.encode(getBlocksFromIdResponseSchema, { blocks: encodedBlocks }), - } as any) - .mockReturnValueOnce({ - data: [], - } as any); }); describe('constructor', () => { @@ -125,20 +88,106 @@ describe('Legacy 
Chain Handler', () => { }); describe('sync', () => { + beforeEach(() => { + // `getLegacyBlocksFromId` should return blocks in DESC order (starting from 16270316 (excluding) till 16270306) + const reversedFixtures = blockFixtures + .slice(0, blockFixtures.length - 1) + .sort((a, b) => b.header.height - a.header.height); + const encodedBlocks = reversedFixtures.map(block => encodeBlock(block)); + + jest + .spyOn(network, 'requestFromPeer') + .mockReturnValueOnce({ + data: codec.encode(getBlocksFromIdResponseSchema, { blocks: encodedBlocks }), + } as any) + .mockReturnValueOnce({ + data: codec.encode(getBlocksFromIdResponseSchema, { blocks: [] }), + } as any) + .mockReturnValueOnce({ + data: codec.encode(getBlocksFromIdResponseSchema, { blocks: [] }), + } as any); + }); it('should sync blocks in range for given config brackets', async () => { jest.spyOn(legacyChainHandler['_storage'], 'saveBlock'); - jest.spyOn(legacyChainHandler['_storage'], 'setLegacyChainBracketInfo'); + jest.spyOn(legacyChainHandler['_storage'], 'setBracketInfo'); jest.spyOn(legacyChainHandler['_network'], 'applyNodeInfo'); + jest.spyOn(legacyChainHandler as any, '_trySyncBlocks'); await legacyChainHandler.sync(); - // starting from 16270316 (excluding) till 16270306 = 10, // but we save blocks only if ```block.header.height > bracket.startHeight``` - expect(legacyChainHandler['_storage'].saveBlock).toHaveBeenCalledTimes(9); + expect(legacyChainHandler['_trySyncBlocks']).toHaveBeenCalledTimes(1); + }); + }); + + describe('_syncBlocks', () => { + let reversedFixtures; + let encodedBlocks: any[]; + beforeEach(() => { + reversedFixtures = blockFixtures + .slice(0, blockFixtures.length - 1) + .sort((a, b) => b.header.height - a.header.height); + encodedBlocks = reversedFixtures.map(block => encodeBlock(block)); + }); + it('should sync blocks in range for given config brackets', async () => { + jest + .spyOn(network, 'requestFromPeer') + .mockReturnValueOnce({ + data: 
codec.encode(getBlocksFromIdResponseSchema, { blocks: encodedBlocks }), + } as any) + .mockReturnValueOnce({ + data: codec.encode(getBlocksFromIdResponseSchema, { blocks: [] }), + } as any) + .mockReturnValueOnce({ + data: codec.encode(getBlocksFromIdResponseSchema, { blocks: [] }), + } as any); + jest.spyOn(legacyChainHandler['_storage'], 'saveBlock'); + jest.spyOn(legacyChainHandler['_storage'], 'setBracketInfo'); + jest.spyOn(legacyChainHandler['_network'], 'applyNodeInfo'); + jest.spyOn(legacyChainHandler as any, '_trySyncBlocks'); + await legacyChainHandler['_syncBlocks']( + legacyConfig.brackets[0], + legacyBlock16270316.header.id as Buffer, + 0, + ); + + expect(legacyChainHandler['_storage'].saveBlock).toHaveBeenCalledTimes(10); // should be 1, since if `lastBlock.header.height > bracket.startHeight` is skipped // & only the final `_updateBracketInfo(...)` is called - expect(legacyChainHandler['_storage'].setLegacyChainBracketInfo).toHaveBeenCalledTimes(1); + expect(legacyChainHandler['_storage'].setBracketInfo).toHaveBeenCalledTimes(1); + }); + + it('should throw error when no peers are found', async () => { + jest.spyOn(legacyChainHandler['_network'], 'getConnectedPeers').mockImplementation(() => []); + await expect( + legacyChainHandler['_syncBlocks']( + legacyConfig.brackets[0], + legacyBlock16270316.header.id as Buffer, + 0, + ), + ).rejects.toThrow('No peer found with legacy info.: Attempting to sync again after 12000 ms'); + }); + + it('should throw error when no peers are found with legacy data', async () => { + jest.spyOn(legacyChainHandler['_network'], 'getConnectedPeers').mockImplementation( + () => + [ + { + peerId: 'peerId-1', + options: { + legacy: [randomSnapshotBlockID.toString('hex')], + }, + }, + ] as any, + ); + await expect( + legacyChainHandler['_syncBlocks']( + legacyConfig.brackets[0], + legacyBlock16270316.header.id as Buffer, + 0, + ), + ).rejects.toThrow('No peer found with legacy info.: Attempting to sync again after 12000 ms'); 
}); }); }); diff --git a/framework/test/unit/engine/legacy/network_endpoint.spec.ts b/framework/test/unit/engine/legacy/network_endpoint.spec.ts index fb85b62f613..0a98e673ec8 100644 --- a/framework/test/unit/engine/legacy/network_endpoint.spec.ts +++ b/framework/test/unit/engine/legacy/network_endpoint.spec.ts @@ -19,13 +19,11 @@ import { codec } from '@liskhq/lisk-codec'; import { LegacyNetworkEndpoint } from '../../../../src/engine/legacy/network_endpoint'; import { loggerMock } from '../../../../src/testing/mocks'; import { Network } from '../../../../src/engine/network'; -import { - getBlocksFromIdRequestSchema, - getBlocksFromIdResponseSchema, -} from '../../../../src/engine/consensus/schema'; +import { getBlocksFromIdResponseSchema } from '../../../../src/engine/consensus/schema'; -import { getLegacyBlocksRangeV2 } from './fixtures'; -import { decodeBlock, encodeBlock } from '../../../../src/engine/legacy/codec'; +import { getLegacyBlockHeadersRangeV2 } from './fixtures'; +import { decodeBlockHeader } from '../../../../src/engine/legacy/codec'; +import { getLegacyBlocksFromIdRequestSchema } from '../../../../src/engine/legacy/schemas'; describe('Legacy P2P network endpoint', () => { const defaultPeerID = 'peer-id'; @@ -65,40 +63,51 @@ describe('Legacy P2P network endpoint', () => { }); it("should return empty list if ID doesn't exist", async () => { - const blockId = utils.getRandomBytes(32); - const blockIds = codec.encode(getBlocksFromIdRequestSchema, { - blockId, + const blockID = utils.getRandomBytes(32); + const snapshotBlockID = utils.getRandomBytes(32); + const requestPayload = codec.encode(getLegacyBlocksFromIdRequestSchema, { + blockID, + snapshotBlockID, }); - const blocks = await endpoint.handleRPCGetLegacyBlocksFromID(blockIds, defaultPeerID); + await endpoint._storage.setBracketInfo(snapshotBlockID, { + lastBlockHeight: 100, + snapshotBlockHeight: 200, + startHeight: 50, + }); + const blocks = await 
endpoint.handleRPCGetLegacyBlocksFromID(requestPayload, defaultPeerID); expect(blocks).toEqual(codec.encode(getBlocksFromIdResponseSchema, { blocks: [] })); }); it('should return 100 blocks from the requested ID', async () => { - const startHeight = 110; + const requestedHeight = 110; // 100 blocks including the requested block ID - const blocks = getLegacyBlocksRangeV2(startHeight, 99); - - const requestedBlock = decodeBlock(blocks[0]).block; + const blockHeaders = getLegacyBlockHeadersRangeV2(requestedHeight, 100); - const { - header: { id, ...blockHeader }, - payload, - } = requestedBlock; + const requestedBlockHeader = decodeBlockHeader(blockHeaders[0]); - const requestedBlockWithoutID = { header: { ...blockHeader }, payload }; - - const encodedBlockWithoutID = encodeBlock(requestedBlockWithoutID); - const requestedBlockID = utils.hash(encodedBlockWithoutID); + const { id: requestedBlockID } = requestedBlockHeader; // Save blocks to the database - for (let i = 0; i < blocks.length; i += 1) { - const block = blocks[i]; - await endpoint['_storage'].saveBlock(utils.hash(block), startHeight + i, block, []); + for (let i = 0; i < blockHeaders.length; i += 1) { + const blockHeader = blockHeaders[i]; + await endpoint['_storage'].saveBlock( + utils.hash(blockHeader), + requestedHeight - i, + blockHeader, + [], + ); } - const encodedRequest = codec.encode(getBlocksFromIdRequestSchema, { - blockId: requestedBlockID, + const snapshotBlockID = utils.getRandomBytes(32); + const encodedRequest = codec.encode(getLegacyBlocksFromIdRequestSchema, { + blockID: requestedBlockID, + snapshotBlockID, } as never); + await endpoint._storage.setBracketInfo(snapshotBlockID, { + lastBlockHeight: 100, + snapshotBlockHeight: 200, + startHeight: requestedHeight - 101, + }); const blocksReceived = await endpoint.handleRPCGetLegacyBlocksFromID( encodedRequest, defaultPeerID, diff --git a/framework/test/unit/engine/legacy/storage.spec.ts b/framework/test/unit/engine/legacy/storage.spec.ts 
index d18c7961f81..1d4f59a6e85 100644 --- a/framework/test/unit/engine/legacy/storage.spec.ts +++ b/framework/test/unit/engine/legacy/storage.spec.ts @@ -15,7 +15,7 @@ import { Batch, Database, InMemoryDatabase } from '@liskhq/lisk-db'; import { utils } from '@liskhq/lisk-cryptography'; -import { encodeBlock, encodeLegacyChainBracketInfo } from '../../../../src/engine/legacy/codec'; +import { encodeBlock, encodeBlockHeader } from '../../../../src/engine/legacy/codec'; import { Storage } from '../../../../src/engine/legacy/storage'; import { blockFixtures } from './fixtures'; import { buildBlockHeightDbKey, buildBlockIDDbKey } from '../../../../src/engine/legacy/utils'; @@ -36,8 +36,8 @@ describe('Legacy storage', () => { for (const block of blocks) { const { header, payload } = block; - batch.set(buildBlockIDDbKey(header.id), encodeBlock({ header, payload })); - batch.set(buildBlockHeightDbKey(header.height), header.id); + const encodedHeader = encodeBlockHeader(header); + await storage.saveBlock(header.id, header.height, encodedHeader, payload); } await db.write(batch); @@ -60,8 +60,9 @@ describe('Legacy storage', () => { }); it('should throw error if block with given id does not exist', async () => { - await expect(storage.getBlockByID(Buffer.alloc(0))).rejects.toThrow( - `Specified key 626c6f636b733a6964 does not exist`, + const randomBlockID = utils.hash(utils.getRandomBytes(1)); + await expect(storage.getBlockByID(randomBlockID)).rejects.toThrow( + `Specified key ${buildBlockIDDbKey(randomBlockID).toString('hex')} does not exist`, ); }); }); @@ -76,7 +77,7 @@ describe('Legacy storage', () => { it('should throw an error if the block is not found', async () => { await expect(storage.getBlockByHeight(100)).rejects.toThrow( - `Specified key 626c6f636b733a68656967687400000064 does not exist`, + `Specified key ${buildBlockHeightDbKey(100).toString('hex')} does not exist`, ); }); }); @@ -124,7 +125,7 @@ describe('Legacy storage', () => { describe('saveBlock', () => { 
it("should save the block along with it's transactions", async () => { const { header, payload } = blockFixtures[0]; - await storage.saveBlock(header.id, header.height, encodeBlock({ header, payload }), payload); + await storage.saveBlock(header.id, header.height, encodeBlockHeader(header), payload); const result = await storage.getBlockByID(header.id); expect(result).toEqual(encodeBlock({ header, payload })); @@ -138,7 +139,7 @@ describe('Legacy storage', () => { it("should save the block without it's transactions", async () => { const { header, payload } = blockFixtures[0]; - await storage.saveBlock(header.id, header.height, encodeBlock({ header, payload }), []); + await storage.saveBlock(header.id, header.height, encodeBlockHeader(header), payload); const result = await storage.getBlockByID(header.id); expect(result).toEqual(encodeBlock({ header, payload })); @@ -166,15 +167,15 @@ describe('Legacy storage', () => { lastBlockHeight: header.height, }; - await storage.setLegacyChainBracketInfo(header.id, bracketInfo); + await storage.setBracketInfo(header.id, bracketInfo); - const result = await storage.getLegacyChainBracketInfo(header.id); + const result = await storage.getBracketInfo(header.id); - expect(result).toEqual(encodeLegacyChainBracketInfo(bracketInfo)); + expect(result).toEqual(bracketInfo); }); it('should throw error if block with given id does not exist', async () => { - await expect(storage.getLegacyChainBracketInfo(Buffer.alloc(0))).rejects.toThrow( + await expect(storage.getBracketInfo(Buffer.alloc(0))).rejects.toThrow( `Specified key 02 does not exist`, ); }); diff --git a/framework/test/unit/genesis_block.spec.ts b/framework/test/unit/genesis_block.spec.ts index 2ee294300df..9a287c8f99a 100644 --- a/framework/test/unit/genesis_block.spec.ts +++ b/framework/test/unit/genesis_block.spec.ts @@ -33,6 +33,7 @@ describe('generateGenesisBlock', () => { expect(result.header.validatorsHash).toHaveLength(32); 
expect(result.header.eventRoot).toHaveLength(32); expect(result.header.version).toBe(0); + expect(result.header.aggregateCommit.height).toBe(30); expect(stateMachine.executeGenesisBlock).toHaveBeenCalledTimes(1); }); diff --git a/framework/test/unit/modules/dynamic_rewards/endpoint.spec.ts b/framework/test/unit/modules/dynamic_reward/endpoint.spec.ts similarity index 93% rename from framework/test/unit/modules/dynamic_rewards/endpoint.spec.ts rename to framework/test/unit/modules/dynamic_reward/endpoint.spec.ts index f2f2366a9c1..8ff12926634 100644 --- a/framework/test/unit/modules/dynamic_rewards/endpoint.spec.ts +++ b/framework/test/unit/modules/dynamic_reward/endpoint.spec.ts @@ -12,8 +12,8 @@ * Removal or modification of this copyright notice is prohibited. */ import { utils, address as cryptoAddress } from '@liskhq/lisk-cryptography'; -import { DynamicRewardModule } from '../../../../src/modules/dynamic_rewards'; -import { DynamicRewardEndpoint } from '../../../../src/modules/dynamic_rewards/endpoint'; +import { DynamicRewardModule } from '../../../../src/modules/dynamic_reward'; +import { DynamicRewardEndpoint } from '../../../../src/modules/dynamic_reward/endpoint'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB, @@ -135,11 +135,12 @@ describe('DynamicRewardModuleEndpoint', () => { }), ); + // total reward for active validators is 9LSK (5LSK - 0.5LSK) * 2, which will be distributed by weight expect(response).toEqual({ - blockReward: '20957000000', - dailyReward: '2511351993600', - monthlyReward: '75340559808000', - yearlyReward: '916643477664000', + blockReward: '464000000', // 9 * (230/500) + 0.5 + dailyReward: '55602720000', + monthlyReward: '1668081600000', + yearlyReward: '20294992800000', }); }); }); diff --git a/framework/test/unit/modules/dynamic_rewards/module.spec.ts b/framework/test/unit/modules/dynamic_reward/module.spec.ts similarity index 96% rename from 
framework/test/unit/modules/dynamic_rewards/module.spec.ts rename to framework/test/unit/modules/dynamic_reward/module.spec.ts index 1cd4ae2b80a..3260ad2ccaa 100644 --- a/framework/test/unit/modules/dynamic_rewards/module.spec.ts +++ b/framework/test/unit/modules/dynamic_reward/module.spec.ts @@ -21,27 +21,27 @@ import { InMemoryPrefixedStateDB, } from '../../../../src/testing'; import { RewardMintedEvent } from '../../../../src/modules/reward/events/reward_minted'; -import { DynamicRewardModule } from '../../../../src/modules/dynamic_rewards'; +import { DynamicRewardModule } from '../../../../src/modules/dynamic_reward'; import { PoSMethod, RandomMethod, TokenMethod, ValidatorsMethod, -} from '../../../../src/modules/dynamic_rewards/types'; +} from '../../../../src/modules/dynamic_reward/types'; import { CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REDUCTION, CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REWARD, DECIMAL_PERCENT_FACTOR, defaultConfig, EMPTY_BYTES, -} from '../../../../src/modules/dynamic_rewards/constants'; +} from '../../../../src/modules/dynamic_reward/constants'; import { BlockAfterExecuteContext, BlockExecuteContext, GenesisBlockExecuteContext, } from '../../../../src'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; -import { EndOfRoundTimestampStore } from '../../../../src/modules/dynamic_rewards/stores/end_of_round_timestamp'; +import { EndOfRoundTimestampStore } from '../../../../src/modules/dynamic_reward/stores/end_of_round_timestamp'; import { REWARD_NO_REDUCTION, REWARD_REDUCTION_MAX_PREVOTES, @@ -51,7 +51,6 @@ import { describe('DynamicRewardModule', () => { const defaultRoundLength = 103; - const defaultNumberOfActiveValidators = 101; let rewardModule: DynamicRewardModule; let tokenMethod: TokenMethod; @@ -72,7 +71,6 @@ describe('DynamicRewardModule', () => { getValidatorsParams: jest.fn(), }; posMethod = { - getNumberOfActiveValidators: jest.fn().mockReturnValue(defaultNumberOfActiveValidators), 
getRoundLength: jest.fn().mockReturnValue(defaultRoundLength), updateSharedRewards: jest.fn(), isEndOfRound: jest.fn(), @@ -187,7 +185,6 @@ describe('DynamicRewardModule', () => { ]; (validatorsMethod.getValidatorsParams as jest.Mock).mockResolvedValue({ validators }); - (posMethod.getNumberOfActiveValidators as jest.Mock).mockReturnValue(activeValidator); }); it('should store minimal reward for active validators when full round is forged', async () => { @@ -226,7 +223,7 @@ describe('DynamicRewardModule', () => { // generatorAddress has 20% of total weight expect(blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REWARD)).toEqual( - minimumReward + ratioReward / BigInt(5), + minimumReward + ratioReward / BigInt(5), // 20 / 100 for generator address ); expect( blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REDUCTION), @@ -316,7 +313,6 @@ describe('DynamicRewardModule', () => { (validatorsMethod.getGeneratorsBetweenTimestamps as jest.Mock).mockResolvedValue( generatorMap, ); - (posMethod.getNumberOfActiveValidators as jest.Mock).mockReturnValue(activeValidator); when(tokenMethod.userSubstoreExists) .calledWith( expect.anything(), diff --git a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts index 115cc738f60..5017a38fc8a 100644 --- a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts +++ b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts @@ -22,7 +22,6 @@ import { Transaction, CommandVerifyContext, ChainAccount, - CCMsg, } from '../../../../src'; import { BaseCCCommand } from '../../../../src/modules/interoperability/base_cc_command'; import { BaseCrossChainUpdateCommand } from '../../../../src/modules/interoperability/base_cross_chain_update_command'; @@ -36,6 +35,7 @@ import { MODULE_NAME_INTEROPERABILITY, EMPTY_BYTES, EmptyCCM, + 
EVENT_TOPIC_CCM_EXECUTION, } from '../../../../src/modules/interoperability/constants'; import { CCMProcessedCode, @@ -65,7 +65,7 @@ import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_ import { ChainValidatorsStore } from '../../../../src/modules/interoperability/stores/chain_validators'; import { ChannelDataStore } from '../../../../src/modules/interoperability/stores/channel_data'; import { MainchainInteroperabilityInternalMethod } from '../../../../src/modules/interoperability/mainchain/internal_method'; -import { getMainchainID } from '../../../../src/modules/interoperability/utils'; +import { getIDFromCCMBytes, getMainchainID } from '../../../../src/modules/interoperability/utils'; import { BaseInteroperabilityInternalMethod } from '../../../../src/modules/interoperability/base_interoperability_internal_methods'; import { CROSS_CHAIN_COMMAND_NAME_TRANSFER } from '../../../../src/modules/token/constants'; import { @@ -73,6 +73,7 @@ import { OwnChainAccount, } from '../../../../src/modules/interoperability/stores/own_chain_account'; import { createStoreGetter } from '../../../../src/testing/utils'; +import { EVENT_TOPIC_TRANSACTION_EXECUTION } from '../../../../src/state_machine/constants'; class CrossChainUpdateCommand extends BaseCrossChainUpdateCommand { // eslint-disable-next-line @typescript-eslint/require-await @@ -93,7 +94,6 @@ describe('BaseCrossChainUpdateCommand', () => { senderPublicKey, signatures: [], }; - const minReturnFeePerByte = BigInt(10000000); const certificate = codec.encode(certificateSchema, { blockID: utils.getRandomBytes(32), @@ -255,7 +255,7 @@ describe('BaseCrossChainUpdateCommand', () => { command.init( { getMessageFeeTokenID: jest.fn().mockResolvedValue(messageFeeTokenID), - getMinReturnFeePerByte: jest.fn().mockResolvedValue(minReturnFeePerByte), + getMinReturnFeePerByte: jest.fn().mockResolvedValue(BigInt(10000000)), } as any, { initializeUserAccount: jest.fn(), @@ -322,19 +322,6 @@ 
describe('BaseCrossChainUpdateCommand', () => { .set(stateStore, params.sendingChainID, chainAccount); }); - it('should reject when ccu params validation fails', async () => { - const nonBufferSendingChainID = 2; - verifyContext = { - ...verifyContext, - params: { ...params, sendingChainID: nonBufferSendingChainID } as any, - }; - - // 2nd param `isMainchain` could be false - await expect(command['verifyCommon'](verifyContext, false)).rejects.toThrow( - `Property '.sendingChainID' should pass "dataType" keyword validation`, - ); - }); - it('should call validator.validate with crossChainUpdateTransactionParams schema', async () => { jest.spyOn(validator, 'validate'); @@ -528,11 +515,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); }); - // CAUTION! - // tests should be written/executed as per `BaseCrossChainUpdateCommand::beforeCrossChainMessagesExecution`, - // otherwise, they can fail due to some other check - // also, we can simplify test cases by giving only one CCM to params.inboxUpdate.crossChainMessages array - describe('beforeCrossChainMessagesExecution', () => { + describe('verifyCertificateSignatureAndPartnerChainOutboxRoot', () => { let executeContext: CommandExecuteContext; let stateStore: PrefixedStateReadWriter; @@ -563,21 +546,15 @@ describe('BaseCrossChainUpdateCommand', () => { it('should verify certificate signature', async () => { await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([ - expect.toBeArrayOfSize(params.inboxUpdate.crossChainMessages.length), - true, - ]); + command['verifyCertificateSignatureAndPartnerChainOutboxRoot'](executeContext), + ).resolves.toBeUndefined(); expect(internalMethod.verifyCertificateSignature).toHaveBeenCalledTimes(1); }); it('should initialize user account for message fee token ID when inboxUpdate is not empty', async () => { await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([ - 
expect.toBeArrayOfSize(params.inboxUpdate.crossChainMessages.length), - true, - ]); + command['verifyCertificateSignatureAndPartnerChainOutboxRoot'](executeContext), + ).resolves.toBeUndefined(); expect(command['_interopsMethod'].getMessageFeeTokenID).toHaveBeenCalledWith( expect.anything(), params.sendingChainID, @@ -591,28 +568,15 @@ describe('BaseCrossChainUpdateCommand', () => { it('should resolve empty ccm with false result when verifyPartnerChainOutboxRoot fails', async () => { (command['internalMethod'].verifyPartnerChainOutboxRoot as jest.Mock).mockRejectedValue( - new Error('invalid root'), + new Error('Inbox root does not match partner chain outbox root.'), ); await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([[], false]); + command['verifyCertificateSignatureAndPartnerChainOutboxRoot'](executeContext), + ).rejects.toThrow('Inbox root does not match partner chain outbox root.'); expect(command['_interopsMethod'].getMessageFeeTokenID).not.toHaveBeenCalled(); }); - it('should verifyPartnerChainOutboxRoot when inboxUpdate is not empty', async () => { - await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([ - expect.toBeArrayOfSize(params.inboxUpdate.crossChainMessages.length), - true, - ]); - expect(command['internalMethod'].verifyPartnerChainOutboxRoot).toHaveBeenCalledWith( - expect.anything(), - params, - ); - }); - it('should not initialize user account for message fee token ID when inboxUpdate is empty', async () => { executeContext = createTransactionContext({ chainID, @@ -635,13 +599,68 @@ describe('BaseCrossChainUpdateCommand', () => { }).createCommandExecuteContext(command.schema); await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([[], true]); + command['verifyCertificateSignatureAndPartnerChainOutboxRoot'](executeContext), + ).resolves.toBeUndefined(); 
expect(command['_interopsMethod'].getMessageFeeTokenID).not.toHaveBeenCalled(); expect(command['_tokenMethod'].initializeUserAccount).not.toHaveBeenCalled(); }); + it('should verifyPartnerChainOutboxRoot when inboxUpdate is not empty', async () => { + await expect( + command['verifyCertificateSignatureAndPartnerChainOutboxRoot'](executeContext), + ).resolves.toBeUndefined(); + expect(command['internalMethod'].verifyPartnerChainOutboxRoot).toHaveBeenCalledWith( + expect.anything(), + params, + ); + }); + }); + + // CAUTION! + // tests should be written/executed as per `BaseCrossChainUpdateCommand::beforeCrossChainMessagesExecution`, + // otherwise, they can fail due to some other check + // also, we can simplify test cases by giving only one CCM to params.inboxUpdate.crossChainMessages array + describe('beforeCrossChainMessagesExecution', () => { + let executeContext: CommandExecuteContext; + let stateStore: PrefixedStateReadWriter; + + beforeEach(async () => { + stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); + + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, params), + }), + }).createCommandExecuteContext(command.schema); + jest.spyOn(interopsModule.events.get(CcmProcessedEvent), 'log'); + await interopsModule.stores + .get(ChainAccountStore) + .set(stateStore, defaultSendingChainID, partnerChainAccount); + await interopsModule.stores.get(ChainValidatorsStore).set(stateStore, defaultSendingChainID, { + activeValidators, + certificateThreshold: params.certificateThreshold, + }); + await interopsModule.stores + .get(ChannelDataStore) + .set(stateStore, defaultSendingChainID, partnerChannel); + }); + it('should terminate the chain and add an event when fails with ccm decoding', async () => { + const invalidCCM = { + crossChainCommand: '', + fee: BigInt(0), + module: '', + nonce: 
BigInt(0), + params: EMPTY_BYTES, + receivingChainID: EMPTY_BYTES, + sendingChainID: EMPTY_BYTES, + // status: 0 INTENTIONALLY skipped to cause INVALID_CCM_DECODING_EXCEPTION exception + }; + executeContext = createTransactionContext({ chainID, stateStore, @@ -652,18 +671,7 @@ describe('BaseCrossChainUpdateCommand', () => { ...params, inboxUpdate: { ...params.inboxUpdate, - crossChainMessages: [ - codec.encode(ccmSchema, { - crossChainCommand: '', - fee: BigInt(0), - module: '', - nonce: BigInt(0), - params: EMPTY_BYTES, - receivingChainID: EMPTY_BYTES, - sendingChainID: EMPTY_BYTES, - // status: 0 INTENTIONALLY skipped to cause INVALID_CCM_DECODING_EXCEPTION exception - }), - ], + crossChainMessages: [codec.encode(ccmSchema, invalidCCM)], }, }), }), @@ -676,8 +684,12 @@ describe('BaseCrossChainUpdateCommand', () => { expect.anything(), params.sendingChainID, ); + const invalidCCMID = getIDFromCCMBytes(codec.encode(ccmSchema, invalidCCM)); + const ccmEventQueue = executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, invalidCCMID]), + ); expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, + { ...executeContext, eventQueue: ccmEventQueue }, executeContext.params.sendingChainID, executeContext.chainID, { @@ -716,9 +728,12 @@ describe('BaseCrossChainUpdateCommand', () => { expect.anything(), params.sendingChainID, ); - + const ccmID = getIDFromCCMBytes(codec.encode(ccmSchema, EmptyCCM)); + const ccmEventQueue = executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ); expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, + { ...executeContext, eventQueue: ccmEventQueue }, executeContext.params.sendingChainID, EmptyCCM.receivingChainID, { @@ -729,9 +744,7 @@ describe('BaseCrossChainUpdateCommand', () => { ); }); - it('should call _verifyRoutingRules', async () => { - jest.spyOn(command, '_verifyRoutingRules' as any); - + 
it('should terminate the chain and add an event when CCM sending chain and ccu sending chain is not the same', async () => { const ccm = { crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, fee: BigInt(0), @@ -765,20 +778,18 @@ describe('BaseCrossChainUpdateCommand', () => { await expect( command['beforeCrossChainMessagesExecution'](executeContext, true), ).resolves.toEqual([[], false]); - expect(command['_verifyRoutingRules']).toHaveBeenCalledTimes(1); - }); - - const routingRulesCommonExpects = ( - cmdExecuteContext: CommandExecuteContext, - ccm: CCMsg, - ) => { expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( expect.anything(), - cmdExecuteContext.params.sendingChainID, + params.sendingChainID, + ); + + const ccmID = getIDFromCCMBytes(codec.encode(ccmSchema, ccm)); + const ccmEventQueue = executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), ); expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - cmdExecuteContext, - cmdExecuteContext.params.sendingChainID, + { ...executeContext, eventQueue: ccmEventQueue }, + executeContext.params.sendingChainID, ccm.receivingChainID, { ccm, @@ -786,10 +797,12 @@ describe('BaseCrossChainUpdateCommand', () => { code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, }, ); - }; + }); - describe('_verifyRoutingRules', () => { - const routingRulesCCM = { + // Sending and receiving chains must differ. 
+ it('should terminate the chain and add an event when receiving chain is the same as sending chain', async () => { + const sendingChainID = chainID; + const ccm = { crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, fee: BigInt(0), module: MODULE_NAME_INTEROPERABILITY, @@ -802,143 +815,150 @@ describe('BaseCrossChainUpdateCommand', () => { status: CCMStatusCode.OK, }; - // Sending and receiving chains must differ - it('should terminate the chain and add an event when receiving chain is the same as sending chain', async () => { - const ccm = routingRulesCCM; - const sendingChainID = chainID; - - executeContext = createTransactionContext({ - chainID: sendingChainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, - // this is needed to pass `!ccm.sendingChainID.equals(params.sendingChainID)` check (previous test) - sendingChainID, - }), - }), - }).createCommandExecuteContext(command.schema); - - await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([[], false]); - - routingRulesCommonExpects(executeContext, ccm); - }); - - it('should terminate the chain and add an event when CCM sending chain and ccu sending chain is not the same', async () => { - const ccm = { - ...routingRulesCCM, - // this will fail for `!ccm.sendingChainID.equals(params.sendingChainID)` - // params.sendingChainID is `defaultSendingChainID` (line 158) - sendingChainID: Buffer.from([1, 2, 3, 4]), - }; - - executeContext = createTransactionContext({ - chainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, - 
}), + executeContext = createTransactionContext({ + chainID: sendingChainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, + // this is needed to pass `!ccm.sendingChainID.equals(params.sendingChainID)` check (previous test) + sendingChainID, }), - }).createCommandExecuteContext(command.schema); + }), + }).createCommandExecuteContext(command.schema); - await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([[], false]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); + expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( + expect.anything(), + sendingChainID, + ); + const ccmID = getIDFromCCMBytes(codec.encode(ccmSchema, ccm)); + const ccmEventQueue = executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ); + expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( + { ...executeContext, eventQueue: ccmEventQueue }, + executeContext.params.sendingChainID, + ccm.receivingChainID, + { + ccm, + result: CCMProcessedResult.DISCARDED, + code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, + }, + ); + }); - routingRulesCommonExpects(executeContext, ccm); - }); + it('should terminate the chain and add an event when CCM is not directed to the sidechain', async () => { + const ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, + fee: BigInt(0), + module: MODULE_NAME_INTEROPERABILITY, + nonce: BigInt(1), + params: utils.getRandomBytes(10), + // will fail for `!context.chainID.equals(ccm.receivingChainID)` + receivingChainID: Buffer.from([0, 0, 3, 0]), + sendingChainID: defaultSendingChainID, + status: CCMStatusCode.OK, + }; - it('should reject with 
terminate the chain and add an event when ccm status is CCMStatusCode.CHANNEL_UNAVAILABLE and mainchain is true', async () => { - const ccm = { - ...routingRulesCCM, - // must be same as `context.chainID` to pass `!context.chainID.equals(ccm.receivingChainID)` - receivingChainID: chainID, - // must be same as defaultSendingChainID to pass `!ccm.sendingChainID.equals(params.sendingChainID)` - sendingChainID: defaultSendingChainID, - // will fail for `CCMStatusCode.CHANNEL_UNAVAILABLE` - status: CCMStatusCode.CHANNEL_UNAVAILABLE, - }; - - executeContext = createTransactionContext({ - chainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, - }), + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, }), - }).createCommandExecuteContext(command.schema); + }), + }).createCommandExecuteContext(command.schema); - await expect( - command['beforeCrossChainMessagesExecution'](executeContext, true), - ).resolves.toEqual([[], false]); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, false), + ).resolves.toEqual([[], false]); + expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( + expect.anything(), + params.sendingChainID, + ); + const ccmID = getIDFromCCMBytes(codec.encode(ccmSchema, ccm)); + const ccmEventQueue = executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ); + expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( + { ...executeContext, eventQueue: ccmEventQueue }, + 
executeContext.params.sendingChainID, + ccm.receivingChainID, + { + ccm, + result: CCMProcessedResult.DISCARDED, + code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, + }, + ); + }); - routingRulesCommonExpects(executeContext, ccm); - }); + it('should reject with terminate the chain and add an event when ccm status is CCMStatusCode.CHANNEL_UNAVAILABLE and mainchain is true', async () => { + const ccm = { + crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, + fee: BigInt(0), + module: MODULE_NAME_INTEROPERABILITY, + nonce: BigInt(1), + params: utils.getRandomBytes(10), + // must be same as `context.chainID` to pass `!context.chainID.equals(ccm.receivingChainID)` + receivingChainID: chainID, + // must be same as defaultSendingChainID to pass `!ccm.sendingChainID.equals(params.sendingChainID)` + sendingChainID: defaultSendingChainID, + // will fail for `CCMStatusCode.CHANNEL_UNAVAILABLE` + status: CCMStatusCode.CHANNEL_UNAVAILABLE, + }; - it('should terminate the chain and add an event when CCM is not directed to the sidechain', async () => { - const ccm = { - ...routingRulesCCM, - // will fail for `!context.chainID.equals(ccm.receivingChainID)` - receivingChainID: Buffer.from([0, 0, 3, 0]), - sendingChainID: defaultSendingChainID, - }; - - executeContext = createTransactionContext({ - chainID, - stateStore, - transaction: new Transaction({ - ...defaultTransaction, - command: command.name, - params: codec.encode(crossChainUpdateTransactionParams, { - ...params, - inboxUpdate: { - ...params.inboxUpdate, - crossChainMessages: [codec.encode(ccmSchema, ccm)], - }, - }), + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: command.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + inboxUpdate: { + ...params.inboxUpdate, + crossChainMessages: [codec.encode(ccmSchema, ccm)], + }, }), - }).createCommandExecuteContext(command.schema); + }), + 
}).createCommandExecuteContext(command.schema); - await expect( - command['beforeCrossChainMessagesExecution'](executeContext, false), - ).resolves.toEqual([[], false]); - expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( - expect.anything(), - params.sendingChainID, - ); - expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( - executeContext, - executeContext.params.sendingChainID, - ccm.receivingChainID, - { - ccm, - result: CCMProcessedResult.DISCARDED, - code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, - }, - ); - }); + await expect( + command['beforeCrossChainMessagesExecution'](executeContext, true), + ).resolves.toEqual([[], false]); + expect(internalMethod.terminateChainInternal).toHaveBeenCalledWith( + expect.anything(), + params.sendingChainID, + ); + expect(command['events'].get(CcmProcessedEvent).log).toHaveBeenCalledWith( + expect.anything(), + executeContext.params.sendingChainID, + ccm.receivingChainID, + { + ccm, + result: CCMProcessedResult.DISCARDED, + code: CCMProcessedCode.INVALID_CCM_ROUTING_EXCEPTION, + }, + ); + expect(executeContext.eventQueue['_defaultTopics'][0]).toEqual( + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, executeContext.transaction.id]), + ); }); it('should resolve when ccm status is CCMStatusCode.CHANNEL_UNAVAILABLE and mainchain is false', async () => { @@ -978,7 +998,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); }); - describe('afterCrossChainMessagesExecution', () => { + describe('afterCrossChainMessagesExecute', () => { let executeContext: CommandExecuteContext; let chainValidatorsStore: ChainValidatorsStore; @@ -1023,7 +1043,7 @@ describe('BaseCrossChainUpdateCommand', () => { } as any); await expect( - command['afterCrossChainMessagesExecution'](executeContext), + command['afterCrossChainMessagesExecute'](executeContext), ).resolves.toBeUndefined(); expect(command['internalMethod'].updateValidators).toHaveBeenCalledWith( expect.anything(), @@ -1034,7 +1054,7 
@@ describe('BaseCrossChainUpdateCommand', () => { it('should update validators if activeValidatorsUpdate is empty but params.certificateThreshold !== sendingChainValidators.certificateThreshold', async () => { executeContext.params.activeValidatorsUpdate.bftWeightsUpdateBitmap = EMPTY_BUFFER; await expect( - command['afterCrossChainMessagesExecution'](executeContext), + command['afterCrossChainMessagesExecute'](executeContext), ).resolves.toBeUndefined(); expect(command['internalMethod'].updateValidators).toHaveBeenCalledWith( @@ -1046,7 +1066,7 @@ describe('BaseCrossChainUpdateCommand', () => { it('should not update certificate and updatePartnerChainOutboxRoot if certificate is empty', async () => { executeContext.params.certificate = EMPTY_BYTES; await expect( - command['afterCrossChainMessagesExecution'](executeContext), + command['afterCrossChainMessagesExecute'](executeContext), ).resolves.toBeUndefined(); expect(command['internalMethod'].updateCertificate).not.toHaveBeenCalled(); expect(command['internalMethod'].updatePartnerChainOutboxRoot).not.toHaveBeenCalled(); @@ -1062,7 +1082,7 @@ describe('BaseCrossChainUpdateCommand', () => { }, }; await expect( - command['afterCrossChainMessagesExecution'](executeContext), + command['afterCrossChainMessagesExecute'](executeContext), ).resolves.toBeUndefined(); expect(command['internalMethod'].updatePartnerChainOutboxRoot).not.toHaveBeenCalled(); @@ -1551,7 +1571,6 @@ describe('BaseCrossChainUpdateCommand', () => { describe('bounce', () => { const ccmStatus = CCMStatusCode.MODULE_NOT_SUPPORTED; const ccmProcessedEventCode = CCMProcessedCode.MODULE_NOT_SUPPORTED; - const ccmSize = 100; let stateStore: PrefixedStateReadWriter; beforeEach(async () => { @@ -1573,7 +1592,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); 
expect(context.eventQueue.getEvents()).toHaveLength(1); @@ -1590,18 +1609,17 @@ describe('BaseCrossChainUpdateCommand', () => { }); it('should log event when ccm.fee is less than min fee', async () => { - const minFee = minReturnFeePerByte * BigInt(ccmSize); context = createCrossChainMessageContext({ ccm: { ...defaultCCM, status: CCMStatusCode.OK, - fee: minFee - BigInt(1), + fee: BigInt(1), }, stateStore, }); await expect( - command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(1); @@ -1631,7 +1649,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1667,7 +1685,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1697,7 +1715,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1724,7 +1742,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(2); diff --git 
a/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts b/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts index 3db753832a2..b753100173a 100644 --- a/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts +++ b/framework/test/unit/modules/interoperability/base_interoperability_module.spec.ts @@ -19,7 +19,6 @@ import { ChainStatus, EMPTY_BYTES, MainchainInteroperabilityModule, - getMainchainID, } from '../../../../src'; import { MAX_NUM_VALIDATORS, @@ -43,12 +42,9 @@ import { InvalidCertificateSignatureEvent } from '../../../../src/modules/intero import { InvalidRegistrationSignatureEvent } from '../../../../src/modules/interoperability/events/invalid_registration_signature'; import { TerminatedOutboxCreatedEvent } from '../../../../src/modules/interoperability/events/terminated_outbox_created'; import { TerminatedStateCreatedEvent } from '../../../../src/modules/interoperability/events/terminated_state_created'; -import { InvalidRMTVerification } from '../../../../src/modules/interoperability/events/invalid_rmt_verification'; -import { InvalidSMTVerification } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; -import { - ChainInfo, - TerminatedStateAccountWithChainID, -} from '../../../../src/modules/interoperability/types'; +import { InvalidRMTVerificationEvent } from '../../../../src/modules/interoperability/events/invalid_rmt_verification'; +import { InvalidSMTVerificationEvent } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; +// import { ChainInfo, TerminatedStateAccountWithChainID } from '../../../../src/modules/interoperability/types'; describe('initGenesisState Common Tests', () => { const chainID = Buffer.from([0, 0, 0, 0]); @@ -100,8 +96,8 @@ describe('initGenesisState Common Tests', () => { expect(interopMod.events.get(TerminatedStateCreatedEvent)).toBeDefined(); 
expect(interopMod.events.get(TerminatedOutboxCreatedEvent)).toBeDefined(); expect(interopMod.events.get(InvalidCertificateSignatureEvent)).toBeDefined(); - expect(interopMod.events.get(InvalidRMTVerification)).toBeDefined(); - expect(interopMod.events.get(InvalidSMTVerification)).toBeDefined(); + expect(interopMod.events.get(InvalidRMTVerificationEvent)).toBeDefined(); + expect(interopMod.events.get(InvalidSMTVerificationEvent)).toBeDefined(); }); }); @@ -493,7 +489,7 @@ must NOT have more than ${MAX_NUM_VALIDATORS} items`, }); }); - describe('_verifyTerminatedStateAccountsCommon', () => { + describe('_verifyTerminatedStateAccountsIDs', () => { certificateThreshold = BigInt(10); const validChainInfos = [ { @@ -581,46 +577,6 @@ must NOT have more than ${MAX_NUM_VALIDATORS} items`, 'terminatedStateAccounts must be ordered lexicographically by chainID.', ); }); - - it('should call _verifyChainID the same number of times as size of terminatedStateAccounts', () => { - const interopModPrototype = Object.getPrototypeOf(interopMod); - jest.spyOn(interopModPrototype, '_verifyChainID' as any); - - const chainInfoLocal: ChainInfo = { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }; - - const terminatedStateAccounts: TerminatedStateAccountWithChainID[] = [ - { - chainID: chainInfoLocal.chainID, - terminatedStateAccount, - }, - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedStateAccount, - }, - ]; - - interopModPrototype._verifyTerminatedStateAccountsCommon( - terminatedStateAccounts, - getMainchainID(params.chainID as Buffer), - ); - expect(interopModPrototype['_verifyChainID']).toHaveBeenCalledTimes( - terminatedStateAccounts.length, - ); - }); }); describe('processGenesisState', () => { diff --git 
a/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts b/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts index 24c9af6a3c9..069aa5407cb 100644 --- a/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts +++ b/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts @@ -40,7 +40,7 @@ import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_ import { createTransactionContext } from '../../../../src/testing'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { createStoreGetter } from '../../../../src/testing/utils'; -import { InvalidSMTVerification } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; +import { InvalidSMTVerificationEvent } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; import { computeStorePrefix } from '../../../../src/modules/base_store'; describe('RecoverStateCommand', () => { @@ -190,7 +190,7 @@ describe('RecoverStateCommand', () => { }); it('should return error if proof of inclusion is not valid', async () => { - const invalidSMTVerificationEvent = interopMod.events.get(InvalidSMTVerification); + const invalidSMTVerificationEvent = interopMod.events.get(InvalidSMTVerificationEvent); jest.spyOn(SparseMerkleTree.prototype, 'verify').mockResolvedValue(false); jest.spyOn(invalidSMTVerificationEvent, 'error'); diff --git a/framework/test/unit/modules/interoperability/internal_method.spec.ts b/framework/test/unit/modules/interoperability/internal_method.spec.ts index a7acb99820c..739c6b3060a 100644 --- a/framework/test/unit/modules/interoperability/internal_method.spec.ts +++ b/framework/test/unit/modules/interoperability/internal_method.spec.ts @@ -21,19 +21,26 @@ import { validator } from '@liskhq/lisk-validator'; import { BLS_PUBLIC_KEY_LENGTH, BLS_SIGNATURE_LENGTH, + CCMStatusCode, + CROSS_CHAIN_COMMAND_REGISTRATION, EMPTY_BYTES, 
EMPTY_HASH, HASH_LENGTH, + MAX_UINT64, MESSAGE_TAG_CERTIFICATE, MIN_RETURN_FEE_PER_BYTE_BEDDOWS, + MODULE_NAME_INTEROPERABILITY, } from '../../../../src/modules/interoperability/constants'; import { MainchainInteroperabilityInternalMethod } from '../../../../src/modules/interoperability/mainchain/internal_method'; import * as utils from '../../../../src/modules/interoperability/utils'; -import { MainchainInteroperabilityModule, testing } from '../../../../src'; import { CrossChainUpdateTransactionParams, + MainchainInteroperabilityModule, + Transaction, + testing, + CCMsg, OwnChainAccount, -} from '../../../../src/modules/interoperability/types'; +} from '../../../../src'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { ChannelDataStore } from '../../../../src/modules/interoperability/stores/channel_data'; @@ -48,10 +55,10 @@ import { import { ChainAccountStore } from '../../../../src/modules/interoperability/stores/chain_account'; import { TerminatedStateStore } from '../../../../src/modules/interoperability/stores/terminated_state'; import { StoreGetter } from '../../../../src/modules/base_store'; -import { MethodContext } from '../../../../src/state_machine'; +import { CommandExecuteContext, EventQueue, MethodContext } from '../../../../src/state_machine'; import { ChainAccountUpdatedEvent } from '../../../../src/modules/interoperability/events/chain_account_updated'; import { TerminatedStateCreatedEvent } from '../../../../src/modules/interoperability/events/terminated_state_created'; -import { createTransientMethodContext } from '../../../../src/testing'; +import { createTransactionContext, createTransientMethodContext } from '../../../../src/testing'; import { ChainValidatorsStore } from '../../../../src/modules/interoperability/stores/chain_validators'; import { certificateSchema, @@ -62,6 +69,13 @@ import { 
Certificate } from '../../../../src/engine/consensus/certificate_genera import { TerminatedOutboxCreatedEvent } from '../../../../src/modules/interoperability/events/terminated_outbox_created'; import { createStoreGetter } from '../../../../src/testing/utils'; import { InvalidCertificateSignatureEvent } from '../../../../src/modules/interoperability/events/invalid_certificate_signature'; +import { EVENT_TOPIC_TRANSACTION_EXECUTION } from '../../../../src/state_machine/constants'; +import { InvalidOutboxRootVerificationEvent } from '../../../../src/modules/interoperability/events/invalid_outbox_root_verification'; +import { + ccmSchema, + crossChainUpdateTransactionParams, +} from '../../../../src/modules/interoperability/schemas'; +import { InvalidSMTVerificationEvent } from '../../../../src/modules/interoperability/events/invalid_smt_verification'; describe('Base interoperability internal method', () => { const interopMod = new MainchainInteroperabilityModule(); @@ -149,7 +163,7 @@ describe('Base interoperability internal method', () => { siblingHashes: [], }, }, - certificateThreshold: BigInt(99), + certificateThreshold: BigInt(9), sendingChainID: cryptoUtils.getRandomBytes(4), }; let mainchainInteroperabilityInternalMethod: MainchainInteroperabilityInternalMethod; @@ -189,7 +203,15 @@ describe('Base interoperability internal method', () => { interopMod.events, new Map(), ); - methodContext = createTransientMethodContext({ stateStore }); + const defaultTopic = Buffer.concat([ + EVENT_TOPIC_TRANSACTION_EXECUTION, + cryptoUtils.hash(cryptoUtils.getRandomBytes(1)), + ]); + methodContext = createTransientMethodContext({ + stateStore, + eventQueue: new EventQueue(0, [], [defaultTopic]), + }); + // Adding transaction ID as default topic storeContext = createStoreGetter(stateStore); await channelDataSubstore.set(methodContext, chainID, channelData); await ownChainAccountSubstore.set(methodContext, EMPTY_BYTES, ownChainAccount); @@ -925,6 +947,134 @@ describe('Base 
interoperability internal method', () => { ).rejects.toThrow('New validators must have a positive BFT weight.'); }); + it('should reject if new active validator bft weight equals 0', async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4)], + // 7 corresponds to 0111 + bftWeightsUpdateBitmap: Buffer.from([7]), + }, + }; + const existingKey = Buffer.from([0, 2, 3, 0]); + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], + certificateThreshold: BigInt(1), + }); + const newValidators = [ + { blsKey: Buffer.from([0, 0, 0, 0]), bftWeight: BigInt(1) }, + { blsKey: Buffer.from([0, 0, 0, 1]), bftWeight: BigInt(3) }, + { blsKey: Buffer.from([0, 0, 2, 0]), bftWeight: BigInt(0) }, + ]; + jest.spyOn(utils, 'calculateNewActiveValidators').mockReturnValue(newValidators); + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow('Validator bft weight must be positive integer.'); + }); + + it(`should reject if total bft weight > ${MAX_UINT64}`, async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4)], + // 7 corresponds to 0111 + bftWeightsUpdateBitmap: Buffer.from([7]), + }, + }; + const existingKey = Buffer.from([0, 2, 3, 0]); + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], + certificateThreshold: BigInt(1), + }); + const newValidators = [ + { 
blsKey: Buffer.from([0, 0, 0, 0]), bftWeight: BigInt(1) }, + { blsKey: Buffer.from([0, 0, 0, 1]), bftWeight: BigInt(3) }, + { blsKey: Buffer.from([0, 0, 2, 0]), bftWeight: MAX_UINT64 }, + ]; + jest.spyOn(utils, 'calculateNewActiveValidators').mockReturnValue(newValidators); + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow('Total BFT weight exceeds maximum value.'); + }); + + it('should reject if certificate threshold is too small', async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4)], + // 7 corresponds to 0111 + bftWeightsUpdateBitmap: Buffer.from([7]), + }, + }; + const existingKey = Buffer.from([0, 2, 3, 0]); + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], + certificateThreshold: BigInt(1), + }); + const newValidators = [ + { blsKey: Buffer.from([0, 0, 0, 0]), bftWeight: BigInt(1000000000000) }, + { blsKey: Buffer.from([0, 0, 0, 1]), bftWeight: BigInt(1000000000000) }, + { blsKey: Buffer.from([0, 0, 2, 0]), bftWeight: BigInt(1000000000000) }, + ]; + jest.spyOn(utils, 'calculateNewActiveValidators').mockReturnValue(newValidators); + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow('Certificate threshold is too small.'); + }); + + it('should reject if certificate threshold is too large', async () => { + const ccu = { + ...ccuParams, + certificate: codec.encode(certificateSchema, defaultCertificate), + activeValidatorsUpdate: { + blsKeysUpdate: [ + Buffer.from([0, 0, 0, 0]), + Buffer.from([0, 0, 0, 1]), + Buffer.from([0, 0, 3, 0]), + ], + bftWeightsUpdate: [BigInt(1), BigInt(3), 
BigInt(4)], + // 7 corresponds to 0111 + bftWeightsUpdateBitmap: Buffer.from([7]), + }, + }; + const existingKey = Buffer.from([0, 2, 3, 0]); + await chainValidatorsSubstore.set(methodContext, ccu.sendingChainID, { + activeValidators: [{ blsKey: existingKey, bftWeight: BigInt(2) }], + certificateThreshold: BigInt(1), + }); + const newValidators = [ + { blsKey: Buffer.from([0, 0, 0, 0]), bftWeight: BigInt(1) }, + { blsKey: Buffer.from([0, 0, 0, 1]), bftWeight: BigInt(1) }, + { blsKey: Buffer.from([0, 0, 2, 0]), bftWeight: BigInt(1) }, + ]; + jest.spyOn(utils, 'calculateNewActiveValidators').mockReturnValue(newValidators); + + await expect( + mainchainInteroperabilityInternalMethod.verifyValidatorsUpdate(methodContext, ccu), + ).rejects.toThrow('Certificate threshold is too large.'); + }); + it('should reject if new validatorsHash does not match with certificate', async () => { const ccu = { ...ccuParams, @@ -1326,56 +1476,144 @@ describe('Base interoperability internal method', () => { }); describe('verifyPartnerChainOutboxRoot', () => { - const encodedCertificate = codec.encode(certificateSchema, defaultCertificate); - const txParams: CrossChainUpdateTransactionParams = { - certificate: encodedCertificate, - activeValidatorsUpdate: { - blsKeysUpdate: [], - bftWeightsUpdate: [], - bftWeightsUpdateBitmap: Buffer.from([]), + const certificate: Certificate = { + blockID: cryptography.utils.getRandomBytes(HASH_LENGTH), + height: 21, + timestamp: Math.floor(Date.now() / 1000), + stateRoot: cryptoUtils.getRandomBytes(HASH_LENGTH), + validatorsHash: cryptography.utils.getRandomBytes(HASH_LENGTH), + aggregationBits: cryptography.utils.getRandomBytes(1), + signature: cryptography.utils.getRandomBytes(BLS_SIGNATURE_LENGTH), + }; + const encodedDefaultCertificate = codec.encode(certificateSchema, { + ...certificate, + }); + // const txParams: CrossChainUpdateTransactionParams = { + // certificate: encodedDefaultCertificate, + // activeValidatorsUpdate: { + // blsKeysUpdate: 
[], + // bftWeightsUpdate: [], + // bftWeightsUpdateBitmap: Buffer.from([]), + // }, + // certificateThreshold: BigInt(10), + // sendingChainID: cryptoUtils.getRandomBytes(4), + // inboxUpdate: { + // crossChainMessages: [], + // messageWitnessHashes: [], + // outboxRootWitness: { + // bitmap: cryptoUtils.getRandomBytes(4), + // siblingHashes: [cryptoUtils.getRandomBytes(32)], + // }, + // }, + // }; + // const chainID = Buffer.alloc(4, 0); + const senderPublicKey = cryptoUtils.getRandomBytes(32); + const defaultTransaction = { + fee: BigInt(0), + module: interopMod.name, + nonce: BigInt(1), + senderPublicKey, + signatures: [], + }; + + const defaultSendingChainID = 20; + const defaultSendingChainIDBuffer = cryptoUtils.intToBuffer(defaultSendingChainID, 4); + const defaultCCMs: CCMsg[] = [ + { + crossChainCommand: CROSS_CHAIN_COMMAND_REGISTRATION, + fee: BigInt(0), + module: MODULE_NAME_INTEROPERABILITY, + nonce: BigInt(1), + params: Buffer.alloc(2), + receivingChainID: Buffer.from([0, 0, 0, 2]), + sendingChainID: defaultSendingChainIDBuffer, + status: CCMStatusCode.OK, }, - certificateThreshold: BigInt(10), - sendingChainID: cryptoUtils.getRandomBytes(4), - inboxUpdate: { - crossChainMessages: [], - messageWitnessHashes: [], - outboxRootWitness: { - bitmap: cryptoUtils.getRandomBytes(4), - siblingHashes: [cryptoUtils.getRandomBytes(32)], - }, + ]; + const defaultCCMsEncoded = defaultCCMs.map(ccMsg => codec.encode(ccmSchema, ccMsg)); + const defaultInboxUpdateValue = { + crossChainMessages: defaultCCMsEncoded, + messageWitnessHashes: [Buffer.alloc(32)], + outboxRootWitness: { + bitmap: Buffer.alloc(1), + siblingHashes: [Buffer.alloc(32)], }, }; + let commandExecuteContext: CommandExecuteContext; + let crossChainUpdateParams: CrossChainUpdateTransactionParams; + beforeEach(async () => { - await interopMod.stores.get(ChannelDataStore).set(methodContext, txParams.sendingChainID, { - ...channelData, - }); + crossChainUpdateParams = { + activeValidatorsUpdate: { + 
blsKeysUpdate: [], + bftWeightsUpdate: [], + bftWeightsUpdateBitmap: Buffer.alloc(0), + }, + certificate: encodedDefaultCertificate, + inboxUpdate: { ...defaultInboxUpdateValue }, + certificateThreshold: BigInt(20), + sendingChainID: cryptoUtils.intToBuffer(defaultSendingChainID, 4), + }; + commandExecuteContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: '', + params: codec.encode(crossChainUpdateTransactionParams, crossChainUpdateParams), + }), + }).createCommandExecuteContext(crossChainUpdateTransactionParams); + await interopMod.stores + .get(ChannelDataStore) + .set(commandExecuteContext, crossChainUpdateParams.sendingChainID, { + ...channelData, + }); + jest.spyOn(interopMod.events.get(InvalidOutboxRootVerificationEvent), 'error'); + jest.spyOn(interopMod.events.get(InvalidSMTVerificationEvent), 'error'); }); it('should reject when outboxRootWitness is empty but partnerchain outbox root does not match inboxRoot', async () => { await expect( - mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot(methodContext, { - ...txParams, - inboxUpdate: { - ...txParams.inboxUpdate, - outboxRootWitness: { - bitmap: Buffer.alloc(0), - siblingHashes: [], + mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot( + commandExecuteContext as any, + { + ...crossChainUpdateParams, + inboxUpdate: { + ...crossChainUpdateParams.inboxUpdate, + outboxRootWitness: { + bitmap: Buffer.alloc(0), + siblingHashes: [], + }, }, + certificate: Buffer.alloc(0), }, - certificate: Buffer.alloc(0), - }), + ), ).rejects.toThrow('Inbox root does not match partner chain outbox root'); + + expect( + interopMod['events'].get(InvalidOutboxRootVerificationEvent).error, + ).toHaveBeenCalledWith(commandExecuteContext, crossChainUpdateParams.sendingChainID, { + inboxRoot: expect.anything(), + partnerChainOutboxRoot: channelData.partnerChainOutboxRoot, + }); }); it('should reject when certificate state 
root does not contain valid inclusion proof for inbox update', async () => { jest.spyOn(SparseMerkleTree.prototype, 'verify').mockResolvedValue(false); await expect( - mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot(methodContext, { - ...txParams, - }), + mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot( + commandExecuteContext as any, + { + ...crossChainUpdateParams, + }, + ), ).rejects.toThrow('Invalid inclusion proof for inbox update'); + + expect(interopMod['events'].get(InvalidSMTVerificationEvent).error).toHaveBeenCalledWith( + commandExecuteContext, + ); }); it('should resolve when certificate is empty and inbox root matches partner outbox root', async () => { @@ -1385,17 +1623,21 @@ describe('Base interoperability internal method', () => { .mockReturnValue(channelData.partnerChainOutboxRoot); await expect( - mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot(methodContext, { - ...txParams, - inboxUpdate: { - ...txParams.inboxUpdate, - outboxRootWitness: { - bitmap: Buffer.alloc(0), - siblingHashes: [], + mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot( + commandExecuteContext as any, + { + ...crossChainUpdateParams, + inboxUpdate: { + crossChainMessages: [], + messageWitnessHashes: [], + outboxRootWitness: { + bitmap: Buffer.alloc(0), + siblingHashes: [], + }, }, + certificate: Buffer.alloc(0), }, - certificate: Buffer.alloc(0), - }), + ), ).resolves.toBeUndefined(); }); @@ -1405,9 +1647,12 @@ describe('Base interoperability internal method', () => { jest.spyOn(regularMerkleTree, 'calculateRootFromRightWitness').mockReturnValue(nextRoot); await expect( - mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot(methodContext, { - ...txParams, - }), + mainchainInteroperabilityInternalMethod.verifyPartnerChainOutboxRoot( + commandExecuteContext as any, + { + ...crossChainUpdateParams, + }, + ), ).resolves.toBeUndefined(); const outboxKey = Buffer.concat([ @@ -1415,15 
+1660,15 @@ describe('Base interoperability internal method', () => { cryptoUtils.hash(ownChainAccount.chainID), ]); expect(SparseMerkleTree.prototype.verify).toHaveBeenCalledWith( - defaultCertificate.stateRoot, + certificate.stateRoot, [outboxKey], { - siblingHashes: txParams.inboxUpdate.outboxRootWitness.siblingHashes, + siblingHashes: crossChainUpdateParams.inboxUpdate.outboxRootWitness.siblingHashes, queries: [ { key: outboxKey, value: cryptoUtils.hash(codec.encode(outboxRootSchema, { root: nextRoot })), - bitmap: txParams.inboxUpdate.outboxRootWitness.bitmap, + bitmap: crossChainUpdateParams.inboxUpdate.outboxRootWitness.bitmap, }, ], }, diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/initialize_message_recovery.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/initialize_message_recovery.spec.ts index 3965f8a986e..e2e769f541a 100644 --- a/framework/test/unit/modules/interoperability/mainchain/commands/initialize_message_recovery.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/initialize_message_recovery.spec.ts @@ -16,6 +16,7 @@ import { codec } from '@liskhq/lisk-codec'; import { utils } from '@liskhq/lisk-cryptography'; import { SparseMerkleTree } from '@liskhq/lisk-db'; import { + CommandExecuteContext, CommandVerifyContext, MainchainInteroperabilityModule, Transaction, @@ -45,6 +46,7 @@ import { TerminatedStateStore } from '../../../../../../src/modules/interoperabi import { OwnChainAccount } from '../../../../../../src/modules/interoperability/types'; import { PrefixedStateReadWriter } from '../../../../../../src/state_machine/prefixed_state_read_writer'; import { createTransactionContext, InMemoryPrefixedStateDB } from '../../../../../../src/testing'; +import { InvalidSMTVerificationEvent } from '../../../../../../src/modules/interoperability/events/invalid_smt_verification'; describe('InitializeMessageRecoveryCommand', () => { const interopMod = new 
MainchainInteroperabilityModule(); @@ -240,70 +242,23 @@ describe('InitializeMessageRecoveryCommand', () => { expect(result.error?.message).toInclude(`Terminated outbox account already exists.`); }); - it('should reject when proof of inclusion is not valid', async () => { - jest.spyOn(SparseMerkleTree.prototype, 'verify').mockResolvedValue(false); - - const result = await command.verify(defaultContext); - - expect(result.status).toBe(VerifyStatus.FAIL); - expect(result.error?.message).toInclude( - 'Message recovery initialization proof of inclusion is not valid.', - ); - }); - it('should resolve when ownchainID !== mainchainID', async () => { await interopMod.stores .get(OwnChainAccountStore) .set(stateStore, EMPTY_BYTES, { ...ownChainAccount, chainID: Buffer.from([2, 2, 2, 2]) }); - const queryKey = Buffer.concat([ - interopMod.stores.get(ChannelDataStore).key, - utils.hash(Buffer.from([2, 2, 2, 2])), - ]); await expect(command.verify(defaultContext)).resolves.toEqual({ status: VerifyStatus.OK }); - expect(SparseMerkleTree.prototype.verify).toHaveBeenCalledWith( - terminatedState.stateRoot, - [queryKey], - { - siblingHashes: defaultParams.siblingHashes, - queries: [ - { - key: queryKey, - value: utils.hash(defaultParams.channel), - bitmap: defaultParams.bitmap, - }, - ], - }, - ); }); it('should resolve when params is valid', async () => { - const queryKey = Buffer.concat([ - interopMod.stores.get(ChannelDataStore).key, - utils.hash(ownChainAccount.chainID), - ]); - await expect(command.verify(defaultContext)).resolves.toEqual({ status: VerifyStatus.OK }); - expect(SparseMerkleTree.prototype.verify).toHaveBeenCalledWith( - terminatedState.stateRoot, - [queryKey], - { - siblingHashes: defaultParams.siblingHashes, - queries: [ - { - key: queryKey, - value: utils.hash(defaultParams.channel), - bitmap: defaultParams.bitmap, - }, - ], - }, - ); }); }); describe('execute', () => { - it('should create terminated outbox account', async () => { - const context = 
createTransactionContext({ + let executeContext: CommandExecuteContext; + beforeEach(() => { + executeContext = createTransactionContext({ stateStore, transaction: new Transaction({ ...defaultTx, @@ -313,7 +268,33 @@ describe('InitializeMessageRecoveryCommand', () => { }), }), }).createCommandExecuteContext(command.schema); - await expect(command.execute(context)).resolves.toBeUndefined(); + }); + + it('should reject when proof of inclusion is not valid and log SMT verification event', async () => { + jest.spyOn(SparseMerkleTree.prototype, 'verifyInclusionProof').mockResolvedValue(false); + jest.spyOn(command['events'].get(InvalidSMTVerificationEvent), 'error'); + + await expect(command.execute(executeContext)).rejects.toThrow( + 'Message recovery initialization proof of inclusion is not valid', + ); + expect(command['events'].get(InvalidSMTVerificationEvent).error).toHaveBeenCalledOnceWith( + executeContext, + ); + }); + + it('should create terminated outbox account', async () => { + await interopMod.stores.get(TerminatedOutboxStore).set(stateStore, targetChainID, { + outboxRoot: utils.getRandomBytes(32), + outboxSize: 10, + partnerChainInboxSize: 20, + }); + jest.spyOn(SparseMerkleTree.prototype, 'verifyInclusionProof').mockResolvedValue(true); + const queryKey = Buffer.concat([ + interopMod.stores.get(ChannelDataStore).key, + utils.hash(executeContext.chainID), + ]); + + await expect(command.execute(executeContext)).resolves.toBeUndefined(); expect(command['internalMethod'].createTerminatedOutboxAccount).toHaveBeenCalledWith( expect.anything(), @@ -322,6 +303,21 @@ describe('InitializeMessageRecoveryCommand', () => { storedChannel.outbox.size, paramsChannel.inbox.size, ); + + expect(SparseMerkleTree.prototype.verifyInclusionProof).toHaveBeenCalledWith( + terminatedState.stateRoot, + [queryKey], + { + siblingHashes: defaultParams.siblingHashes, + queries: [ + { + key: queryKey, + value: utils.hash(defaultParams.channel), + bitmap: defaultParams.bitmap, + }, + ], 
+ }, + ); }); }); }); diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/recover_message.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/recover_message.spec.ts index 016877a0af8..088fd60bf51 100644 --- a/framework/test/unit/modules/interoperability/mainchain/commands/recover_message.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/recover_message.spec.ts @@ -31,6 +31,7 @@ import { CONTEXT_STORE_KEY_CCM_PROCESSING, CROSS_CHAIN_COMMAND_CHANNEL_TERMINATED, CROSS_CHAIN_COMMAND_REGISTRATION, + EVENT_TOPIC_CCM_EXECUTION, MODULE_NAME_INTEROPERABILITY, } from '../../../../../../src/modules/interoperability/constants'; import { RecoverMessageCommand } from '../../../../../../src/modules/interoperability/mainchain/commands/recover_message'; @@ -57,7 +58,7 @@ import { CCMProcessedResult, } from '../../../../../../src/modules/interoperability/events/ccm_processed'; import { CcmSendSuccessEvent } from '../../../../../../src/modules/interoperability/events/ccm_send_success'; -import { InvalidRMTVerification } from '../../../../../../src/modules/interoperability/events/invalid_rmt_verification'; +import { InvalidRMTVerificationEvent } from '../../../../../../src/modules/interoperability/events/invalid_rmt_verification'; describe('MessageRecoveryCommand', () => { const interopModule = new MainchainInteroperabilityModule(); @@ -491,7 +492,7 @@ describe('MessageRecoveryCommand', () => { jest.spyOn(command, '_forwardRecovery' as never); jest.spyOn(interopModule.stores.get(TerminatedOutboxStore), 'set'); jest.spyOn(commandExecuteContext['contextStore'], 'set'); - jest.spyOn(command['events'].get(InvalidRMTVerification), 'error'); + jest.spyOn(command['events'].get(InvalidRMTVerificationEvent), 'error'); }); it('should return error if message recovery proof of inclusion is not valid', async () => { @@ -525,7 +526,7 @@ describe('MessageRecoveryCommand', () => { await 
expect(command.execute(commandExecuteContext)).rejects.toThrow( 'Message recovery proof of inclusion is not valid.', ); - expect(command['events'].get(InvalidRMTVerification).error).toHaveBeenCalledWith( + expect(command['events'].get(InvalidRMTVerificationEvent).error).toHaveBeenCalledWith( commandExecuteContext, ); }); @@ -540,7 +541,9 @@ describe('MessageRecoveryCommand', () => { const ctx: CrossChainMessageContext = { ...commandExecuteContext, ccm, - eventQueue: commandExecuteContext.eventQueue.getChildQueue(utils.hash(crossChainMessage)), + eventQueue: commandExecuteContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, utils.hash(crossChainMessage)]), + ), }; expect(command['_applyRecovery']).toHaveBeenCalledWith(ctx); @@ -567,7 +570,9 @@ describe('MessageRecoveryCommand', () => { const ctx: CrossChainMessageContext = { ...commandExecuteContext, ccm, - eventQueue: commandExecuteContext.eventQueue.getChildQueue(utils.hash(crossChainMessage)), + eventQueue: commandExecuteContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, utils.hash(crossChainMessage)]), + ), }; expect(command['_forwardRecovery']).toHaveBeenCalledWith(ctx); diff --git a/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts b/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts index db9fad7214a..84c3aec1b5e 100644 --- a/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/commands/submit_mainchain_cross_chain_update.spec.ts @@ -57,6 +57,7 @@ import { CROSS_CHAIN_COMMAND_REGISTRATION, CROSS_CHAIN_COMMAND_SIDECHAIN_TERMINATED, EMPTY_FEE_ADDRESS, + EVENT_TOPIC_CCM_EXECUTION, HASH_LENGTH, MIN_RETURN_FEE_PER_BYTE_BEDDOWS, MODULE_NAME_INTEROPERABILITY, @@ -537,6 +538,29 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { 
); }); + it('should call panic which shutdown the application when apply fails', async () => { + const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => { + return undefined as never; + }); + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: mainchainCCUUpdateCommand.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + }), + }), + }).createCommandExecuteContext(mainchainCCUUpdateCommand.schema); + jest + .spyOn(mainchainCCUUpdateCommand, 'apply' as never) + .mockRejectedValue(new Error('Something went wrong.') as never); + await expect(mainchainCCUUpdateCommand.execute(executeContext)).resolves.toBeUndefined(); + expect(mockExit).toHaveBeenCalledWith(1); + expect(mainchainCCUUpdateCommand['apply']).toHaveBeenCalledTimes(1); + }); + it('should call apply for ccm and add to the inbox where receiving chain is the main chain', async () => { executeContext = createTransactionContext({ chainID, @@ -557,7 +581,9 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { expect(mainchainCCUUpdateCommand['apply']).toHaveBeenCalledWith({ ...executeContext, ccm: decodedCCM, - eventQueue: executeContext.eventQueue.getChildQueue(ccmID), + eventQueue: executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ), }); expect(mainchainCCUUpdateCommand['internalMethod'].appendToInboxTree).toHaveBeenCalledTimes( 3, @@ -586,7 +612,9 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { expect(mainchainCCUUpdateCommand['_forward']).toHaveBeenCalledWith({ ...executeContext, ccm: firstDecodedCCM, - eventQueue: executeContext.eventQueue.getChildQueue(firstCCMID), + eventQueue: executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, firstCCMID]), + ), }); const { ccmID: thirdCCMID, decodedCCM: thirdDecodedCCM } = getDecodedCCMAndID( params.inboxUpdate.crossChainMessages[2], @@ -594,12 
+622,37 @@ describe('SubmitMainchainCrossChainUpdateCommand', () => { expect(mainchainCCUUpdateCommand['_forward']).toHaveBeenCalledWith({ ...executeContext, ccm: thirdDecodedCCM, - eventQueue: executeContext.eventQueue.getChildQueue(thirdCCMID), + eventQueue: executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, thirdCCMID]), + ), }); expect(mainchainCCUUpdateCommand['internalMethod'].appendToInboxTree).toHaveBeenCalledTimes( 3, ); }); + + it('should call panic which shutdown the application when forward fails', async () => { + const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => { + return undefined as never; + }); + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: mainchainCCUUpdateCommand.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + }), + }), + }).createCommandExecuteContext(mainchainCCUUpdateCommand.schema); + jest + .spyOn(mainchainCCUUpdateCommand, '_forward' as never) + .mockRejectedValue(new Error('Something went wrong.') as never); + await expect(mainchainCCUUpdateCommand.execute(executeContext)).resolves.toBeUndefined(); + expect(mockExit).toHaveBeenCalledWith(1); + expect(mainchainCCUUpdateCommand['_forward']).toHaveBeenCalledTimes(1); + }); }); describe('_forward', () => { diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index e5e5533ea65..3465e8c860b 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -322,6 +322,38 @@ describe('initGenesisState', () => { ].join(', ')}`, ); }); + + it('should throw if chainInfo.chainData.status === TERMINATED exists but no terminateStateAccount', async () => { + const context = createInitGenesisStateContext( + { + 
...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + // No terminatedStateAccount + terminatedStateAccounts: [], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `For each chainInfo with status terminated there should be a corresponding entry in terminatedStateAccounts.`, + ); + }); }); it('should check that _verifyChannelData is called from _verifyChainInfos', async () => { @@ -330,21 +362,235 @@ describe('initGenesisState', () => { await expect( interopMod.initGenesisState(contextWithValidValidatorsHash), ).resolves.toBeUndefined(); + }); - // must be true to pass this test - expect(interopMod['_verifyChannelData']).toHaveBeenCalled(); + it('should call _verifyTerminatedStateAccountsIDs', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccountsIDs' as any); + + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); + expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); }); - it('should check that _verifyChainValidators is called from _verifyChainInfos', async () => { - jest.spyOn(interopMod, '_verifyChainValidators' as any); + it('should throw error if 
chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.ACTIVE, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect( - interopMod.initGenesisState(contextWithValidValidatorsHash), - ).resolves.toBeUndefined(); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, + ); + }); - // must be true to pass this test - expect(interopMod['_verifyChainValidators']).toHaveBeenCalled(); + it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is REGISTERED', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, + ); }); + + it('should throw error if chainID in terminatedStateAccounts does not exist in chainInfo', async () => { + const 
context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 2]), + terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', + ); + }); + + it('should throw error if some stateAccount in terminatedStateAccounts have stateRoot not equal to chainData.lastCertificate.stateRoot', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount: { + ...terminatedStateAccount, + stateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), + }, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", + ); + }); + + it('should throw error if some stateAccount in terminatedStateAccounts have mainchainStateRoot not equal to EMPTY_HASH', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount: { + ...terminatedStateAccount, + mainchainStateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), + }, + }, + 
], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, + ); + }); + + it('should throw error if some stateAccount in terminatedStateAccounts is not initialized', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: validChainInfos, + terminatedStateAccounts: [ + { + chainID: Buffer.from([0, 0, 0, 1]), + terminatedStateAccount: { + ...terminatedStateAccount, + initialized: false, + }, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + 'stateAccount is not initialized.', + ); + }); + }); + + it('should check that _verifyChainValidators is called from _verifyChainInfos', async () => { + jest.spyOn(interopMod, '_verifyChainValidators' as any); + + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + + // must be true to pass this test + expect(interopMod['_verifyChainValidators']).toHaveBeenCalled(); }); it(`should call _verifyTerminatedStateAccounts from initGenesisState`, async () => { @@ -472,7 +718,6 @@ describe('initGenesisState', () => { ...lastCertificate, validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, - status: ChainStatus.TERMINATED, }, chainValidators: { activeValidators, @@ -527,8 +772,8 @@ describe('initGenesisState', () => { ); }); - it('should call _verifyTerminatedStateAccountsCommon', async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccountsCommon' as any); + it('should call _verifyTerminatedStateAccounts', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); const context = createInitGenesisStateContext( { @@ -561,7 +806,7 @@ describe('initGenesisState', () => { ); await 
expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); - expect(interopMod['_verifyTerminatedStateAccountsCommon']).toHaveBeenCalledTimes(1); + expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); }); it('should throw error if some stateAccount in terminatedStateAccounts have mainchainStateRoot not equal to EMPTY_HASH', async () => { diff --git a/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts b/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts index 1b5c68b914d..ec3ef4bce05 100644 --- a/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts @@ -50,7 +50,7 @@ import { import { createStoreGetter } from '../../../../../../src/testing/utils'; import { OwnChainAccountStore } from '../../../../../../src/modules/interoperability/stores/own_chain_account'; import { getMainchainID } from '../../../../../../src/modules/interoperability/utils'; -import { InvalidSMTVerification } from '../../../../../../src/modules/interoperability/events/invalid_smt_verification'; +import { InvalidSMTVerificationEvent } from '../../../../../../src/modules/interoperability/events/invalid_smt_verification'; describe('Sidechain InitializeStateRecoveryCommand', () => { const interopMod = new SidechainInteroperabilityModule(); @@ -368,7 +368,7 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { }); describe('execute', () => { - let invalidSMTVerificationEvent: InvalidSMTVerification; + let invalidSMTVerificationEvent: InvalidSMTVerificationEvent; beforeEach(() => { mainchainAccount = { name: 'mainchain', @@ -380,9 +380,9 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { }, status: ChainStatus.ACTIVE, }; - invalidSMTVerificationEvent = new InvalidSMTVerification(interopMod.name); + 
invalidSMTVerificationEvent = new InvalidSMTVerificationEvent(interopMod.name); stateRecoveryInitCommand['events'].register( - InvalidSMTVerification, + InvalidSMTVerificationEvent, invalidSMTVerificationEvent, ); }); diff --git a/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts b/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts index 3550031e0e3..44fecc3766c 100644 --- a/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/commands/submit_sidechain_cross_chain_update.spec.ts @@ -47,6 +47,7 @@ import { CCMStatusCode, CROSS_CHAIN_COMMAND_REGISTRATION, CROSS_CHAIN_COMMAND_SIDECHAIN_TERMINATED, + EVENT_TOPIC_CCM_EXECUTION, MIN_RETURN_FEE_PER_BYTE_BEDDOWS, MODULE_NAME_INTEROPERABILITY, } from '../../../../../../src/modules/interoperability/constants'; @@ -380,6 +381,29 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { ); }); + it('should call panic which shutdown the application when apply fails', async () => { + const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => { + return undefined as never; + }); + executeContext = createTransactionContext({ + chainID, + stateStore, + transaction: new Transaction({ + ...defaultTransaction, + command: sidechainCCUUpdateCommand.name, + params: codec.encode(crossChainUpdateTransactionParams, { + ...params, + }), + }), + }).createCommandExecuteContext(sidechainCCUUpdateCommand.schema); + jest + .spyOn(sidechainCCUUpdateCommand, 'apply' as never) + .mockRejectedValue(new Error('Something went wrong.') as never); + await expect(sidechainCCUUpdateCommand.execute(executeContext)).resolves.toBeUndefined(); + expect(mockExit).toHaveBeenCalledWith(1); + expect(sidechainCCUUpdateCommand['apply']).toHaveBeenCalledTimes(1); + }); + it('should call apply for ccm and add to the inbox 
where receiving chain is the main chain', async () => { executeContext = createTransactionContext({ chainID, @@ -400,7 +424,9 @@ describe('SubmitSidechainCrossChainUpdateCommand', () => { expect(sidechainCCUUpdateCommand['apply']).toHaveBeenCalledWith({ ...executeContext, ccm: decodedCCM, - eventQueue: executeContext.eventQueue.getChildQueue(ccmID), + eventQueue: executeContext.eventQueue.getChildQueue( + Buffer.concat([EVENT_TOPIC_CCM_EXECUTION, ccmID]), + ), }); } expect(sidechainCCUUpdateCommand['internalMethod'].appendToInboxTree).toHaveBeenCalledTimes( diff --git a/framework/test/unit/modules/interoperability/sidechain/module.spec.ts b/framework/test/unit/modules/interoperability/sidechain/module.spec.ts index 2c265156395..eb9b66149df 100644 --- a/framework/test/unit/modules/interoperability/sidechain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/module.spec.ts @@ -461,8 +461,8 @@ describe('initGenesisState', () => { describe('_verifyTerminatedStateAccounts', () => { const chainIDNotEqualToOwnChainID = Buffer.from([1, 3, 5, 7]); - it('should call _verifyTerminatedStateAccountsCommon', async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccountsCommon' as any); + it('should call _verifyTerminatedStateAccounts', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); // const chainIDDefault = getMainchainID(chainID); const context = createInitGenesisStateContext( @@ -480,7 +480,7 @@ describe('initGenesisState', () => { ); await interopMod.initGenesisState(context); - expect(interopMod['_verifyTerminatedStateAccountsCommon']).toHaveBeenCalledTimes(1); + expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); }); it(`should throw error if stateAccount.chainID is equal to OWN_CHAIN_ID`, async () => { diff --git a/framework/test/unit/modules/interoperability/utils.spec.ts b/framework/test/unit/modules/interoperability/utils.spec.ts index 84576d455a0..b7be77d65fb 100644 --- 
a/framework/test/unit/modules/interoperability/utils.spec.ts +++ b/framework/test/unit/modules/interoperability/utils.spec.ts @@ -33,10 +33,7 @@ import { } from '../../../../src/modules/interoperability/types'; import { checkCertificateTimestamp, - checkCertificateValidity, - checkLivenessRequirementFirstCCU, - checkValidatorsHashWithCertificate, - computeValidatorsHash, + checkLivenessRequirement, validateFormat, verifyLivenessConditionForRegisteredChains, isValidName, @@ -61,22 +58,11 @@ describe('Utils', () => { signature: cryptography.utils.getRandomBytes(BLS_SIGNATURE_LENGTH), }; - const defaultActiveValidatorsUpdate = { - blsKeysUpdate: [ - utils.getRandomBytes(48), - utils.getRandomBytes(48), - utils.getRandomBytes(48), - utils.getRandomBytes(48), - ].sort((v1, v2) => v1.compare(v2)), - bftWeightsUpdate: [BigInt(1), BigInt(3), BigInt(4), BigInt(3)], - bftWeightsUpdateBitmap: Buffer.from([1, 0, 2]), - }; - beforeEach(() => { jest.spyOn(validator, 'validate'); }); - describe('checkLivenessRequirementFirstCCU', () => { + describe('checkLivenessRequirement', () => { const partnerChainAccount = { status: ChainStatus.REGISTERED, }; @@ -91,7 +77,7 @@ describe('Utils', () => { }; it(`should return VerifyStatus.FAIL status when chain status ${ChainStatus.REGISTERED} && certificate is empty`, () => { - const result = checkLivenessRequirementFirstCCU( + const result = checkLivenessRequirement( partnerChainAccount as ChainAccount, txParamsEmptyCertificate as CrossChainUpdateTransactionParams, ); @@ -99,7 +85,7 @@ describe('Utils', () => { }); it(`should return status VerifyStatus.OK status when chain status ${ChainStatus.REGISTERED} && certificate is non-empty`, () => { - const result = checkLivenessRequirementFirstCCU( + const result = checkLivenessRequirement( partnerChainAccount as ChainAccount, txParamsNonEmptyCertificate as CrossChainUpdateTransactionParams, ); @@ -107,74 +93,6 @@ describe('Utils', () => { }); }); - describe('checkCertificateValidity', () => { - 
const partnerChainAccount = { - lastCertificate: { - height: 20, - }, - }; - - const partnerChainAccountWithHigherHeight = { - lastCertificate: { - height: 40, - }, - }; - - const certificate = { - ...defaultCertificate, - }; - - const certificateWithEmptyValues = { - ...defaultCertificate, - stateRoot: EMPTY_BYTES, - validatorsHash: EMPTY_BYTES, - aggregationBits: EMPTY_BYTES, - signature: EMPTY_BYTES, - }; - - const encodedCertificate = codec.encode(certificateSchema, certificate); - const encodedWithEmptyValuesCertificate = codec.encode( - certificateSchema, - certificateWithEmptyValues, - ); - - it('should return VerifyStatus.FAIL when certificate required properties are missing', () => { - const { status, error } = checkCertificateValidity( - partnerChainAccount as ChainAccount, - encodedWithEmptyValuesCertificate, - ); - - expect(status).toEqual(VerifyStatus.FAIL); - expect(error?.message).toBe('Certificate is missing required values.'); - }); - - it('should return VerifyStatus.FAIL when certificate height is less than or equal to last certificate height', () => { - const { status, error } = checkCertificateValidity( - partnerChainAccountWithHigherHeight as ChainAccount, - encodedCertificate, - ); - - expect(status).toEqual(VerifyStatus.FAIL); - expect(error?.message).toBe( - 'Certificate height should be greater than last certificate height.', - ); - }); - - it('should return VerifyStatus.OK when certificate has all values and height greater than last certificate height', () => { - const { status, error } = checkCertificateValidity( - partnerChainAccount as ChainAccount, - encodedCertificate, - ); - - expect(status).toEqual(VerifyStatus.OK); - expect(error).toBeUndefined(); - expect(validator.validate).toHaveBeenCalledWith( - certificateSchema, - expect.toBeObject() as Certificate, - ); - }); - }); - describe('checkCertificateTimestamp', () => { const timestamp = Date.now(); const txParams: any = { @@ -207,157 +125,18 @@ describe('Utils', () => { 
).toThrow('Certificate is invalid due to invalid timestamp.'); }); - it('should return undefined certificate.timestamp is less than header.timestamp', () => { - expect(checkCertificateTimestamp(txParams, certificate, header)).toBeUndefined(); - }); - }); - - describe('checkValidatorsHashWithCertificate', () => { - const activeValidatorsUpdate = { ...defaultActiveValidatorsUpdate }; - const partnerValidators: any = { - certificateThreshold: BigInt(10), - activeValidators: activeValidatorsUpdate.blsKeysUpdate.map((v, i) => ({ - blsKey: v, - bftWeight: activeValidatorsUpdate.bftWeightsUpdate[i] + BigInt(1), - })), - }; - const validatorsHash = computeValidatorsHash( - partnerValidators.activeValidators, - partnerValidators.certificateThreshold, - ); - - const certificate: Certificate = { - ...defaultCertificate, - validatorsHash, - }; - - const encodedCertificate = codec.encode(certificateSchema, certificate); - - const txParams: any = { - certificate: encodedCertificate, - activeValidatorsUpdate, - certificateThreshold: BigInt(10), - }; - - beforeEach(() => { - jest - .spyOn(interopUtils, 'calculateNewActiveValidators') - .mockReturnValue(partnerValidators.activeValidators); - }); - - it('should return VerifyStatus.FAIL when certificate is empty', () => { - const txParamsWithIncorrectHash = { ...txParams, certificate: EMPTY_BYTES }; - const { status, error } = checkValidatorsHashWithCertificate( - txParamsWithIncorrectHash, - partnerValidators, - ); - - expect(status).toEqual(VerifyStatus.FAIL); - expect(error?.message).toBe( - 'Certificate cannot be empty when activeValidatorsUpdate or certificateThreshold has a non-empty value.', - ); - }); - - it('should return VerifyStatus.FAIL when certificate has missing fields', () => { - const txParamsWithIncorrectHash = { ...txParams, certificate: Buffer.alloc(2) }; - const { status, error } = checkValidatorsHashWithCertificate( - txParamsWithIncorrectHash, - partnerValidators, - ); - - 
expect(status).toEqual(VerifyStatus.FAIL); - expect(error?.message).toBe( - 'Certificate should have all required values when activeValidatorsUpdate or certificateThreshold has a non-empty value.', - ); - }); - - it('should return VerifyStatus.FAIL when validators hash is incorrect', () => { - const certificateInvalidValidatorHash: Certificate = { - ...certificate, - validatorsHash: cryptography.utils.getRandomBytes(HASH_LENGTH), - }; - const invalidEncodedCertificate = codec.encode( - certificateSchema, - certificateInvalidValidatorHash, - ); - - const txParamsWithIncorrectHash = { ...txParams, certificate: invalidEncodedCertificate }; - const { status, error } = checkValidatorsHashWithCertificate( - txParamsWithIncorrectHash, - partnerValidators, - ); - - expect(status).toEqual(VerifyStatus.FAIL); - expect(error?.message).toBe('Validators hash given in the certificate is incorrect.'); - }); - - it('should return VerifyStatus.OK when validators hash is correct', () => { - const txParamsWithCorrectHash = { ...txParams }; - const { status, error } = checkValidatorsHashWithCertificate( - txParamsWithCorrectHash, - partnerValidators, - ); - - expect(status).toEqual(VerifyStatus.OK); - expect(error).toBeUndefined(); - expect(validator.validate).toHaveBeenCalledWith( - certificateSchema, - expect.toBeObject() as Certificate, - ); - }); - - it('should return VerifyStatus.OK when activeValidatorsUpdate is empty and certificateThreshold === 0', () => { - const ineligibleTxParams = { - ...txParams, - activeValidatorsUpdate: { - bftWeightsUpdate: [], - bftWeightsUpdateBitmap: Buffer.from([]), - blsKeysUpdate: [], - }, - certificateThreshold: BigInt(0), - }; - const { status, error } = checkValidatorsHashWithCertificate( - ineligibleTxParams, - partnerValidators, - ); - - expect(status).toEqual(VerifyStatus.OK); - expect(error).toBeUndefined(); - }); - - it('should return VerifyStatus.OK when certificateThreshold === 0 but activeValidatorsUpdate.length > 0', () => { - const { 
status, error } = checkValidatorsHashWithCertificate( - { ...txParams, certificateThreshold: BigInt(0) }, - partnerValidators, - ); - - expect(status).toEqual(VerifyStatus.OK); - expect(error).toBeUndefined(); - expect(validator.validate).toHaveBeenCalledWith( - certificateSchema, - expect.toBeObject() as Certificate, - ); + it('should throw error when certificate.timestamp is equal to header.timestamp', () => { + expect(() => + checkCertificateTimestamp( + txParams, + { ...certificate, timestamp: header.timestamp }, + header, + ), + ).toThrow('Certificate is invalid due to invalid timestamp.'); }); - it('should return VerifyStatus.OK when certificateThreshold > 0 but activeValidatorsUpdate is empty', () => { - const { status, error } = checkValidatorsHashWithCertificate( - { - ...txParams, - activeValidatorsUpdate: { - bftWeightsUpdate: [], - bftWeightsUpdateBitmap: Buffer.from([]), - blsKeysUpdate: [], - }, - }, - partnerValidators, - ); - - expect(status).toEqual(VerifyStatus.OK); - expect(error).toBeUndefined(); - expect(validator.validate).toHaveBeenCalledWith( - certificateSchema, - expect.toBeObject() as Certificate, - ); + it('should return undefined certificate.timestamp is less than header.timestamp', () => { + expect(checkCertificateTimestamp(txParams, certificate, header)).toBeUndefined(); }); }); diff --git a/framework/test/unit/modules/pos/endpoint.spec.ts b/framework/test/unit/modules/pos/endpoint.spec.ts index 7094bc39d80..5b9c53cd701 100644 --- a/framework/test/unit/modules/pos/endpoint.spec.ts +++ b/framework/test/unit/modules/pos/endpoint.spec.ts @@ -15,7 +15,7 @@ import { address as cryptoAddress, utils } from '@liskhq/lisk-cryptography'; import { codec } from '@liskhq/lisk-codec'; import { math } from '@liskhq/lisk-utils'; -import { defaultConfig, EMPTY_KEY } from '../../../../src/modules/pos/constants'; +import { COMMISSION, defaultConfig, EMPTY_KEY } from '../../../../src/modules/pos/constants'; import { PoSEndpoint } from 
'../../../../src/modules/pos/endpoint'; import { InMemoryPrefixedStateDB } from '../../../../src/testing/in_memory_prefixed_state'; import { PrefixedStateReadWriter } from '../../../../src/state_machine/prefixed_state_read_writer'; @@ -306,6 +306,7 @@ describe('PosModuleEndpoint', () => { ...defaultConfig, roundLength: defaultConfig.numberActiveValidators + defaultConfig.numberStandbyValidators, posTokenID: config.posTokenID.toString('hex'), + defaultCommission: COMMISSION, }); }); }); diff --git a/framework/test/unit/modules/pos/module.spec.ts b/framework/test/unit/modules/pos/module.spec.ts index eefd43f078f..14f92b4a5c1 100644 --- a/framework/test/unit/modules/pos/module.spec.ts +++ b/framework/test/unit/modules/pos/module.spec.ts @@ -1033,7 +1033,8 @@ describe('PoS module', () => { .mockReturnValue(missedBlocks); validatorData[missedValidatorIndex].consecutiveMissedBlocks = 50; - validatorData[missedValidatorIndex].lastGeneratedHeight = nextForgedHeight - 260000 + 5000; + validatorData[missedValidatorIndex].lastGeneratedHeight = + nextForgedHeight - defaultConfig.failSafeInactiveWindow + 5000; await validatorStore.set( createStoreGetter(stateStore), diff --git a/framework/test/unit/modules/token/cc_method.spec.ts b/framework/test/unit/modules/token/cc_method.spec.ts index 6f219684fb2..90ad6491dd0 100644 --- a/framework/test/unit/modules/token/cc_method.spec.ts +++ b/framework/test/unit/modules/token/cc_method.spec.ts @@ -19,6 +19,8 @@ import { CCM_STATUS_OK, CHAIN_ID_LENGTH, CROSS_CHAIN_COMMAND_NAME_TRANSFER, + MIN_MODULE_NAME_LENGTH, + MAX_MODULE_NAME_LENGTH, TokenEventResult, } from '../../../../src/modules/token/constants'; import { TokenInteroperableMethod } from '../../../../src/modules/token/cc_method'; @@ -700,6 +702,52 @@ describe('TokenInteroperableMethod', () => { ); }); + it('should reject if module name length in lockedBalances is not valid', async () => { + await expect( + tokenInteropMethod.recover({ + ...createRecoverContext(stateStore), + 
storeKey: Buffer.concat([defaultAddress, defaultTokenID]), + substorePrefix: userStore.subStorePrefix, + storeValue: codec.encode(userStoreSchema, { + availableBalance: defaultAccount.availableBalance, + lockedBalances: [ + { module: 'token'.repeat(MIN_MODULE_NAME_LENGTH - 1), amount: BigInt(10) }, + ], + }), + terminatedChainID: sendingChainID, + }), + ).rejects.toThrow('Invalid arguments.'); + + checkEventResult( + methodContext.eventQueue, + RecoverEvent, + TokenEventResult.RECOVER_FAIL_INVALID_INPUTS, + ); + + await expect( + tokenInteropMethod.recover({ + ...createRecoverContext(stateStore), + storeKey: Buffer.concat([defaultAddress, defaultTokenID]), + substorePrefix: userStore.subStorePrefix, + storeValue: codec.encode(userStoreSchema, { + availableBalance: defaultAccount.availableBalance, + lockedBalances: [ + { module: '1'.repeat(MAX_MODULE_NAME_LENGTH + 1), amount: BigInt(10) }, + ], + }), + terminatedChainID: sendingChainID, + }), + ).rejects.toThrow('Invalid arguments.'); + + checkEventResult( + methodContext.eventQueue, + RecoverEvent, + TokenEventResult.RECOVER_FAIL_INVALID_INPUTS, + 2, + 1, + ); + }); + it('should reject if token is not native', async () => { jest .spyOn(tokenInteropMethod['_interopMethod'], 'getMessageFeeTokenIDFromCCM') diff --git a/framework/test/unit/modules/token/init_genesis_state_fixture.ts b/framework/test/unit/modules/token/init_genesis_state_fixture.ts index 93a6dc5e14b..acb1d9b8f6f 100644 --- a/framework/test/unit/modules/token/init_genesis_state_fixture.ts +++ b/framework/test/unit/modules/token/init_genesis_state_fixture.ts @@ -221,22 +221,6 @@ export const invalidGenesisAssets = [ }, 'contains 0 amount locked balance', ], - [ - 'Empty account on userSubstore', - { - ...validData, - userSubstore: [ - ...validData.userSubstore, - { - address: Buffer.alloc(20, 2), - tokenID: Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), - availableBalance: BigInt('0'), - lockedBalances: [], - }, - ], - }, - 'has empty data', - ], [ 'Duplicate 
address and tokenID for userSubstore', { diff --git a/framework/test/unit/state_machine/state_machine.spec.ts b/framework/test/unit/state_machine/state_machine.spec.ts index 211d12f8659..236322eb399 100644 --- a/framework/test/unit/state_machine/state_machine.spec.ts +++ b/framework/test/unit/state_machine/state_machine.spec.ts @@ -36,6 +36,7 @@ import { EVENT_INDEX_BEFORE_TRANSACTIONS, EVENT_INDEX_FINALIZE_GENESIS_STATE, EVENT_INDEX_INIT_GENESIS_STATE, + EVENT_TOPIC_TRANSACTION_EXECUTION, } from '../../../src/state_machine/constants'; import { PrefixedStateReadWriter } from '../../../src/state_machine/prefixed_state_read_writer'; import { InMemoryPrefixedStateDB } from '../../../src/testing/in_memory_prefixed_state'; @@ -58,6 +59,7 @@ describe('state_machine', () => { params: codec.encode(new CustomCommand0(new NamedRegistry(), new NamedRegistry()).schema, { data: 'some info', }), + id: utils.hash(utils.getRandomBytes(2)), } as Transaction; let stateMachine: StateMachine; @@ -205,7 +207,9 @@ describe('state_machine', () => { const events = ctx.eventQueue.getEvents(); const dataDecoded = codec.decode(standardEventDataSchema, events[0].toObject().data); expect(events).toHaveLength(1); - expect(events[0].toObject().topics[0]).toEqual(transaction.id); + expect(events[0].toObject().topics[0]).toEqual( + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, transaction.id]), + ); expect(dataDecoded).toStrictEqual({ success: true }); }); @@ -216,6 +220,7 @@ describe('state_machine', () => { params: codec.encode(new CustomCommand3(new NamedRegistry(), new NamedRegistry()).schema, { data: 'some info', }), + id: utils.hash(utils.getRandomBytes(2)), } as Transaction; stateMachine.registerModule(new CustomModule3()); const ctx = new TransactionContext({ @@ -231,9 +236,13 @@ describe('state_machine', () => { await stateMachine.executeTransaction(ctx); const events = ctx.eventQueue.getEvents(); + const dataDecoded = codec.decode(standardEventDataSchema, events[0].toObject().data); 
expect(events).toHaveLength(1); - expect(events[0].toObject().topics[0]).toEqual(transaction.id); + + expect(events[0].toObject().topics[0]).toEqual( + Buffer.concat([EVENT_TOPIC_TRANSACTION_EXECUTION, transactionWithInvalidCommand.id]), + ); expect(dataDecoded).toStrictEqual({ success: false }); }); diff --git a/yarn.lock b/yarn.lock index 40d0909b755..be608c0fbfa 100644 --- a/yarn.lock +++ b/yarn.lock @@ -31,6 +31,14 @@ dependencies: "@babel/highlight" "^7.18.6" +"@babel/code-frame@^7.22.13": + version "7.22.13" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== + dependencies: + "@babel/highlight" "^7.22.13" + chalk "^2.4.2" + "@babel/compat-data@^7.20.0": version "7.20.1" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.20.1.tgz#f2e6ef7790d8c8dbf03d379502dcc246dcce0b30" @@ -78,7 +86,7 @@ json5 "^2.2.1" semver "^6.3.0" -"@babel/generator@^7.20.1", "@babel/generator@^7.20.2", "@babel/generator@^7.7.2": +"@babel/generator@^7.20.2", "@babel/generator@^7.7.2": version "7.20.4" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.20.4.tgz#4d9f8f0c30be75fd90a0562099a26e5839602ab8" integrity sha512-luCf7yk/cm7yab6CAW1aiFnmEfBJplb/JojV56MYEK7ziWfGmFlTfmL9Ehwfy4gFhbjBfWO1wj7/TuSbVNEEtA== @@ -87,7 +95,17 @@ "@jridgewell/gen-mapping" "^0.3.2" jsesc "^2.5.1" -"@babel/generator@^7.8.6", "@babel/generator@^7.8.7": +"@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== + dependencies: + "@babel/types" "^7.23.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + +"@babel/generator@^7.8.7": 
version "7.8.7" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.8.7.tgz#870b3cf7984f5297998152af625c4f3e341400f7" integrity sha512-DQwjiKJqH4C3qGiyQCAExJHoZssn49JTMJgZ8SANGgVFdkupcUhLOdkAeoC6kmHZCPfoDG5M0b6cFlSN5wW7Ew== @@ -112,36 +130,25 @@ resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== -"@babel/helper-function-name@^7.19.0": - version "7.19.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" - integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== - dependencies: - "@babel/template" "^7.18.10" - "@babel/types" "^7.19.0" +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== -"@babel/helper-function-name@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.8.3.tgz#eeeb665a01b1f11068e9fb86ad56a1cb1a824cca" - integrity sha512-BCxgX1BC2hD/oBlIFUgOCQDOPV8nSINxCwM3o93xP4P9Fq6aV5sgv2cOOITDMtCfQ+3PvHp3l689XZvAM9QyOA== +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== dependencies: - "@babel/helper-get-function-arity" "^7.8.3" - "@babel/template" "^7.8.3" - "@babel/types" "^7.8.3" + "@babel/template" 
"^7.22.15" + "@babel/types" "^7.23.0" -"@babel/helper-get-function-arity@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz#b894b947bd004381ce63ea1db9f08547e920abd5" - integrity sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA== +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: - "@babel/types" "^7.8.3" - -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== - dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" "@babel/helper-module-imports@^7.18.6": version "7.18.6" @@ -198,18 +205,23 @@ dependencies: "@babel/types" "^7.18.6" -"@babel/helper-split-export-declaration@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz#31a9f30070f91368a7182cf05f831781065fc7a9" - integrity sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA== +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== dependencies: - "@babel/types" "^7.8.3" + "@babel/types" "^7.22.5" 
"@babel/helper-string-parser@^7.19.4": version "7.19.4" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== +"@babel/helper-string-parser@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" + integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== + "@babel/helper-validator-identifier@^7.10.3", "@babel/helper-validator-identifier@^7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" @@ -220,6 +232,11 @@ resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-option@^7.18.6": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" @@ -270,6 +287,15 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.22.13": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54" + integrity 
sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + "@babel/parser@^7.1.0", "@babel/parser@^7.7.5", "@babel/parser@^7.8.6", "@babel/parser@^7.8.7": version "7.8.7" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.8.7.tgz#7b8facf95d25fef9534aad51c4ffecde1a61e26a" @@ -280,11 +306,16 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.10.3.tgz#7e71d892b0d6e7d04a1af4c3c79d72c1f10f5315" integrity sha512-oJtNJCMFdIMwXGmx+KxuaD7i3b8uS7TTFYW/FNG2BT8m+fmGHoiPYoH0Pe3gya07WuFmM5FCDIr1x0irkD/hyA== -"@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.20.1", "@babel/parser@^7.20.2": +"@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.20.2": version "7.20.3" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.20.3.tgz#5358cf62e380cf69efcb87a7bb922ff88bfac6e2" integrity sha512-OP/s5a94frIPXwjzEcv5S/tpQfc6XhxYUnmWpgdqMWGgYCuErA3SzozaRAMQgSZWKeTJxht9aWAkUY+0UzvOFg== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== + "@babel/plugin-syntax-async-generators@^7.8.4": version "7.8.4" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" @@ -428,6 +459,15 @@ "@babel/parser" "^7.18.10" "@babel/types" "^7.18.10" +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + 
"@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + "@babel/template@^7.3.3": version "7.10.3" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.3.tgz#4d13bc8e30bf95b0ce9d175d30306f42a2c9a7b8" @@ -446,37 +486,22 @@ "@babel/parser" "^7.8.6" "@babel/types" "^7.8.6" -"@babel/traverse@^7.20.1", "@babel/traverse@^7.7.2": - version "7.20.1" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.20.1.tgz#9b15ccbf882f6d107eeeecf263fbcdd208777ec8" - integrity sha512-d3tN8fkVJwFLkHkBN479SOsw4DMZnz8cdbL/gvuDuzy3TS6Nfw80HuQqhw1pITbIruHyh7d1fMA47kWzmcUEGA== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.20.1" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.20.1" - "@babel/types" "^7.20.0" +"@babel/traverse@^7.20.1", "@babel/traverse@^7.7.2", "@babel/traverse@^7.7.4", "@babel/traverse@^7.8.4", "@babel/traverse@^7.8.6": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" debug "^4.1.0" globals "^11.1.0" -"@babel/traverse@^7.7.4", "@babel/traverse@^7.8.4", "@babel/traverse@^7.8.6": - version "7.8.6" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.8.6.tgz#acfe0c64e1cd991b3e32eae813a6eb564954b5ff" - integrity 
sha512-2B8l0db/DPi8iinITKuo7cbPznLCEk0kCxDoB9/N6gGNg/gxOXiR/IcymAFPiBwk5w6TtQ27w4wpElgp9btR9A== - dependencies: - "@babel/code-frame" "^7.8.3" - "@babel/generator" "^7.8.6" - "@babel/helper-function-name" "^7.8.3" - "@babel/helper-split-export-declaration" "^7.8.3" - "@babel/parser" "^7.8.6" - "@babel/types" "^7.8.6" - debug "^4.1.0" - globals "^11.1.0" - lodash "^4.17.13" - "@babel/types@^7.0.0", "@babel/types@^7.3.0", "@babel/types@^7.8.3", "@babel/types@^7.8.6", "@babel/types@^7.8.7": version "7.8.7" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.8.7.tgz#1fc9729e1acbb2337d5b6977a63979b4819f5d1d" @@ -495,7 +520,7 @@ lodash "^4.17.13" to-fast-properties "^2.0.0" -"@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.20.0", "@babel/types@^7.20.2": +"@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.20.0", "@babel/types@^7.20.2": version "7.20.2" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.20.2.tgz#67ac09266606190f496322dbaff360fdaa5e7842" integrity sha512-FnnvsNWgZCr232sqtXggapvlkk/tuwR/qhGzcmxI0GXLCjmPYQPzio2FbdlWuY6y1sHFfQKk+rRbUZ9VStQMog== @@ -504,6 +529,15 @@ "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.0.tgz#8c1f020c9df0e737e4e247c0619f58c68458aaeb" + integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== + dependencies: + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@bcoe/v8-coverage@^0.2.3": version "0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -1058,6 +1092,11 @@ resolved 
"https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== + "@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" @@ -1076,6 +1115,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + "@jridgewell/trace-mapping@0.3.9": version "0.3.9" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" @@ -1092,6 +1136,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.17": + version "0.3.20" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" + integrity sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + 
"@jridgewell/trace-mapping@^0.3.9": version "0.3.14" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" @@ -5775,7 +5827,7 @@ chalk@^1.0.0, chalk@^1.1.3: strip-ansi "^3.0.0" supports-color "^2.0.0" -chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1: +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== From 63b3238297cc34bf9ed5115a1a399442a38018d5 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 25 Oct 2023 08:49:08 +0200 Subject: [PATCH 155/170] :white_check_mark: Fix genesis block --- elements/lisk-chain/test/unit/block_header.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/elements/lisk-chain/test/unit/block_header.spec.ts b/elements/lisk-chain/test/unit/block_header.spec.ts index 8010797718e..03b6ff38911 100644 --- a/elements/lisk-chain/test/unit/block_header.spec.ts +++ b/elements/lisk-chain/test/unit/block_header.spec.ts @@ -57,7 +57,7 @@ const getGenesisBlockAttrs = () => ({ maxHeightGenerated: 0, validatorsHash: utils.hash(Buffer.alloc(0)), aggregateCommit: { - height: 0, + height: 1009988, aggregationBits: Buffer.alloc(0), certificateSignature: EMPTY_BUFFER, }, From 886233250f1b9a17bfbf9c654e1ca1e7087357ca Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 30 Oct 2023 09:33:49 +0100 Subject: [PATCH 156/170] :recycle: Fix merge conflicts --- .../interoperability/mainchain/module.ts | 19 ++++++------ .../base_cross_chain_update_command.spec.ts | 30 ++++++++++++++----- 2 files changed, 33 insertions(+), 16 deletions(-) diff --git a/framework/src/modules/interoperability/mainchain/module.ts b/framework/src/modules/interoperability/mainchain/module.ts index 034dd3db388..708775d5295 100644 --- 
a/framework/src/modules/interoperability/mainchain/module.ts +++ b/framework/src/modules/interoperability/mainchain/module.ts @@ -261,15 +261,7 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule throw new Error(`ownChainName must be equal to ${CHAIN_NAME_MAINCHAIN}.`); } - // if chainInfos is empty, then ownChainNonce == 0 - // If chainInfos is non-empty, ownChainNonce > 0 - if (chainInfos.length === 0 && ownChainNonce !== BigInt(0)) { - throw new Error(`ownChainNonce must be 0 if chainInfos is empty.`); - } else if (chainInfos.length !== 0 && ownChainNonce <= BigInt(0)) { - throw new Error(`ownChainNonce must be positive if chainInfos is not empty.`); - } - - this._verifyChainInfos(ctx, chainInfos, terminatedStateAccounts); + this._verifyChainInfos(ctx, chainInfos, ownChainNonce, terminatedStateAccounts); this._verifyTerminatedStateAccounts(chainInfos, terminatedStateAccounts, mainchainID); this._verifyTerminatedOutboxAccounts( chainInfos, @@ -284,8 +276,17 @@ export class MainchainInteroperabilityModule extends BaseInteroperabilityModule private _verifyChainInfos( ctx: GenesisBlockExecuteContext, chainInfos: ChainInfo[], + ownChainNonce: bigint, terminatedStateAccounts: TerminatedStateAccountWithChainID[], ) { + // if chainInfos is empty, then ownChainNonce == 0 + // If chainInfos is non-empty, ownChainNonce > 0 + if (chainInfos.length === 0 && ownChainNonce !== BigInt(0)) { + throw new Error(`ownChainNonce must be 0 if chainInfos is empty.`); + } else if (chainInfos.length !== 0 && ownChainNonce <= 0) { + throw new Error(`ownChainNonce must be positive if chainInfos is not empty.`); + } + // Each entry chainInfo in chainInfos has a unique chainInfo.chainID const chainIDs = chainInfos.map(info => info.chainID); if (!objectUtils.bufferArrayUniqueItems(chainIDs)) { diff --git a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts 
b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts index 5017a38fc8a..51d49194290 100644 --- a/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts +++ b/framework/test/unit/modules/interoperability/base_cross_chain_update_command.spec.ts @@ -94,6 +94,7 @@ describe('BaseCrossChainUpdateCommand', () => { senderPublicKey, signatures: [], }; + const minReturnFeePerByte = BigInt(10000000); const certificate = codec.encode(certificateSchema, { blockID: utils.getRandomBytes(32), @@ -322,6 +323,19 @@ describe('BaseCrossChainUpdateCommand', () => { .set(stateStore, params.sendingChainID, chainAccount); }); + it('should reject when ccu params validation fails', async () => { + const nonBufferSendingChainID = 2; + verifyContext = { + ...verifyContext, + params: { ...params, sendingChainID: nonBufferSendingChainID } as any, + }; + + // 2nd param `isMainchain` could be false + await expect(command['verifyCommon'](verifyContext, false)).rejects.toThrow( + `Property '.sendingChainID' should pass "dataType" keyword validation`, + ); + }); + it('should call validator.validate with crossChainUpdateTransactionParams schema', async () => { jest.spyOn(validator, 'validate'); @@ -1571,6 +1585,7 @@ describe('BaseCrossChainUpdateCommand', () => { describe('bounce', () => { const ccmStatus = CCMStatusCode.MODULE_NOT_SUPPORTED; const ccmProcessedEventCode = CCMProcessedCode.MODULE_NOT_SUPPORTED; + const ccmSize = 100; let stateStore: PrefixedStateReadWriter; beforeEach(async () => { @@ -1592,7 +1607,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(1); @@ -1609,17 +1624,18 @@ describe('BaseCrossChainUpdateCommand', () => { }); it('should log event when ccm.fee is less than min fee', 
async () => { + const minFee = minReturnFeePerByte * BigInt(ccmSize); context = createCrossChainMessageContext({ ccm: { ...defaultCCM, status: CCMStatusCode.OK, - fee: BigInt(1), + fee: minFee - BigInt(1), }, stateStore, }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(1); @@ -1649,7 +1665,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1685,7 +1701,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1715,7 +1731,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(internalMethod.addToOutbox).toHaveBeenCalledWith( @@ -1742,7 +1758,7 @@ describe('BaseCrossChainUpdateCommand', () => { }); await expect( - command['bounce'](context, 100, ccmStatus, ccmProcessedEventCode), + command['bounce'](context, ccmSize, ccmStatus, ccmProcessedEventCode), ).resolves.toBeUndefined(); expect(context.eventQueue.getEvents()).toHaveLength(2); From 9a6063f977975a26f81216b7afeaa16d50d9255f Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 30 Oct 2023 16:08:45 +0100 Subject: [PATCH 157/170] :recycle: Remove duplicate test --- .../interoperability/mainchain/module.spec.ts | 37 ------------------- 1 file changed, 37 
deletions(-) diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index 09a2a602669..aeb54f567bd 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -386,43 +386,6 @@ describe('initGenesisState', () => { await expect(interopMod.initGenesisState(context)).resolves.not.toThrow(); }); - it('should call _verifyTerminatedStateAccountsIDs', async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccountsIDs' as any); - - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); - expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); - }); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { const context = createInitGenesisStateContext( { From 0e50017507c44cf362edddc3def642781623d956 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 30 Oct 2023 17:15:39 +0100 Subject: [PATCH 158/170] Revert and remove another duplicate test This reverts commit 9a6063f977975a26f81216b7afeaa16d50d9255f. 
--- .../interoperability/mainchain/module.spec.ts | 74 +++++++++---------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index aeb54f567bd..131ed9295b7 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -386,6 +386,43 @@ describe('initGenesisState', () => { await expect(interopMod.initGenesisState(context)).resolves.not.toThrow(); }); + it('should call _verifyTerminatedStateAccountsIDs', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccountsIDs' as any); + + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); + expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); + }); + it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { const context = createInitGenesisStateContext( { @@ -575,43 +612,6 @@ describe('initGenesisState', () => { ).resolves.toBeUndefined(); }); - it('should call _verifyTerminatedStateAccountsIDs', async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccountsIDs' as any); - - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, 
- status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); - expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); - }); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { const context = createInitGenesisStateContext( { From 098b822c7c6216bfc47d7510d9cbbb38b9591796 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 30 Oct 2023 23:32:14 +0100 Subject: [PATCH 159/170] :fire: Remove duplicate test --- .../interoperability/mainchain/module.spec.ts | 36 ------------------- 1 file changed, 36 deletions(-) diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index 131ed9295b7..b75f1fdf211 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -612,42 +612,6 @@ describe('initGenesisState', () => { ).resolves.toBeUndefined(); }); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.ACTIVE, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - 
{ - chainID: chainInfo.chainID, - terminatedStateAccount, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, - ); - }); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is REGISTERED', async () => { const context = createInitGenesisStateContext( { From 36a1df9ffe467f11006edf25eeca539bc45701ef Mon Sep 17 00:00:00 2001 From: Franco NG Date: Tue, 31 Oct 2023 11:04:08 +0100 Subject: [PATCH 160/170] Add length check to getLisk32AddressFromPublicKey (#9124) * Add length check to getLisk32AddressFromPublicKey * Set ED25519_PUBLIC_KEY_LENGTH be a constant in lisk-cryptography * Update test cases * Update formats on getLisk32AddressFromPublicKey --- elements/lisk-cryptography/src/address.ts | 8 +++++++- elements/lisk-cryptography/src/constants.ts | 1 + elements/lisk-cryptography/test/address.spec.ts | 15 ++++++++++++++- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/elements/lisk-cryptography/src/address.ts b/elements/lisk-cryptography/src/address.ts index b02170dd5c0..3abaa752138 100644 --- a/elements/lisk-cryptography/src/address.ts +++ b/elements/lisk-cryptography/src/address.ts @@ -17,6 +17,7 @@ import { BINARY_ADDRESS_LENGTH, DEFAULT_LISK32_ADDRESS_PREFIX, LISK32_ADDRESS_LENGTH, + ED25519_PUBLIC_KEY_LENGTH, } from './constants'; import { getPublicKey } from './nacl'; import { hash } from './utils'; @@ -118,7 +119,12 @@ const addressToLisk32 = (address: Buffer): string => { export const getLisk32AddressFromPublicKey = ( publicKey: Buffer, prefix = DEFAULT_LISK32_ADDRESS_PREFIX, -): string => `${prefix}${addressToLisk32(getAddressFromPublicKey(publicKey))}`; +): string => { + if (publicKey.length !== ED25519_PUBLIC_KEY_LENGTH) { + throw new Error(`publicKey length must be ${ED25519_PUBLIC_KEY_LENGTH}.`); + } + return 
`${prefix}${addressToLisk32(getAddressFromPublicKey(publicKey))}`; +}; export const validateLisk32Address = ( address: string, diff --git a/elements/lisk-cryptography/src/constants.ts b/elements/lisk-cryptography/src/constants.ts index bc57b3538d7..790b9e391c0 100644 --- a/elements/lisk-cryptography/src/constants.ts +++ b/elements/lisk-cryptography/src/constants.ts @@ -25,3 +25,4 @@ export const SHA256 = 'sha256'; export const LISK32_CHARSET = 'zxvcpmbn3465o978uyrtkqew2adsjhfg'; export const LISK32_ADDRESS_LENGTH = 41; export const MESSAGE_TAG_NON_PROTOCOL_MESSAGE = 'LSK_NPM_'; +export const ED25519_PUBLIC_KEY_LENGTH = 32; diff --git a/elements/lisk-cryptography/test/address.spec.ts b/elements/lisk-cryptography/test/address.spec.ts index 3cf77fe6b31..22acad9534b 100644 --- a/elements/lisk-cryptography/test/address.spec.ts +++ b/elements/lisk-cryptography/test/address.spec.ts @@ -24,6 +24,7 @@ import { LISK32_CHARSET, DEFAULT_LISK32_ADDRESS_PREFIX, LISK32_ADDRESS_LENGTH, + ED25519_PUBLIC_KEY_LENGTH, } from '../src/constants'; import * as utils from '../src/utils'; @@ -57,8 +58,20 @@ describe('address', () => { }); describe('#getLisk32AddressFromPublicKey', () => { + it('should reject when publicKey length not equal to ED25519_PUBLIC_KEY_LENGTH', () => { + expect(() => + getLisk32AddressFromPublicKey( + Buffer.alloc(ED25519_PUBLIC_KEY_LENGTH - 1), + DEFAULT_LISK32_ADDRESS_PREFIX, + ), + ).toThrow(`publicKey length must be ${ED25519_PUBLIC_KEY_LENGTH}.`); + }); + it('should generate lisk32 address from publicKey', () => { - const address = getLisk32AddressFromPublicKey(defaultPublicKey, 'lsk'); + const address = getLisk32AddressFromPublicKey( + defaultPublicKey, + DEFAULT_LISK32_ADDRESS_PREFIX, + ); expect(address).toBe(getLisk32AddressFromAddress(defaultAddress)); }); From 0c2b1b0a0283387a2f5c0ff21d1d77ffcdb67d78 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 31 Oct 2023 11:54:40 +0100 Subject: [PATCH 161/170] :recycle: Fix the test structure --- 
.../interoperability/mainchain/module.spec.ts | 330 +++++------------- 1 file changed, 93 insertions(+), 237 deletions(-) diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index b75f1fdf211..ac22cb98f8c 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -356,260 +356,116 @@ describe('initGenesisState', () => { }); }); - describe('terminatedStateAccounts', () => { - it('should not throw error if length of terminatedStateAccounts is zero', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).resolves.not.toThrow(); - }); - - it('should call _verifyTerminatedStateAccountsIDs', async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccountsIDs' as any); - - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, - }, - ], - }, - params, - ); + it('should check that _verifyChannelData is called from _verifyChainInfos', async () => { + 
jest.spyOn(interopMod, '_verifyChannelData' as any); - await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); - expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); - }); + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + }); + }); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.ACTIVE, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, + describe('terminatedStateAccounts', () => { + it('should not throw error if length of terminatedStateAccounts is zero', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + // this is needed to verify `validatorsHash` related tests (above) + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, + chainValidators: { + activeValidators, + certificateThreshold, }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, - ); - }); + }, + ], + terminatedStateAccounts: [], + }, + params, + ); - it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is REGISTERED', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // 
this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, - }, - ], - }, - params, - ); + await expect(interopMod.initGenesisState(context)).resolves.not.toThrow(); + }); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, - ); - }); + it('should call _verifyTerminatedStateAccountsIDs', async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccountsIDs' as any); - it('should throw error if chainID in terminatedStateAccounts does not exist in chainInfo', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedStateAccount, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED 
state', - ); - }); - - it('should throw error if some stateAccount in terminatedStateAccounts have stateRoot not equal to chainData.lastCertificate.stateRoot', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), - }, + chainValidators: { + activeValidators, + certificateThreshold, }, - ], - }, - params, - ); + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", - ); - }); + await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); + expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); + }); - it('should throw error if some stateAccount in terminatedStateAccounts have mainchainStateRoot not equal to EMPTY_HASH', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - mainchainStateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), + it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.ACTIVE, + lastCertificate: { + ...lastCertificate, + 
validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), }, }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, - ); - }); - - it('should throw error if some stateAccount in terminatedStateAccounts is not initialized', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - initialized: false, - }, + chainValidators: { + activeValidators, + certificateThreshold, }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'stateAccount is not initialized.', - ); - }); - }); - - it('should check that _verifyChannelData is called from _verifyChainInfos', async () => { - jest.spyOn(interopMod, '_verifyChannelData' as any); + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); - await expect( - interopMod.initGenesisState(contextWithValidValidatorsHash), - ).resolves.toBeUndefined(); + await expect(interopMod.initGenesisState(context)).rejects.toThrow( + `For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state`, + ); }); it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is REGISTERED', async () => { From 8688ff30d9b912b2ce00c676d6208b5a2e5d3cb4 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 31 Oct 2023 14:27:56 +0100 Subject: [PATCH 162/170] :recycle: Remove duplicate --- .../interoperability/mainchain/module.spec.ts | 148 +++--------------- 1 file changed, 19 insertions(+), 129 deletions(-) diff --git 
a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index ac22cb98f8c..f33a1f6b272 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -365,7 +365,25 @@ describe('initGenesisState', () => { }); }); - describe('terminatedStateAccounts', () => { + it('should check that _verifyChainValidators is called from _verifyChainInfos', async () => { + jest.spyOn(interopMod, '_verifyChainValidators' as any); + + await expect( + interopMod.initGenesisState(contextWithValidValidatorsHash), + ).resolves.toBeUndefined(); + + // must be true to pass this test + expect(interopMod['_verifyChainValidators']).toHaveBeenCalled(); + }); + + it(`should call _verifyTerminatedStateAccounts from initGenesisState`, async () => { + jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); + + await interopMod.initGenesisState(contextWithValidValidatorsHash); + expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); + }); + + describe('_verifyTerminatedStateAccounts', () => { it('should not throw error if length of terminatedStateAccounts is zero', async () => { const context = createInitGenesisStateContext( { @@ -611,27 +629,7 @@ describe('initGenesisState', () => { 'stateAccount is not initialized.', ); }); - }); - - it('should check that _verifyChainValidators is called from _verifyChainInfos', async () => { - jest.spyOn(interopMod, '_verifyChainValidators' as any); - - await expect( - interopMod.initGenesisState(contextWithValidValidatorsHash), - ).resolves.toBeUndefined(); - - // must be true to pass this test - expect(interopMod['_verifyChainValidators']).toHaveBeenCalled(); - }); - - it(`should call _verifyTerminatedStateAccounts from initGenesisState`, async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); - - await 
interopMod.initGenesisState(contextWithValidValidatorsHash); - expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); - }); - describe('_verifyTerminatedStateAccounts', () => { it("should not throw error if length of terminatedStateAccounts is zero while there doesn't exist some chain in chainData with status TERMINATED", async () => { certificateThreshold = BigInt(10); const context = createInitGenesisStateContext( @@ -735,42 +733,6 @@ describe('initGenesisState', () => { ); }); - it('should throw error if chainID in terminatedStateAccounts does not exist in chainInfo', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 2]), - terminatedStateAccount, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', - ); - }); - it('should throw if chainInfo.chainData.status === TERMINATED but no corresponding terminateStateAccount', async () => { const context = createInitGenesisStateContext( { @@ -839,78 +801,6 @@ describe('initGenesisState', () => { await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); }); - - it('should throw error if some stateAccount in terminatedStateAccounts have mainchainStateRoot not equal to EMPTY_HASH', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify 
`validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - mainchainStateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), // *** - }, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `stateAccount.mainchainStateRoot is not equal to ${EMPTY_HASH.toString('hex')}.`, - ); - }); - - it('should throw error if some stateAccount in terminatedStateAccounts is not initialized', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - initialized: false, // *** - }, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - 'stateAccount is not initialized.', - ); - }); - - it('should throw error if some stateAccount in terminatedStateAccounts have stateRoot not equal to chainData.lastCertificate.stateRoot', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - // this is needed to verify `validatorsHash` related tests (above) - chainInfos: validChainInfos, - terminatedStateAccounts: [ - { - chainID: Buffer.from([0, 0, 0, 1]), - terminatedStateAccount: { - ...terminatedStateAccount, - stateRoot: Buffer.from(utils.getRandomBytes(HASH_LENGTH)), // *** - }, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - "stateAccount.stateRoot doesn't match chainInfo.chainData.lastCertificate.stateRoot.", - ); - }); }); it(`should call _verifyTerminatedOutboxAccounts from initGenesisState `, async () => { From f8140220e86698123d4cfdd2997c30cee12ac5b5 Mon Sep 17 00:00:00 2001 From: shuse2 
Date: Tue, 31 Oct 2023 16:00:49 +0100 Subject: [PATCH 163/170] :recycle: Remove duplicate --- .../interoperability/mainchain/module.spec.ts | 32 ------------------- 1 file changed, 32 deletions(-) diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index f33a1f6b272..7404c48ba47 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -733,38 +733,6 @@ describe('initGenesisState', () => { ); }); - it('should throw if chainInfo.chainData.status === TERMINATED but no corresponding terminateStateAccount', async () => { - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - // No terminatedStateAccount - terminatedStateAccounts: [], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).rejects.toThrow( - `For each chainInfo with status terminated there should be a corresponding entry in terminatedStateAccounts.`, - ); - }); - it('should call _verifyTerminatedStateAccounts', async () => { jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); From 223afceb7a16b5ad9434db928a556e6584fec76b Mon Sep 17 00:00:00 2001 From: Incede <33103370+Incede@users.noreply.github.com> Date: Wed, 1 Nov 2023 10:37:12 +0100 Subject: [PATCH 164/170] Unit test review: Dynamic block rewards (#9104) * Update unit tests * Cleanup leftover comments --- .../modules/dynamic_rewards/module.spec.ts | 178 ++++++++++-------- 1 file changed, 100 insertions(+), 78 deletions(-) diff --git 
a/framework/test/unit/modules/dynamic_rewards/module.spec.ts b/framework/test/unit/modules/dynamic_rewards/module.spec.ts index 1cd4ae2b80a..9dfbec951ab 100644 --- a/framework/test/unit/modules/dynamic_rewards/module.spec.ts +++ b/framework/test/unit/modules/dynamic_rewards/module.spec.ts @@ -58,6 +58,21 @@ describe('DynamicRewardModule', () => { let randomMethod: RandomMethod; let validatorsMethod: ValidatorsMethod; let posMethod: PoSMethod; + let generatorAddress: Buffer; + let standbyValidatorAddress: Buffer; + let stateStore: PrefixedStateReadWriter; + let blockHeader: BlockHeader; + + const activeValidator = 4; + const minimumReward = + (BigInt(defaultConfig.brackets[0]) * + BigInt(defaultConfig.factorMinimumRewardActiveValidators)) / + DECIMAL_PERCENT_FACTOR; + const totalRewardActiveValidator = BigInt(defaultConfig.brackets[0]) * BigInt(activeValidator); + const stakeRewardActiveValidators = + totalRewardActiveValidator - minimumReward * BigInt(activeValidator); + // generatorAddress has 20% of total weight, bftWeightSum/bftWeight = BigInt(5) + const defaultReward = minimumReward + stakeRewardActiveValidators / BigInt(5); beforeEach(async () => { rewardModule = new DynamicRewardModule(); @@ -126,9 +141,7 @@ describe('DynamicRewardModule', () => { }); describe('initGenesisState', () => { - let blockHeader: BlockHeader; let blockExecuteContext: GenesisBlockExecuteContext; - let stateStore: PrefixedStateReadWriter; beforeEach(() => { stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); @@ -151,23 +164,12 @@ describe('DynamicRewardModule', () => { describe('beforeTransactionsExecute', () => { let blockExecuteContext: BlockExecuteContext; - let generatorAddress: Buffer; - let standbyValidatorAddress: Buffer; - let stateStore: PrefixedStateReadWriter; - - const activeValidator = 4; - const minimumReward = - (BigInt(defaultConfig.brackets[0]) * - BigInt(defaultConfig.factorMinimumRewardActiveValidators)) / - DECIMAL_PERCENT_FACTOR; - const 
totalRewardActiveValidator = BigInt(defaultConfig.brackets[0]) * BigInt(activeValidator); - const ratioReward = totalRewardActiveValidator - minimumReward * BigInt(activeValidator); beforeEach(async () => { generatorAddress = utils.getRandomBytes(20); standbyValidatorAddress = utils.getRandomBytes(20); stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - const blockHeader = createBlockHeaderWithDefaults({ + blockHeader = createBlockHeaderWithDefaults({ height: defaultConfig.offset, generatorAddress, }); @@ -224,9 +226,8 @@ describe('DynamicRewardModule', () => { await rewardModule.beforeTransactionsExecute(blockExecuteContext); - // generatorAddress has 20% of total weight expect(blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REWARD)).toEqual( - minimumReward + ratioReward / BigInt(5), + defaultReward, ); expect( blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REDUCTION), @@ -244,7 +245,7 @@ describe('DynamicRewardModule', () => { generatorMap, ); - const blockHeader = createBlockHeaderWithDefaults({ + blockHeader = createBlockHeaderWithDefaults({ height: defaultConfig.offset, generatorAddress: standbyValidatorAddress, }); @@ -262,24 +263,64 @@ describe('DynamicRewardModule', () => { blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REDUCTION), ).toEqual(REWARD_NO_REDUCTION); }); + + it('should store zero reward with seed reveal reduction when seed reveal is invalid', async () => { + // Round not finished + const generatorMap = new Array(1).fill(0).reduce(prev => { + // eslint-disable-next-line no-param-reassign + prev[utils.getRandomBytes(20).toString('binary')] = 1; + return prev; + }, {}); + (validatorsMethod.getGeneratorsBetweenTimestamps as jest.Mock).mockResolvedValue( + generatorMap, + ); + (randomMethod.isSeedRevealValid as jest.Mock).mockResolvedValue(false); + + await rewardModule.beforeTransactionsExecute(blockExecuteContext); + + 
expect(blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REWARD)).toEqual( + BigInt(0), + ); + expect( + blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REDUCTION), + ).toEqual(REWARD_REDUCTION_SEED_REVEAL); + }); + + it('should return quarter deducted reward when header does not imply max prevotes', async () => { + // Round not finished + const generatorMap = new Array(1).fill(0).reduce(prev => { + // eslint-disable-next-line no-param-reassign + prev[utils.getRandomBytes(20).toString('binary')] = 1; + return prev; + }, {}); + (validatorsMethod.getGeneratorsBetweenTimestamps as jest.Mock).mockResolvedValue( + generatorMap, + ); + blockHeader = createBlockHeaderWithDefaults({ + height: defaultConfig.offset, + impliesMaxPrevotes: false, + generatorAddress, + }); + blockExecuteContext = createBlockContext({ + stateStore, + header: blockHeader, + }).getBlockAfterExecuteContext(); + + await rewardModule.beforeTransactionsExecute(blockExecuteContext); + + expect(blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REWARD)).toEqual( + defaultReward / BigInt(4), + ); + expect( + blockExecuteContext.contextStore.get(CONTEXT_STORE_KEY_DYNAMIC_BLOCK_REDUCTION), + ).toEqual(REWARD_REDUCTION_MAX_PREVOTES); + }); }); describe('afterTransactionsExecute', () => { let blockExecuteContext: BlockAfterExecuteContext; - let stateStore: PrefixedStateReadWriter; - let generatorAddress: Buffer; - let standbyValidatorAddress: Buffer; let contextStore: Map; - const activeValidator = 4; - const minimumReward = - (BigInt(defaultConfig.brackets[0]) * - BigInt(defaultConfig.factorMinimumRewardActiveValidators)) / - DECIMAL_PERCENT_FACTOR; - const totalRewardActiveValidator = BigInt(defaultConfig.brackets[0]) * BigInt(activeValidator); - const ratioReward = totalRewardActiveValidator - minimumReward * BigInt(activeValidator); - const defaultReward = minimumReward + ratioReward / BigInt(5); - beforeEach(async () => { 
jest.spyOn(rewardModule.events.get(RewardMintedEvent), 'log'); jest.spyOn(tokenMethod, 'userSubstoreExists'); @@ -287,7 +328,7 @@ describe('DynamicRewardModule', () => { standbyValidatorAddress = utils.getRandomBytes(20); contextStore = new Map(); stateStore = new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); - const blockHeader = createBlockHeaderWithDefaults({ + blockHeader = createBlockHeaderWithDefaults({ height: defaultConfig.offset, generatorAddress, }); @@ -326,50 +367,6 @@ describe('DynamicRewardModule', () => { .mockResolvedValue(true as never); }); - it('should return zero reward with seed reveal reduction when seed reveal is invalid', async () => { - (randomMethod.isSeedRevealValid as jest.Mock).mockResolvedValue(false); - await rewardModule.beforeTransactionsExecute(blockExecuteContext); - await rewardModule.afterTransactionsExecute(blockExecuteContext); - - expect(rewardModule.events.get(RewardMintedEvent).log).toHaveBeenCalledWith( - expect.anything(), - blockExecuteContext.header.generatorAddress, - { amount: BigInt(0), reduction: REWARD_REDUCTION_SEED_REVEAL }, - ); - }); - - it('should return quarter deducted reward when header does not imply max prevotes', async () => { - const blockHeader = createBlockHeaderWithDefaults({ - height: defaultConfig.offset, - impliesMaxPrevotes: false, - generatorAddress, - }); - blockExecuteContext = createBlockContext({ - stateStore, - contextStore, - header: blockHeader, - }).getBlockAfterExecuteContext(); - when(tokenMethod.userSubstoreExists) - .calledWith( - expect.anything(), - blockExecuteContext.header.generatorAddress, - rewardModule['_moduleConfig'].tokenID, - ) - .mockResolvedValue(true as never); - - await rewardModule.beforeTransactionsExecute(blockExecuteContext); - await rewardModule.afterTransactionsExecute(blockExecuteContext); - - expect(rewardModule.events.get(RewardMintedEvent).log).toHaveBeenCalledWith( - expect.anything(), - blockExecuteContext.header.generatorAddress, - { - amount: 
defaultReward / BigInt(4), - reduction: REWARD_REDUCTION_MAX_PREVOTES, - }, - ); - }); - it('should return full reward when header and assets are valid', async () => { await rewardModule.beforeTransactionsExecute(blockExecuteContext); await rewardModule.afterTransactionsExecute(blockExecuteContext); @@ -381,7 +378,7 @@ describe('DynamicRewardModule', () => { ); }); - it('should mint the token and update shared reward when reward is non zero and user account of geenrator exists for the token id', async () => { + it('should mint the token and update shared reward when reward is non zero and user account of generator exists for the token id', async () => { await rewardModule.beforeTransactionsExecute(blockExecuteContext); await rewardModule.afterTransactionsExecute(blockExecuteContext); @@ -405,7 +402,7 @@ describe('DynamicRewardModule', () => { ); }); - it('should not mint or update shared reward and return zero reward with no account reduction when reward is non zero and user account of geenrator does not exist for the token id', async () => { + it('should not mint or update shared reward and return zero reward with no account reduction when reward is non zero and user account of generator does not exist for the token id', async () => { when(tokenMethod.userSubstoreExists) .calledWith( expect.anything(), @@ -441,9 +438,9 @@ describe('DynamicRewardModule', () => { expect(posMethod.updateSharedRewards).not.toHaveBeenCalled(); }); - it('should store timestamp when end of round', async () => { + it('should store timestamp when it is end of round', async () => { const timestamp = 123456789; - const blockHeader = createBlockHeaderWithDefaults({ + blockHeader = createBlockHeaderWithDefaults({ height: defaultConfig.offset, timestamp, generatorAddress, @@ -465,5 +462,30 @@ describe('DynamicRewardModule', () => { expect(updatedTimestamp).toEqual(timestamp); }); + + it('should store timestamp when it is not end of round', async () => { + const timestamp = 123456789; + 
blockHeader = createBlockHeaderWithDefaults({ + height: defaultConfig.offset, + timestamp, + generatorAddress, + }); + blockExecuteContext = createBlockContext({ + stateStore, + contextStore, + header: blockHeader, + }).getBlockAfterExecuteContext(); + + (posMethod.isEndOfRound as jest.Mock).mockResolvedValue(false); + + await rewardModule.beforeTransactionsExecute(blockExecuteContext); + await rewardModule.afterTransactionsExecute(blockExecuteContext); + + const { timestamp: updatedTimestamp } = await rewardModule.stores + .get(EndOfRoundTimestampStore) + .get(blockExecuteContext, EMPTY_BYTES); + + expect(updatedTimestamp).not.toEqual(timestamp); + }); }); }); From cf9caadb55a341a9d58ed867b93c611c1a4f1683 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:25:50 +0100 Subject: [PATCH 165/170] Updates unit tests; `ValidatorsMethod` (#9126) * :recycle: :white_check_mark: ValidatorsModuleEndpoint.validateBLSKey * :recycle: :white_check_mark: ValidatorsMethod.registerValidatorKeys * :recycle: :white_check_mark: ValidatorsMethod.setValidatorBLSKey * :recycle: :white_check_mark: ValidatorsMethod.setValidatorGeneratorKey * :recycle: :white_check_mark: ValidatorsMethod.registerValidatorWithoutBLSKey * :recycle: :white_check_mark: ValidatorsModuleEndpoint.setValidatorsParams * :recycle: :white_check_mark: ValidatorsMethod.getGeneratorsBetweenTimestamps * :recycle: ValidatorsMethod.setValidatorBLSKey updates the BLS key if validator already has valid BLS key. * :recycle: :white_check_mark: Updates test descriptions for ValidatorsMethod.getGeneratorsBetweenTimestamps. 
Co-authored-by: AndreasKendziorra <40799768+AndreasKendziorra@users.noreply.github.com> --------- Co-authored-by: AndreasKendziorra <40799768+AndreasKendziorra@users.noreply.github.com> --- framework/src/modules/validators/method.ts | 3 - .../unit/modules/validators/endpoint.spec.ts | 23 +- .../unit/modules/validators/method.spec.ts | 248 ++++++++++++++++-- 3 files changed, 240 insertions(+), 34 deletions(-) diff --git a/framework/src/modules/validators/method.ts b/framework/src/modules/validators/method.ts index 24a33a1e05c..18180f445ce 100644 --- a/framework/src/modules/validators/method.ts +++ b/framework/src/modules/validators/method.ts @@ -208,9 +208,6 @@ export class ValidatorsMethod extends BaseMethod { } const validatorAccount = await validatorsSubStore.get(methodContext, validatorAddress); - if (!validatorAccount.blsKey.equals(INVALID_BLS_KEY)) { - return false; - } if (!bls.popVerify(blsKey, proofOfPossession)) { this.events.get(BlsKeyRegistrationEvent).log(methodContext, validatorAddress, { diff --git a/framework/test/unit/modules/validators/endpoint.spec.ts b/framework/test/unit/modules/validators/endpoint.spec.ts index 8b933bb67d9..b61339acf42 100644 --- a/framework/test/unit/modules/validators/endpoint.spec.ts +++ b/framework/test/unit/modules/validators/endpoint.spec.ts @@ -35,10 +35,12 @@ describe('ValidatorsModuleEndpoint', () => { const validatorAddress = utils.getRandomBytes(ADDRESS_LENGTH); const blsKey = utils.getRandomBytes(BLS_PUBLIC_KEY_LENGTH); const generatorKey = utils.getRandomBytes(ED25519_PUBLIC_KEY_LENGTH); + const validBLSKey = + 'b301803f8b5ac4a1133581fc676dfedc60d891dd5fa99028805e5ea5b08d3491af75d0707adab3b70c6a6a580217bf81'; const validProof = '88bb31b27eae23038e14f9d9d1b628a39f5881b5278c3c6f0249f81ba0deb1f68aa5f8847854d6554051aa810fdf1cdb02df4af7a5647b1aa4afb60ec6d446ee17af24a8a50876ffdaf9bf475038ec5f8ebeda1c1c6a3220293e23b13a9a5d26'; - beforeAll(() => { + beforeEach(() => { validatorsModule = new ValidatorsModule(); stateStore = 
new PrefixedStateReadWriter(new InMemoryPrefixedStateDB()); }); @@ -49,14 +51,16 @@ describe('ValidatorsModuleEndpoint', () => { const context = createTransientModuleEndpointContext({ stateStore, params: { - proofOfPossession: proof.toString('hex'), - blsKey: blsKey.toString('hex'), + proofOfPossession: validProof, + blsKey: validBLSKey, }, }); - await validatorsModule.stores.get(BLSKeyStore).set(createStoreGetter(stateStore), blsKey, { - address: utils.getRandomBytes(ADDRESS_LENGTH), - }); + await validatorsModule.stores + .get(BLSKeyStore) + .set(createStoreGetter(stateStore), Buffer.from(validBLSKey, 'hex'), { + address: utils.getRandomBytes(ADDRESS_LENGTH), + }); await expect(validatorsModule.endpoint.validateBLSKey(context)).resolves.toStrictEqual({ valid: false, @@ -81,8 +85,7 @@ describe('ValidatorsModuleEndpoint', () => { stateStore, params: { proofOfPossession: validProof, - blsKey: - 'b301803f8b5ac4a1133581fc676dfedc60d891dd5fa99028805e5ea5b08d3491af75d0707adab3b70c6a6a580217bf81', + blsKey: validBLSKey, }, }); await expect(validatorsModule.endpoint.validateBLSKey(context)).resolves.toStrictEqual({ @@ -91,7 +94,7 @@ describe('ValidatorsModuleEndpoint', () => { }); it('should resolve with false when proof of possession is invalid but bls key has a valid length', async () => { - const validBLSKey = + const anotherValidBLSKey = 'a491d1b0ecd9bb917989f0e74f0dea0422eac4a873e5e2644f368dffb9a6e20fd6e10c1b77654d067c0618f6e5a7f79a'; const invalidProof = 'b803eb0ed93ea10224a73b6b9c725796be9f5fefd215ef7a5b97234cc956cf6870db6127b7e4d824ec62276078e787db05584ce1adbf076bc0808ca0f15b73d59060254b25393d95dfc7abe3cda566842aaedf50bbb062aae1bbb6ef3b1fffff'; @@ -99,7 +102,7 @@ describe('ValidatorsModuleEndpoint', () => { stateStore, params: { proofOfPossession: invalidProof, - blsKey: validBLSKey, + blsKey: anotherValidBLSKey, }, }); await expect(validatorsModule.endpoint.validateBLSKey(context)).resolves.toStrictEqual({ diff --git 
a/framework/test/unit/modules/validators/method.spec.ts b/framework/test/unit/modules/validators/method.spec.ts index 0c34961381f..6058ea73e3f 100644 --- a/framework/test/unit/modules/validators/method.spec.ts +++ b/framework/test/unit/modules/validators/method.spec.ts @@ -13,7 +13,7 @@ */ import { codec } from '@liskhq/lisk-codec'; -import { utils } from '@liskhq/lisk-cryptography'; +import { utils, address as addressUtils } from '@liskhq/lisk-cryptography'; import { ValidatorsMethod, ValidatorsModule } from '../../../../src/modules/validators'; import { MODULE_NAME_VALIDATORS, @@ -74,6 +74,15 @@ describe('ValidatorsModuleMethod', () => { '88bb31b27eae23038e14f9d9d1b628a39f5881b5278c3c6f0249f81ba0deb1f68aa5f8847854d6554051aa810fdf1cdb02df4af7a5647b1aa4afb60ec6d446ee17af24a8a50876ffdaf9bf475038ec5f8ebeda1c1c6a3220293e23b13a9a5d27', 'hex', ); + const anotherProofOfPossession = Buffer.from( + 'b92b11d66348e197c62d14af1453620d550c21d59ce572d95a03f0eaa0d0d195efbb2f2fd1577dc1a04ecdb453065d9d168ce7648bc5328e5ea47bb07d3ce6fd75f35ee51064a9903da8b90f7dc8ab4f2549b834cb5911b883097133f66b9ab9', + 'hex', + ); + const anotherBLSKey = Buffer.from( + '92f020ce5e37befb86493a82686b0eedddb264350b0873cf1eeaa1fefe39d938f05f272452c1ef5e6ceb4d9b23687e31', + 'hex', + ); + const invalidAddressShort = utils.getRandomBytes(ADDRESS_LENGTH - 1); const invalidAddressLong = utils.getRandomBytes(ADDRESS_LENGTH + 1); const invalidGeneratorKeyShort = utils.getRandomBytes(ED25519_PUBLIC_KEY_LENGTH - 1); @@ -143,6 +152,8 @@ describe('ValidatorsModuleMethod', () => { [address], false, ); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(true); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(true); }); it('should not be able to create new validator account if validator address already exists, bls key is not registered and proof of possession is valid', async () => { @@ -172,6 +183,7 @@ describe('ValidatorsModuleMethod', () => { [address], true, ); + 
await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to create new validator account if validator address does not exist, bls key is already registered and proof of possession is valid', async () => { @@ -200,6 +212,7 @@ describe('ValidatorsModuleMethod', () => { [address], true, ); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(false); }); it('should not be able to create new validator account if validator address does not exist, bls key is not registered and proof of possession is invalid', async () => { @@ -225,6 +238,8 @@ describe('ValidatorsModuleMethod', () => { [address], true, ); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to register validator keys if validator address does not exist, bls key is not registered and proof of possession is valid but validatorAddress is shorter than 20 bytes', async () => { @@ -237,8 +252,8 @@ describe('ValidatorsModuleMethod', () => { proofOfPossession, ), ).rejects.toThrow(`Validator address must be ${ADDRESS_LENGTH} bytes long.`); - await expect(validatorsSubStore.get(methodContext, invalidAddressShort)).rejects.toThrow(); - await expect(blsKeysSubStore.get(methodContext, blsKey)).rejects.toThrow(); + await expect(validatorsSubStore.has(methodContext, invalidAddressShort)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to register validator keys if validator address does not exist, bls key is not registered and proof of possession is valid but validatorAddress is longer than 20 bytes', async () => { @@ -251,8 +266,8 @@ describe('ValidatorsModuleMethod', () => { proofOfPossession, ), ).rejects.toThrow(`Validator address must be ${ADDRESS_LENGTH} bytes long.`); - await expect(validatorsSubStore.get(methodContext, 
invalidAddressLong)).rejects.toThrow(); - await expect(blsKeysSubStore.get(methodContext, blsKey)).rejects.toThrow(); + await expect(validatorsSubStore.has(methodContext, invalidAddressLong)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to register validator keys if validator address does not exist, bls key is not registered and proof of possession is valid but generator key is shorter than 32 bytes', async () => { @@ -265,8 +280,8 @@ describe('ValidatorsModuleMethod', () => { proofOfPossession, ), ).rejects.toThrow(); - await expect(validatorsSubStore.get(methodContext, address)).rejects.toThrow(); - await expect(blsKeysSubStore.get(methodContext, blsKey)).rejects.toThrow(); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to register validator keys if validator address does not exist, bls key is not registered and proof of possession is valid but generator key is longer than 32 bytes', async () => { @@ -279,8 +294,8 @@ describe('ValidatorsModuleMethod', () => { proofOfPossession, ), ).rejects.toThrow(); - await expect(validatorsSubStore.get(methodContext, address)).rejects.toThrow(); - await expect(blsKeysSubStore.get(methodContext, blsKey)).rejects.toThrow(); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to register validator keys if validator address does not exist, bls key is not registered and proof of possession is valid but bls key is shorter than 48 bytes', async () => { @@ -293,8 +308,8 @@ describe('ValidatorsModuleMethod', () => { proofOfPossession, ), ).rejects.toThrow(); - await expect(validatorsSubStore.get(methodContext, address)).rejects.toThrow(); - await 
expect(blsKeysSubStore.get(methodContext, invalidBlsKeyShort)).rejects.toThrow(); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); it('should not be able to register validator keys if validator address does not exist, bls key is not registered and proof of possession is valid but bls key is longer than 48 bytes', async () => { @@ -307,8 +322,8 @@ describe('ValidatorsModuleMethod', () => { proofOfPossession, ), ).rejects.toThrow(); - await expect(validatorsSubStore.get(methodContext, address)).rejects.toThrow(); - await expect(blsKeysSubStore.get(methodContext, invalidBlsKeyLong)).rejects.toThrow(); + await expect(validatorsSubStore.has(methodContext, address)).resolves.toBe(false); + await expect(blsKeysSubStore.has(methodContext, blsKey)).resolves.toBe(false); }); }); @@ -351,6 +366,39 @@ describe('ValidatorsModuleMethod', () => { ); }); + it('should be able to correctly set bls key for validator if address exists with valid blsKey, key is not registered and proof of possession is valid', async () => { + const blsEventData = codec.encode(blsKeyRegDataSchema, { + blsKey: anotherBLSKey, + proofOfPossession: anotherProofOfPossession, + result: KeyRegResult.SUCCESS, + }); + const validatorAccount = { + generatorKey, + blsKey, + }; + await validatorsSubStore.set(methodContext, address, validatorAccount); + const isSet = await validatorsModule.method.setValidatorBLSKey( + methodContext, + address, + anotherBLSKey, + anotherProofOfPossession, + ); + + const setValidatorAccount = await validatorsSubStore.get(methodContext, address); + const hasKey = await blsKeysSubStore.has(methodContext, anotherBLSKey); + + expect(isSet).toBe(true); + expect(setValidatorAccount.blsKey).toEqual(anotherBLSKey); + expect(hasKey).toBe(true); + expect(methodContext.eventQueue.add).toHaveBeenCalledWith( + MODULE_NAME_VALIDATORS, + 
validatorsModule.events.get(BlsKeyRegistrationEvent).name, + blsEventData, + [address], + false, + ); + }); + it('should not be able to set bls key for validator if address does not exist', async () => { const blsEventData = codec.encode(blsKeyRegDataSchema, { blsKey, @@ -492,8 +540,9 @@ describe('ValidatorsModuleMethod', () => { }); it('should be able to correctly set generator key for validator if address exists', async () => { + const anotherGeneratorKey = utils.getRandomBytes(ED25519_PUBLIC_KEY_LENGTH); const generatorEventData = codec.encode(generatorKeyRegDataSchema, { - generatorKey, + generatorKey: anotherGeneratorKey, result: KeyRegResult.SUCCESS, }); const validatorAccount = { @@ -505,12 +554,12 @@ describe('ValidatorsModuleMethod', () => { const isSet = await validatorsModule.method.setValidatorGeneratorKey( methodContext, address, - generatorKey, + anotherGeneratorKey, ); const setValidatorAccount = await validatorsSubStore.get(methodContext, address); expect(isSet).toBe(true); - expect(setValidatorAccount.generatorKey.equals(generatorKey)).toBe(true); + expect(setValidatorAccount.generatorKey.equals(anotherGeneratorKey)).toBe(true); expect(methodContext.eventQueue.add).toHaveBeenCalledWith( MODULE_NAME_VALIDATORS, validatorsModule.events.get(GeneratorKeyRegistrationEvent).name, @@ -632,14 +681,14 @@ describe('ValidatorsModuleMethod', () => { ).resolves.toBeObject(); }); - it('should be able to return generators with at least one generator assigned more than one slot if input timestamps are valid and difference between input timestamps is greater than one round', async () => { + it('should be able to return generators with at least one generator assigned more than one slot if input timestamps are valid and difference between input timestamps is greater than or equal to one round plus two blocks', async () => { const validatorsPerRound = 101; const timePerRound = validatorsPerRound * blockTime; const result = await 
validatorsModule.method.getGeneratorsBetweenTimestamps( methodContext, 0, - timePerRound + 2 * blockTime + 1, + timePerRound + 2 * blockTime, ); let genWithCountGreaterThanOne = 0; for (const generatorAddress of Object.keys(result)) { @@ -651,14 +700,14 @@ describe('ValidatorsModuleMethod', () => { expect(genWithCountGreaterThanOne).toBeGreaterThan(0); }); - it('should be able to return with all generators assigned at least 2 slots and at least one generator assigned more than 2 slots if input timestamps are valid and difference between input timestamps is greater than 2 rounds', async () => { + it('should be able to return with all generators assigned at least 2 slots and at least one generator assigned more than 2 slots if input timestamps are valid and difference between timestamps is larger or equal to length of two rounds plus two block slots', async () => { const validatorsPerRound = 101; const timePerRound = validatorsPerRound * blockTime; const result = await validatorsModule.method.getGeneratorsBetweenTimestamps( methodContext, 0, - timePerRound * 2 + 2 * blockTime + 1, + timePerRound * 2 + 2 * blockTime, ); let genWithCountGreaterThanOne = 0; @@ -721,11 +770,37 @@ describe('ValidatorsModuleMethod', () => { ).resolves.toEqual({}); }); - it('should return empty result when startSlotNumber equals endSlotNumber but in the same block slot', async () => { + it('should return empty result when startTimestamp equals endTimestamp', async () => { await expect( validatorsModule.method.getGeneratorsBetweenTimestamps(methodContext, 2, 2), ).resolves.toEqual({}); }); + + it('should return 3 generators from indicies 100, 0 and 1 of generator list, all having assigned 1 slot', async () => { + const { validators } = await validatorsParamsSubStore.get(methodContext, EMPTY_KEY); + const generatorAddressesInStore = validators.map(validator => + validator.address.toString('binary'), + ); + const expectedGenerators = [ + generatorAddressesInStore[100], + 
generatorAddressesInStore[0], + generatorAddressesInStore[1], + ]; + + const result = await validatorsModule.method.getGeneratorsBetweenTimestamps( + methodContext, + 99 * blockTime, + 103 * blockTime, + ); + + const actualGenerators = Object.keys(result); + + for (const generatorAddress of actualGenerators) { + expect(result[generatorAddress]).toBe(1); + } + + expect(expectedGenerators).toEqual(actualGenerators); + }); }); describe('getValidatorKeys', () => { @@ -813,6 +888,7 @@ describe('ValidatorsModuleMethod', () => { expect(isSet).toBe(true); expect(setValidatorAccount.generatorKey.equals(generatorKey)).toBe(true); + expect(setValidatorAccount.blsKey.equals(INVALID_BLS_KEY)).toBe(true); expect(methodContext.eventQueue.add).toHaveBeenCalledWith( MODULE_NAME_VALIDATORS, validatorsModule.events.get(GeneratorKeyRegistrationEvent).name, @@ -933,4 +1009,134 @@ describe('ValidatorsModuleMethod', () => { ); }); }); + + describe('setValidatorsParams', () => { + it('should update ValidatorsParamsStore with the provided validators, preCommitThreshold, certificateThreshold and call setNextValidators', async () => { + const validatorSetter = { + setNextValidators: jest.fn().mockReturnValue(undefined), + }; + + const validators = [ + { + generatorKey: Buffer.from( + '91fdf7f2a3eb93e493f736a4f9fce0e1df082836bf6d06e739bb3b0e1690fada', + 'hex', + ), + blsKey: Buffer.from( + 'a84b3fc0a53fcb07c6057442cf11b37ef0a3d3216fc8e245f9cbf43c13193515f0de3ab9ef4f6b0e04ecdb4df212d96a', + 'hex', + ), + address: addressUtils.getAddressFromLisk32Address( + 'lsk8kpswabbcjrnfp89demrfvryx9sgjsma87pusk', + ), + bftWeight: BigInt(54), + }, + { + generatorKey: Buffer.from( + 'b53ef930d84d3ce5b4947c2502da06bcbc0fb2c71ee96f3b3a35340516712c71', + 'hex', + ), + blsKey: Buffer.from( + '8d4151757d14b1a30f7088f0bb1505bfd94a471872d565de563dbce32f696cb77afcc026170c343d0329ad554df564f6', + 'hex', + ), + address: addressUtils.getAddressFromLisk32Address( + 'lskkjm548jqdrgzqrozpkew9z82kqfvtpmvavj7d6', + ), + 
bftWeight: BigInt(33), + }, + ]; + + for (const validator of validators) { + await validatorsSubStore.set(methodContext, validator.address, { + generatorKey: validator.generatorKey, + blsKey: validator.blsKey, + }); + } + + const preCommitThreshold = BigInt(100); + const certificateThreshold = BigInt(200); + + await validatorsMethod.setValidatorsParams( + methodContext, + validatorSetter, + preCommitThreshold, + certificateThreshold, + validators, + ); + + const expectedValidatorParams = { + certificateThreshold, + preCommitThreshold, + validators, + }; + + const validatorParams = await validatorsParamsSubStore.get(methodContext, EMPTY_KEY); + + expect(validatorParams).toEqual(expectedValidatorParams); + + expect(validatorSetter.setNextValidators).toHaveBeenNthCalledWith( + 1, + preCommitThreshold, + certificateThreshold, + validators, + ); + }); + + it('should throw if provided validator does not exist in Validator', async () => { + const validatorSetter = { + setNextValidators: jest.fn().mockReturnValue(undefined), + }; + + const validators = [ + { + generatorKey: Buffer.from( + '91fdf7f2a3eb93e493f736a4f9fce0e1df082836bf6d06e739bb3b0e1690fada', + 'hex', + ), + blsKey: Buffer.from( + 'a84b3fc0a53fcb07c6057442cf11b37ef0a3d3216fc8e245f9cbf43c13193515f0de3ab9ef4f6b0e04ecdb4df212d96a', + 'hex', + ), + address: addressUtils.getAddressFromLisk32Address( + 'lsk8kpswabbcjrnfp89demrfvryx9sgjsma87pusk', + ), + bftWeight: BigInt(54), + }, + { + generatorKey: Buffer.from( + 'b53ef930d84d3ce5b4947c2502da06bcbc0fb2c71ee96f3b3a35340516712c71', + 'hex', + ), + blsKey: Buffer.from( + '8d4151757d14b1a30f7088f0bb1505bfd94a471872d565de563dbce32f696cb77afcc026170c343d0329ad554df564f6', + 'hex', + ), + address: addressUtils.getAddressFromLisk32Address( + 'lskkjm548jqdrgzqrozpkew9z82kqfvtpmvavj7d6', + ), + bftWeight: BigInt(33), + }, + ]; + + const preCommitThreshold = BigInt(100); + const certificateThreshold = BigInt(200); + + await expect( + validatorsMethod.setValidatorsParams( + 
methodContext, + validatorSetter, + preCommitThreshold, + certificateThreshold, + validators, + ), + ).rejects.toThrow('does not exist'); + + const validatorParamsExits = await validatorsParamsSubStore.has(methodContext, EMPTY_KEY); + + expect(validatorParamsExits).toBe(false); + + expect(validatorSetter.setNextValidators).not.toHaveBeenCalled(); + }); + }); }); From dcb7d10986b8eaab71e1a62ea30f705f1fe01125 Mon Sep 17 00:00:00 2001 From: shuse2 Date: Wed, 1 Nov 2023 16:15:11 +0100 Subject: [PATCH 166/170] :recycle: Revert removed test --- .../interoperability/mainchain/module.spec.ts | 74 +++++++++---------- .../interoperability/sidechain/module.spec.ts | 22 ++++++ 2 files changed, 59 insertions(+), 37 deletions(-) diff --git a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts index 7404c48ba47..84b6d333a21 100644 --- a/framework/test/unit/modules/interoperability/mainchain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/mainchain/module.spec.ts @@ -450,6 +450,43 @@ describe('initGenesisState', () => { expect(interopMod['_verifyTerminatedStateAccountsIDs']).toHaveBeenCalledTimes(1); }); + it('_verifyChainID the same number of times as size of terminatedStateAccounts + size of chainInfo', async () => { + jest.spyOn(interopMod, '_verifyChainID' as any); + + const context = createInitGenesisStateContext( + { + ...genesisInteroperability, + chainInfos: [ + { + ...chainInfo, + chainData: { + ...chainData, + status: ChainStatus.TERMINATED, + lastCertificate: { + ...lastCertificate, + validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), + }, + }, + chainValidators: { + activeValidators, + certificateThreshold, + }, + }, + ], + terminatedStateAccounts: [ + { + chainID: chainInfo.chainID, + terminatedStateAccount, + }, + ], + }, + params, + ); + + await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); + 
expect(interopMod['_verifyChainID']).toHaveBeenCalledTimes(2); + }); + it('should throw error if chainInfo.chainID exists in terminatedStateAccounts & chainInfo.chainData.status is ACTIVE', async () => { const context = createInitGenesisStateContext( { @@ -732,43 +769,6 @@ describe('initGenesisState', () => { 'For each terminatedStateAccount there should be a corresponding chainInfo at TERMINATED state', ); }); - - it('should call _verifyTerminatedStateAccounts', async () => { - jest.spyOn(interopMod, '_verifyTerminatedStateAccounts' as any); - - const context = createInitGenesisStateContext( - { - ...genesisInteroperability, - chainInfos: [ - { - ...chainInfo, - chainData: { - ...chainData, - status: ChainStatus.TERMINATED, - lastCertificate: { - ...lastCertificate, - validatorsHash: computeValidatorsHash(activeValidators, certificateThreshold), - }, - }, - chainValidators: { - activeValidators, - certificateThreshold, - }, - }, - ], - terminatedStateAccounts: [ - { - chainID: chainInfo.chainID, - terminatedStateAccount, - }, - ], - }, - params, - ); - - await expect(interopMod.initGenesisState(context)).resolves.toBeUndefined(); - expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); - }); }); it(`should call _verifyTerminatedOutboxAccounts from initGenesisState `, async () => { diff --git a/framework/test/unit/modules/interoperability/sidechain/module.spec.ts b/framework/test/unit/modules/interoperability/sidechain/module.spec.ts index eb9b66149df..1207637c682 100644 --- a/framework/test/unit/modules/interoperability/sidechain/module.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/module.spec.ts @@ -483,6 +483,28 @@ describe('initGenesisState', () => { expect(interopMod['_verifyTerminatedStateAccounts']).toHaveBeenCalledTimes(1); }); + it('_verifyChainID the same number of times as size of terminatedStateAccounts', async () => { + jest.spyOn(interopMod, '_verifyChainID' as any); + + // const chainIDDefault = 
getMainchainID(chainID); + const context = createInitGenesisStateContext( + { + ...defaultData, + chainInfos: chainInfosDefault, + terminatedStateAccounts: [ + { + chainID: Buffer.from([1, 1, 2, 3]), + terminatedStateAccount, + }, + ], + }, + params, + ); + + await interopMod.initGenesisState(context); + expect(interopMod['_verifyChainID']).toHaveBeenCalledTimes(1); + }); + it(`should throw error if stateAccount.chainID is equal to OWN_CHAIN_ID`, async () => { const context = createInitGenesisStateContext( { From 88bc30c51dac0308975243e4e3260129c539b380 Mon Sep 17 00:00:00 2001 From: has5aan <50018215+has5aan@users.noreply.github.com> Date: Wed, 1 Nov 2023 18:09:26 +0100 Subject: [PATCH 167/170] transaction:create parameter prompts (#9118) * Updates getParamsFromPrompt * Fixes transaction:create tests * Adds tests for array prompts for transaction:create * :fire: Tests for removed code * Updates helper for transactions * :white_check_mark: Updates transaction:create * :white_check_mark: Type casting prompt input * Updates visibility of getNestedParametersFromPrompt() * Updates helper for transactions * :recycle: getParamsFromPrompt * :recycle: getNestedParametersFromPrompt --- commander/src/utils/reader.ts | 220 +++++++----------- .../commands/transaction/create.spec.ts | 180 +++++++++----- .../commands/transaction/prompt.spec.ts | 145 +++++------- commander/test/helpers/transactions.ts | 130 +++++++++++ 4 files changed, 398 insertions(+), 277 deletions(-) diff --git a/commander/src/utils/reader.ts b/commander/src/utils/reader.ts index 96d025fb0e9..cfd58b014a8 100644 --- a/commander/src/utils/reader.ts +++ b/commander/src/utils/reader.ts @@ -22,20 +22,6 @@ import * as readline from 'readline'; import { FileSystemError, ValidationError } from './error'; -interface PropertyValue { - readonly dataType: string; - readonly type: string; - readonly items: { type: string; properties: Record }; -} - -interface Question { - readonly [key: string]: unknown; -} - -interface 
NestedPropertyTemplate { - [key: string]: string[]; -} - interface NestedAsset { [key: string]: Array>; } @@ -193,36 +179,7 @@ export const readStdIn = async (): Promise => { return readFromStd; }; -const getNestedPropertyTemplate = (schema: Schema): NestedPropertyTemplate => { - const keyValEntries = Object.entries(schema.properties); - const template: NestedPropertyTemplate = {}; - - // eslint-disable-next-line @typescript-eslint/prefer-for-of - for (let i = 0; i < keyValEntries.length; i += 1) { - const [schemaPropertyName, schemaPropertyValue] = keyValEntries[i]; - if ((schemaPropertyValue as PropertyValue).type === 'array') { - // nested items properties - if ((schemaPropertyValue as PropertyValue).items.type === 'object') { - template[schemaPropertyName] = Object.keys( - (schemaPropertyValue as PropertyValue).items.properties, - ); - } - } - } - return template; -}; - -const castValue = ( - val: string, - schemaType: string, -): number | bigint | string | string[] | Record => { - if (schemaType === 'object') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return JSON.parse(val); - } - if (schemaType === 'array') { - return val !== '' ? val.split(',') : []; - } +const castValue = (val: string, schemaType: string): string | number | bigint => { if (schemaType === 'uint64' || schemaType === 'sint64') { return BigInt(val); } @@ -232,107 +189,108 @@ const castValue = ( return val; }; -export const transformAsset = ( - schema: Schema, - data: Record, -): Record => { - const propertySchema = Object.values(schema.properties); - const assetData = {} as Record; - return Object.entries(data).reduce((acc, curr, index) => { - const propSchema = propertySchema[index] as { type: string; dataType: string }; - // Property schema type can be scalar(string, bool, etc..) 
or structural(object, array) - const schemaType = propSchema.type || propSchema.dataType; - acc[curr[0]] = castValue(curr[1], schemaType); - return acc; - }, assetData); -}; +const castArray = (items: string[], schemaType: string): string[] | number[] | bigint[] => { + if (schemaType === 'uint64' || schemaType === 'sint64') { + return items.map(i => BigInt(i)); + } -export const transformNestedAsset = ( - schema: Schema, - data: Array>, -): NestedAsset => { - const template = getNestedPropertyTemplate(schema); - const result = {} as NestedAsset; - const items: Array> = []; - for (const assetData of data) { - const [[key, val]] = Object.entries(assetData); - const templateValues = template[key]; - const initData = {} as Record; - const valObject = val.split(',').reduce((acc, curr, index) => { - acc[templateValues[index]] = Number.isInteger(Number(curr)) ? Number(curr) : curr; - return acc; - }, initData); - items.push(valObject); - result[key] = items; + if (schemaType === 'uint32' || schemaType === 'sint32') { + return items.map(i => Number(i)); } + + return items; +}; + +const getNestedParametersFromPrompt = async (property: { + name: string; + items: { properties: Record }; +}) => { + let addMore = false; + const nestedArray: Array> = []; + const nestedProperties = Object.keys(property.items.properties); + const nestedPropertiesCsv = nestedProperties.join(','); + do { + const nestedPropertiesAnswer: Record = await inquirer.prompt({ + type: 'input', + name: property.name, + message: `Please enter: ${property.name}(${nestedPropertiesCsv}): `, + }); + + const properties = nestedPropertiesAnswer[property.name].split(','); + + const nestedObject: Record = {}; + + for (let i = 0; i < nestedProperties.length; i += 1) { + const propertySchema = property.items.properties[nestedProperties[i]] as { dataType: string }; + nestedObject[nestedProperties[i]] = + properties[i] === undefined ? 
'' : castValue(properties[i], propertySchema.dataType); + } + + nestedArray.push(nestedObject); + + const confirmResponse = await inquirer.prompt({ + type: 'confirm', + name: 'askAgain', + message: `Want to enter another ${property.name})`, + }); + + addMore = confirmResponse.askAgain as boolean; + } while (addMore); + + const result = {} as Record; + result[property.name] = nestedArray; + return result; }; -export const prepareQuestions = (schema: Schema): Question[] => { - const keyValEntries = Object.entries(schema.properties); - const questions: Question[] = []; +export const getParamsFromPrompt = async ( + assetSchema: Schema | { properties: Record }, +): Promise> => { + const result: Record = {}; + for (const propertyName of Object.keys(assetSchema.properties)) { + const property = assetSchema.properties[propertyName] as { + dataType?: string; + type?: 'array'; + items?: { dataType?: string; type?: 'object'; properties?: Record }; + }; - for (const [schemaPropertyName, schemaPropertyValue] of keyValEntries) { - if ((schemaPropertyValue as PropertyValue).type === 'array') { - let commaSeparatedKeys: string[] = []; - // nested items properties - if ((schemaPropertyValue as PropertyValue).items.type === 'object') { - commaSeparatedKeys = Object.keys((schemaPropertyValue as PropertyValue).items.properties); - } - questions.push({ - type: 'input', - name: schemaPropertyName, - message: `Please enter: ${schemaPropertyName}(${ - commaSeparatedKeys.length ? 
commaSeparatedKeys.join(', ') : 'comma separated values (a,b)' - }): `, - }); - if ((schemaPropertyValue as PropertyValue).items.type === 'object') { - questions.push({ - type: 'confirm', - name: 'askAgain', - message: `Want to enter another ${schemaPropertyName}(${commaSeparatedKeys.join(', ')})`, + if (property.type === 'array') { + if (property.items?.type === 'object' && property.items.properties !== undefined) { + const nestedResult = await getNestedParametersFromPrompt({ + name: propertyName, + items: { + properties: property.items.properties, + }, + }); + + result[propertyName] = nestedResult[propertyName]; + } else if (property.items?.type === undefined && property.items?.dataType !== undefined) { + const answer: Record = await inquirer.prompt({ + type: 'input', + name: propertyName, + message: `Please enter: ${propertyName}(comma separated values (a,b)): `, }); + + result[propertyName] = castArray( + answer[propertyName] === '' ? [] : answer[propertyName].split(','), + property.items.dataType, + ); } } else { - questions.push({ + const answer: Record = await inquirer.prompt({ type: 'input', - name: schemaPropertyName, - message: `Please enter: ${schemaPropertyName}: `, + name: propertyName, + message: `Please enter: ${propertyName}: `, }); - } - } - return questions; -}; -export const getParamsFromPrompt = async ( - assetSchema: Schema, - output: Array<{ [key: string]: string }> = [], -): Promise> => { - // prepare array of questions based on asset schema - const questions = prepareQuestions(assetSchema); - if (questions.length === 0) { - return {}; - } - let isTypeConfirm = false; - // Prompt user with prepared questions - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const result = await inquirer.prompt(questions).then(async answer => { - const inquirerResult = answer as { [key: string]: string }; - isTypeConfirm = typeof inquirerResult.askAgain === 'boolean'; - // if its a multiple questions prompt user again - if 
(inquirerResult.askAgain) { - output.push(inquirerResult); - return getParamsFromPrompt(assetSchema, output); + result[propertyName] = castValue( + answer[propertyName], + (property as { dataType: string }).dataType, + ); } - output.push(inquirerResult); - return Promise.resolve(answer); - }); - const filteredResult = output.map(({ askAgain, ...assetProps }) => assetProps); + } - // transform asset prompt result according to asset schema - return isTypeConfirm - ? transformNestedAsset(assetSchema, filteredResult) - : transformAsset(assetSchema, result as Record); + return result; }; export const checkFileExtension = (filePath: string): void => { diff --git a/commander/test/bootstrapping/commands/transaction/create.spec.ts b/commander/test/bootstrapping/commands/transaction/create.spec.ts index 78edde36f53..c53c8a438b4 100644 --- a/commander/test/bootstrapping/commands/transaction/create.spec.ts +++ b/commander/test/bootstrapping/commands/transaction/create.spec.ts @@ -22,7 +22,12 @@ import { emptySchema } from '@liskhq/lisk-codec'; import { join } from 'path'; import * as appUtils from '../../../../src/utils/application'; import * as readerUtils from '../../../../src/utils/reader'; -import { tokenTransferParamsSchema, posVoteParamsSchema } from '../../../helpers/transactions'; +import { + tokenTransferParamsSchema, + posVoteParamsSchema, + schemaWithArray, + schemaWithArrayOfObjects, +} from '../../../helpers/transactions'; import { CreateCommand } from '../../../../src/bootstrapping/commands/transaction/create'; import { getConfig } from '../../../helpers/config'; import { PromiseResolvedType } from '../../../../src/types'; @@ -56,6 +61,24 @@ describe('transaction:create command', () => { ], }; + const questionsForTokenTransfer = [ + { message: 'Please enter: tokenID: ', name: 'tokenID', type: 'input' }, + { message: 'Please enter: amount: ', name: 'amount', type: 'input' }, + { + message: 'Please enter: recipientAddress: ', + name: 'recipientAddress', + type: 
'input', + }, + { message: 'Please enter: data: ', name: 'data', type: 'input' }, + ]; + + const verifyIfInquirerCallsFor = (questions: Array>) => { + expect(inquirer.prompt).toHaveBeenCalledTimes(questions.length); + for (let i = 0; i < 1; i += 1) { + expect(inquirer.prompt).toHaveBeenNthCalledWith(i + 1, questions[i]); + } + }; + let config: Awaited>; let clientMock: PromiseResolvedType>; @@ -99,6 +122,19 @@ describe('transaction:create command', () => { }, ], }, + { + name: 'nft', + commands: [ + { + name: 'arrayOfItems', + params: schemaWithArray, + }, + { + name: 'arrayOfObjects', + params: schemaWithArrayOfObjects, + }, + ], + }, ], node: { getNodeInfo: jest.fn().mockResolvedValue({ @@ -133,6 +169,88 @@ describe('transaction:create command', () => { 'Missing 3 required args:', ); }); + + it('should throw if casting fails', async () => { + jest.spyOn(inquirer, 'prompt').mockResolvedValue({ + attributesArray: 'a,12312321', + }); + + await expect( + CreateCommandExtended.run( + ['nft', 'arrayOfItems', '100000000', `--passphrase=${passphrase}`], + config, + ), + ).rejects.toThrow(); + }); + + describe('prompt for arrays and array of objects', () => { + it('should inquire arrays as CSV', async () => { + jest.spyOn(inquirer, 'prompt').mockResolvedValue({ + attributesArray: '13213213,12312321', + }); + + await CreateCommandExtended.run( + ['nft', 'arrayOfItems', '100000000', `--passphrase=${passphrase}`], + config, + ); + verifyIfInquirerCallsFor([ + { + type: 'input', + name: 'attributesArray', + message: 'Please enter: attributesArray(comma separated values (a,b)): ', + }, + ]); + + expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); + }); + + it('should inquire each item of the array as a CSV and prompt to add more', async () => { + jest + .spyOn(inquirer, 'prompt') + .mockResolvedValueOnce({ + attributesArray: 'pos, 0000', + }) + .mockResolvedValueOnce({ + askAgain: true, + }) + .mockResolvedValueOnce({ + attributesArray: 'token, 0000', + 
}) + .mockResolvedValue({ + askAgain: false, + }); + + await CreateCommandExtended.run( + ['nft', 'arrayOfObjects', '100000000', `--passphrase=${passphrase}`], + config, + ); + + verifyIfInquirerCallsFor([ + { + type: 'input', + name: 'attributesArray', + message: 'Please enter: attributesArray(module,attributes): ', + }, + { + type: 'confirm', + name: 'askAgain', + message: 'Want to enter another attributesArray', + }, + { + type: 'input', + name: 'attributesArray', + message: 'Please enter: attributesArray(module,attributes): ', + }, + { + type: 'confirm', + name: 'askAgain', + message: 'Want to enter another attributesArray', + }, + ]); + + expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); + }); + }); }); describe('transaction:create 2', () => { @@ -395,17 +513,7 @@ describe('transaction:create command', () => { ], config, ); - expect(inquirer.prompt).toHaveBeenCalledTimes(1); - expect(inquirer.prompt).toHaveBeenCalledWith([ - { message: 'Please enter: tokenID: ', name: 'tokenID', type: 'input' }, - { message: 'Please enter: amount: ', name: 'amount', type: 'input' }, - { - message: 'Please enter: recipientAddress: ', - name: 'recipientAddress', - type: 'input', - }, - { message: 'Please enter: data: ', name: 'data', type: 'input' }, - ]); + verifyIfInquirerCallsFor(questionsForTokenTransfer); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledWith(undefined, { transaction: expect.any(String), @@ -426,17 +534,7 @@ describe('transaction:create command', () => { ], config, ); - expect(inquirer.prompt).toHaveBeenCalledTimes(1); - expect(inquirer.prompt).toHaveBeenCalledWith([ - { message: 'Please enter: tokenID: ', name: 'tokenID', type: 'input' }, - { message: 'Please enter: amount: ', name: 'amount', type: 'input' }, - { - message: 'Please enter: recipientAddress: ', - name: 'recipientAddress', - type: 'input', - }, - { message: 'Please enter: 
data: ', name: 'data', type: 'input' }, - ]); + verifyIfInquirerCallsFor(questionsForTokenTransfer); expect(readerUtils.getPassphraseFromPrompt).toHaveBeenCalledWith('passphrase'); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledWith(undefined, { @@ -690,17 +788,7 @@ describe('transaction:create command', () => { ['token', 'transfer', '100000000', `--passphrase=${passphrase}`], config, ); - expect(inquirer.prompt).toHaveBeenCalledTimes(1); - expect(inquirer.prompt).toHaveBeenCalledWith([ - { message: 'Please enter: tokenID: ', name: 'tokenID', type: 'input' }, - { message: 'Please enter: amount: ', name: 'amount', type: 'input' }, - { - message: 'Please enter: recipientAddress: ', - name: 'recipientAddress', - type: 'input', - }, - { message: 'Please enter: data: ', name: 'data', type: 'input' }, - ]); + verifyIfInquirerCallsFor(questionsForTokenTransfer); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledWith(undefined, { transaction: mockEncodedTransaction.toString('hex'), @@ -714,17 +802,7 @@ describe('transaction:create command', () => { ['token', 'transfer', '100000000', '--nonce=999'], config, ); - expect(inquirer.prompt).toHaveBeenCalledTimes(1); - expect(inquirer.prompt).toHaveBeenCalledWith([ - { message: 'Please enter: tokenID: ', name: 'tokenID', type: 'input' }, - { message: 'Please enter: amount: ', name: 'amount', type: 'input' }, - { - message: 'Please enter: recipientAddress: ', - name: 'recipientAddress', - type: 'input', - }, - { message: 'Please enter: data: ', name: 'data', type: 'input' }, - ]); + verifyIfInquirerCallsFor(questionsForTokenTransfer); expect(readerUtils.getPassphraseFromPrompt).toHaveBeenCalledWith('passphrase'); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); 
expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledWith(undefined, { @@ -736,17 +814,7 @@ describe('transaction:create command', () => { describe('transaction:create token transfer 100000000', () => { it('should prompt user for params and passphrase.', async () => { await CreateCommandExtended.run(['token', 'transfer', '100000000'], config); - expect(inquirer.prompt).toHaveBeenCalledTimes(1); - expect(inquirer.prompt).toHaveBeenCalledWith([ - { message: 'Please enter: tokenID: ', name: 'tokenID', type: 'input' }, - { message: 'Please enter: amount: ', name: 'amount', type: 'input' }, - { - message: 'Please enter: recipientAddress: ', - name: 'recipientAddress', - type: 'input', - }, - { message: 'Please enter: data: ', name: 'data', type: 'input' }, - ]); + verifyIfInquirerCallsFor(questionsForTokenTransfer); expect(readerUtils.getPassphraseFromPrompt).toHaveBeenCalledWith('passphrase'); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledTimes(1); expect(CreateCommandExtended.prototype.printJSON).toHaveBeenCalledWith(undefined, { diff --git a/commander/test/bootstrapping/commands/transaction/prompt.spec.ts b/commander/test/bootstrapping/commands/transaction/prompt.spec.ts index e787c120f81..3a285c60387 100644 --- a/commander/test/bootstrapping/commands/transaction/prompt.spec.ts +++ b/commander/test/bootstrapping/commands/transaction/prompt.spec.ts @@ -12,101 +12,66 @@ * Removal or modification of this copyright notice is prohibited. 
* */ +import * as inquirer from 'inquirer'; +import { getParamsFromPrompt } from '../../../../src/utils/reader'; +import { castValidationSchema } from '../../../helpers/transactions'; -import { - prepareQuestions, - transformAsset, - transformNestedAsset, -} from '../../../../src/utils/reader'; -import { - tokenTransferParamsSchema, - registerMultisignatureParamsSchema, - posVoteParamsSchema, -} from '../../../helpers/transactions'; - -describe('prompt', () => { - describe('prepareQuestions', () => { - it('should return array of questions for given asset schema', () => { - const questions = prepareQuestions(tokenTransferParamsSchema); - expect(questions).toEqual([ - { type: 'input', name: 'tokenID', message: 'Please enter: tokenID: ' }, - { type: 'input', name: 'amount', message: 'Please enter: amount: ' }, - { - type: 'input', - name: 'recipientAddress', - message: 'Please enter: recipientAddress: ', - }, - { type: 'input', name: 'data', message: 'Please enter: data: ' }, - ]); - }); - }); - - describe('transformAsset', () => { - it('should transform result according to asset schema', () => { - const questions = prepareQuestions(registerMultisignatureParamsSchema); - const transformedAsset = transformAsset(registerMultisignatureParamsSchema, { - numberOfSignatures: '4', - mandatoryKeys: 'a,b', - optionalKeys: '', - signatures: 'c,d', +describe('getParamsFromPrompt', () => { + it('should cast uint64, sint64 types to BigInt and uint32, sint32 to Number', async () => { + const uInt64 = '12312321'; + const sInt64 = '-12321312'; + const uInt32 = '10'; + const sInt32 = '-10'; + jest + .spyOn(inquirer, 'prompt') + .mockResolvedValueOnce({ + uInt64, + }) + .mockResolvedValueOnce({ + sInt64, + }) + .mockResolvedValueOnce({ + uInt32, + }) + .mockResolvedValueOnce({ + sInt32, + }) + .mockResolvedValueOnce({ + uInt64Array: `${uInt64},${uInt64}`, + }) + .mockResolvedValueOnce({ + sInt64Array: `${sInt64},${sInt64}`, + }) + .mockResolvedValueOnce({ + uInt32Array: 
`${uInt32},${uInt32}`, + }) + .mockResolvedValueOnce({ + sInt32Array: `${sInt32},${sInt32}`, + }) + .mockResolvedValueOnce({ + nested: `${uInt64},${sInt64},${uInt32},${sInt32}`, + }) + .mockResolvedValue({ + askAgain: false, }); - expect(questions).toEqual([ - { - type: 'input', - name: 'numberOfSignatures', - message: 'Please enter: numberOfSignatures: ', - }, - { - type: 'input', - name: 'mandatoryKeys', - message: 'Please enter: mandatoryKeys(comma separated values (a,b)): ', - }, - { - type: 'input', - name: 'optionalKeys', - message: 'Please enter: optionalKeys(comma separated values (a,b)): ', - }, - { - type: 'input', - name: 'signatures', - message: 'Please enter: signatures(comma separated values (a,b)): ', - }, - ]); - expect(transformedAsset).toEqual({ - numberOfSignatures: 4, - mandatoryKeys: ['a', 'b'], - optionalKeys: [], - signatures: ['c', 'd'], - }); - }); - }); - - describe('transformNestedAsset', () => { - it('should transform result according to nested asset schema', () => { - const questions = prepareQuestions(posVoteParamsSchema); - const transformedAsset = transformNestedAsset(posVoteParamsSchema, [ - { stakes: 'a,100' }, - { stakes: 'b,300' }, - ]); - expect(questions).toEqual([ + await expect(getParamsFromPrompt(castValidationSchema)).resolves.toEqual({ + uInt64: BigInt(uInt64), + sInt64: BigInt(sInt64), + uInt32: Number(uInt32), + sInt32: Number(sInt32), + uInt64Array: [BigInt(uInt64), BigInt(uInt64)], + sInt64Array: [BigInt(sInt64), BigInt(sInt64)], + uInt32Array: [Number(uInt32), Number(uInt32)], + sInt32Array: [Number(sInt32), Number(sInt32)], + nested: [ { - type: 'input', - name: 'stakes', - message: 'Please enter: stakes(validatorAddress, amount): ', + uInt64: BigInt(uInt64), + sInt64: BigInt(sInt64), + uInt32: Number(uInt32), + sInt32: Number(sInt32), }, - { - type: 'confirm', - name: 'askAgain', - message: 'Want to enter another stakes(validatorAddress, amount)', - }, - ]); - expect(transformedAsset).toEqual({ - stakes: [ - { 
validatorAddress: 'a', amount: 100 }, - { validatorAddress: 'b', amount: 300 }, - ], - }); + ], }); }); }); diff --git a/commander/test/helpers/transactions.ts b/commander/test/helpers/transactions.ts index 6088b04d042..9b97a65148d 100644 --- a/commander/test/helpers/transactions.ts +++ b/commander/test/helpers/transactions.ts @@ -165,6 +165,136 @@ export const posVoteParamsSchema = { }, }; +export const schemaWithArray = { + $id: '/lisk/schemaWithArray', + type: 'object', + required: ['attributesArray'], + properties: { + attributesArray: { + type: 'array', + fieldNumber: 1, + items: { + dataType: 'uint64', + }, + }, + }, +}; + +export const schemaWithArrayOfObjects = { + $id: '/lisk/schemaWithArrayOfObjects', + type: 'object', + required: ['attributesArray'], + properties: { + attributesArray: { + type: 'array', + fieldNumber: 4, + items: { + type: 'object', + required: ['module', 'attributes'], + properties: { + module: { + dataType: 'string', + minLength: 0, + maxLength: 10, + pattern: '^[a-zA-Z0-9]*$', + fieldNumber: 1, + }, + attributes: { + dataType: 'bytes', + fieldNumber: 2, + }, + }, + }, + }, + }, +}; + +export const castValidationSchema = { + $id: '/lisk/castValidation', + type: 'object', + required: [ + 'uInt64', + 'sIn64', + 'uInt32', + 'sInt32', + 'uInt64Array', + 'sInt64Array', + 'uInt32Array', + 'sInt32Array', + ], + properties: { + uInt64: { + dataType: 'uint64', + fieldNumber: 1, + }, + sInt64: { + dataType: 'sint64', + fieldNumber: 2, + }, + uInt32: { + dataType: 'uint32', + fieldNumber: 3, + }, + sInt32: { + dataType: 'sint32', + fieldNumber: 4, + }, + uInt64Array: { + type: 'array', + fieldNumber: 5, + items: { + dataType: 'uint64', + }, + }, + sInt64Array: { + type: 'array', + fieldNumber: 6, + items: { + dataType: 'sint64', + }, + }, + uInt32Array: { + type: 'array', + fieldNumber: 7, + items: { + dataType: 'uint32', + }, + }, + sInt32Array: { + type: 'array', + fieldNumber: 8, + items: { + dataType: 'sint32', + }, + }, + nested: { + type: 
'array', + fieldNumber: 9, + items: { + type: 'object', + properties: { + uInt64: { + dataType: 'uint64', + fieldNumber: 1, + }, + sInt64: { + dataType: 'sint64', + fieldNumber: 2, + }, + uInt32: { + dataType: 'uint32', + fieldNumber: 3, + }, + sInt32: { + dataType: 'sint32', + fieldNumber: 4, + }, + }, + }, + }, + }, +}; + export const genesisBlockID = Buffer.from( 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', 'hex', From abb27d3746354ef55c5f8b01ce6fa116e8a6daaf Mon Sep 17 00:00:00 2001 From: sitetester Date: Fri, 3 Nov 2023 17:28:45 +0200 Subject: [PATCH 168/170] Update State recovery commands (#9119) Update tests for State recovery commands --- .../interoperability/base_state_recovery.ts | 6 +- .../commands/initialize_state_recovery.ts | 7 +- .../base_state_recovery.spec.ts | 12 +- .../initialize_state_recovery.spec.ts | 182 ++++++++++++++---- 4 files changed, 160 insertions(+), 47 deletions(-) diff --git a/framework/src/modules/interoperability/base_state_recovery.ts b/framework/src/modules/interoperability/base_state_recovery.ts index 0cd116393d8..62b0f08ac95 100644 --- a/framework/src/modules/interoperability/base_state_recovery.ts +++ b/framework/src/modules/interoperability/base_state_recovery.ts @@ -78,7 +78,7 @@ export class BaseStateRecoveryCommand< if (!moduleMethod.recover) { return { status: VerifyStatus.FAIL, - error: new Error('Module is not recoverable.'), + error: new Error("Module is not recoverable, as it doesn't have a recover method."), }; } @@ -93,7 +93,7 @@ export class BaseStateRecoveryCommand< if (!objectUtils.bufferArrayUniqueItems(queryKeys)) { return { status: VerifyStatus.FAIL, - error: new Error('Recovered store keys are not pairwise distinct.'), + error: new Error('Recoverable store keys are not pairwise distinct.'), }; } @@ -160,7 +160,7 @@ export class BaseStateRecoveryCommand< }); storeQueriesUpdate.push({ key: Buffer.concat([storePrefix, entry.substorePrefix, utils.hash(entry.storeKey)]), - value: 
RECOVERED_STORE_VALUE, + value: RECOVERED_STORE_VALUE, // The value is set to a constant without known pre-image. bitmap: entry.bitmap, }); } catch (err) { diff --git a/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts b/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts index c4319d01ce4..5533a5e2f80 100644 --- a/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts +++ b/framework/src/modules/interoperability/sidechain/commands/initialize_state_recovery.ts @@ -33,6 +33,7 @@ import { getMainchainID } from '../../utils'; import { SidechainInteroperabilityInternalMethod } from '../internal_method'; import { InvalidSMTVerificationEvent } from '../../events/invalid_smt_verification'; +// https://github.com/LiskHQ/lips/blob/main/proposals/lip-0054.md#state-recovery-initialization-command export class InitializeStateRecoveryCommand extends BaseInteroperabilityCommand { public schema = stateRecoveryInitParamsSchema; @@ -106,19 +107,23 @@ export class InitializeStateRecoveryCommand extends BaseInteroperabilityCommand< const smt = new SparseMerkleTree(); let stateRoot: Buffer; + // it will help whether error is for input chainID or mainchainID + let msg; if (terminatedStateAccountExists) { const terminatedStateAccount = await terminatedStateSubstore.get(context, chainID); stateRoot = terminatedStateAccount.mainchainStateRoot; + msg = `given chainID: ${chainID.toString('hex')}.`; } else { const mainchainID = getMainchainID(context.chainID); const mainchainAccount = await this.stores.get(ChainAccountStore).get(context, mainchainID); stateRoot = mainchainAccount.lastCertificate.stateRoot; + msg = `mainchainID: ${mainchainID.toString('hex')}`; } const verified = await smt.verifyInclusionProof(stateRoot, [queryKey], proofOfInclusion); if (!verified) { this.events.get(InvalidSMTVerificationEvent).error(context); - throw new Error('State recovery initialization proof of 
inclusion is not valid.'); + throw new Error(`State recovery initialization proof of inclusion is not valid for ${msg}.`); } const deserializedSidechainAccount = codec.decode( diff --git a/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts b/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts index 069aa5407cb..c6e6d6607e1 100644 --- a/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts +++ b/framework/test/unit/modules/interoperability/base_state_recovery.spec.ts @@ -171,7 +171,9 @@ describe('RecoverStateCommand', () => { const result = await stateRecoveryCommand.verify(commandVerifyContext); expect(result.status).toBe(VerifyStatus.FAIL); - expect(result.error?.message).toInclude('Module is not recoverable.'); + expect(result.error?.message).toInclude( + "Module is not recoverable, as it doesn't have a recover method.", + ); }); it('should return error if recovered store keys are not pairwise distinct', async () => { @@ -180,7 +182,7 @@ describe('RecoverStateCommand', () => { const result = await stateRecoveryCommand.verify(commandVerifyContext); expect(result.status).toBe(VerifyStatus.FAIL); - expect(result.error?.message).toInclude('Recovered store keys are not pairwise distinct.'); + expect(result.error?.message).toInclude('Recoverable store keys are not pairwise distinct.'); }); }); @@ -200,6 +202,12 @@ describe('RecoverStateCommand', () => { expect(invalidSMTVerificationEvent.error).toHaveBeenCalled(); }); + it(`should not throw error if recovery is available for "${moduleName}"`, async () => { + await expect(stateRecoveryCommand.execute(commandExecuteContext)).resolves.not.toThrow( + `Recovery failed for module: ${moduleName}`, + ); + }); + it(`should throw error if recovery not available for "${moduleName}"`, async () => { interoperableCCMethods.delete(moduleName); diff --git a/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts 
b/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts index ec3ef4bce05..ee8b5d20264 100644 --- a/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts +++ b/framework/test/unit/modules/interoperability/sidechain/commands/initialize_state_recovery.spec.ts @@ -55,22 +55,17 @@ import { InvalidSMTVerificationEvent } from '../../../../../../src/modules/inter describe('Sidechain InitializeStateRecoveryCommand', () => { const interopMod = new SidechainInteroperabilityModule(); type StoreMock = Mocked; - const chainAccountStoreMock = { + const getSetHas = () => ({ get: jest.fn(), set: jest.fn(), has: jest.fn(), + }); + const chainAccountStoreMock = { + ...getSetHas(), key: Buffer.from('chainAccount', 'hex'), }; - const ownChainAccountStoreMock = { - get: jest.fn(), - set: jest.fn(), - has: jest.fn(), - }; - const terminatedStateAccountMock = { - get: jest.fn(), - set: jest.fn(), - has: jest.fn(), - }; + const ownChainAccountStoreMock = getSetHas(); + const terminatedStateAccountMock = getSetHas(); let stateRecoveryInitCommand: InitializeStateRecoveryCommand; let commandExecuteContext: CommandExecuteContext; let transaction: Transaction; @@ -85,10 +80,17 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { let commandVerifyContext: CommandVerifyContext; let stateStore: PrefixedStateReadWriter; let mainchainAccount: ChainAccount; + let ownChainAccount: OwnChainAccount; beforeEach(async () => { stateRecoveryInitCommand = interopMod['_stateRecoveryInitCommand']; + ownChainAccount = { + name: 'sidechain', + chainID: utils.intToBuffer(2, 4), + nonce: BigInt('0'), + }; + sidechainChainAccount = { name: 'sidechain1', lastCertificate: { @@ -179,7 +181,6 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { }); describe('verify', () => { - let ownChainAccount: OwnChainAccount; beforeEach(() => { mainchainAccount = { name: 'mainchain', @@ -191,17 +192,9 @@ 
describe('Sidechain InitializeStateRecoveryCommand', () => { }, status: ChainStatus.ACTIVE, }; - ownChainAccount = { - name: 'sidechain', - chainID: utils.intToBuffer(2, 4), - nonce: BigInt('0'), - }; terminatedStateAccountMock.has.mockResolvedValue(true); ownChainAccountStoreMock.get.mockResolvedValue(ownChainAccount); chainAccountStoreMock.get.mockResolvedValue(mainchainAccount); - interopStoreMock = { - createTerminatedStateAccount: jest.fn(), - }; commandVerifyContext = transactionContext.createCommandVerifyContext( stateRecoveryInitParamsSchema, ); @@ -212,7 +205,15 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { expect(result.status).toBe(VerifyStatus.OK); }); - it('should return error if chain id is same as mainchain id or own chain account id', async () => { + it('should return error if chain id is same as mainchain id', async () => { + commandVerifyContext.params.chainID = getMainchainID(ownChainAccount.chainID); + + await expect(stateRecoveryInitCommand.verify(commandVerifyContext)).rejects.toThrow( + 'Chain ID is not valid.', + ); + }); + + it('should return error if chain id is same as own chain account id', async () => { commandVerifyContext.params.chainID = ownChainAccount.chainID; await expect(stateRecoveryInitCommand.verify(commandVerifyContext)).rejects.toThrow( @@ -220,6 +221,25 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { ); }); + it("should not return error if terminated state account doesn't exist", async () => { + await terminatedStateSubstore.del(createStoreGetter(stateStore), transactionParams.chainID); + + await expect(stateRecoveryInitCommand.verify(commandVerifyContext)).resolves.not.toThrow( + 'Sidechain is already terminated.', + ); + }); + + it('should not return error if terminated state account exists but not initialized', async () => { + await terminatedStateSubstore.set(createStoreGetter(stateStore), transactionParams.chainID, { + ...terminatedStateAccount, + initialized: false, + }); + + await 
expect(stateRecoveryInitCommand.verify(commandVerifyContext)).resolves.not.toThrow( + 'Sidechain is already terminated.', + ); + }); + it('should return error if terminated state account exists and is initialized', async () => { await terminatedStateSubstore.set(createStoreGetter(stateStore), transactionParams.chainID, { ...terminatedStateAccount, @@ -268,7 +288,7 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { ); }); - it('should return error if the sidechain is active on the mainchain and does not violate the liveness requirement', async () => { + it('should return error if the sidechain has ChainStatus.REGISTERED status', async () => { await terminatedStateSubstore.set(createStoreGetter(stateStore), transactionParams.chainID, { ...terminatedStateAccount, initialized: false, @@ -285,7 +305,7 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { timestamp: 100, validatorsHash: utils.getRandomBytes(32), }, - status: ChainStatus.ACTIVE, + status: ChainStatus.REGISTERED, }; sidechainChainAccountEncoded = codec.encode(chainDataSchema, sidechainChainAccount); transactionParams = { @@ -313,11 +333,11 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { ); await expect(stateRecoveryInitCommand.verify(commandVerifyContext)).rejects.toThrow( - 'Sidechain is still active and obeys the liveness requirement.', + 'Sidechain has status registered.', ); }); - it('should return error if the sidechain has ChainStatus.REGISTERED status', async () => { + it('should return error if the sidechain is active on the mainchain and does not violate the liveness requirement', async () => { await terminatedStateSubstore.set(createStoreGetter(stateStore), transactionParams.chainID, { ...terminatedStateAccount, initialized: false, @@ -334,7 +354,7 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { timestamp: 100, validatorsHash: utils.getRandomBytes(32), }, - status: ChainStatus.REGISTERED, + status: ChainStatus.ACTIVE, }; 
sidechainChainAccountEncoded = codec.encode(chainDataSchema, sidechainChainAccount); transactionParams = { @@ -362,7 +382,62 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { ); await expect(stateRecoveryInitCommand.verify(commandVerifyContext)).rejects.toThrow( - 'Sidechain has status registered.', + 'Sidechain is still active and obeys the liveness requirement.', + ); + }); + + it('should not return error if the sidechain is active on the mainchain and does violate the liveness requirement', async () => { + await terminatedStateSubstore.set(createStoreGetter(stateStore), transactionParams.chainID, { + ...terminatedStateAccount, + initialized: false, + }); + const mainchainID = getMainchainID(transactionParams.chainID); + when(chainAccountStoreMock.get) + .calledWith(expect.anything(), mainchainID) + .mockResolvedValue({ + ...mainchainAccount, + lastCertificate: { + ...mainchainAccount.lastCertificate, + timestamp: LIVENESS_LIMIT + 50, + }, + } as ChainAccount); + sidechainChainAccount = { + name: 'sidechain1', + lastCertificate: { + height: 10, + stateRoot: utils.getRandomBytes(32), + timestamp: 10, + validatorsHash: utils.getRandomBytes(32), + }, + status: ChainStatus.ACTIVE, + }; + sidechainChainAccountEncoded = codec.encode(chainDataSchema, sidechainChainAccount); + transactionParams = { + chainID: utils.intToBuffer(3, 4), + bitmap: Buffer.alloc(0), + siblingHashes: [], + sidechainAccount: sidechainChainAccountEncoded, + }; + encodedTransactionParams = codec.encode(stateRecoveryInitParamsSchema, transactionParams); + transaction = new Transaction({ + module: MODULE_NAME_INTEROPERABILITY, + command: COMMAND_NAME_STATE_RECOVERY_INIT, + fee: BigInt(100000000), + nonce: BigInt(0), + params: encodedTransactionParams, + senderPublicKey: utils.getRandomBytes(32), + signatures: [], + }); + transactionContext = createTransactionContext({ + transaction, + stateStore, + }); + commandVerifyContext = transactionContext.createCommandVerifyContext( + 
stateRecoveryInitParamsSchema, + ); + + await expect(stateRecoveryInitCommand.verify(commandVerifyContext)).resolves.not.toThrow( + 'Sidechain is still active and obeys the liveness requirement.', ); }); }); @@ -391,8 +466,9 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { jest.spyOn(SparseMerkleTree.prototype, 'verify').mockResolvedValue(false); jest.spyOn(invalidSMTVerificationEvent, 'error'); + const msg = `given chainID: ${commandExecuteContext.params.chainID.toString('hex')}.`; await expect(stateRecoveryInitCommand.execute(commandExecuteContext)).rejects.toThrow( - 'State recovery initialization proof of inclusion is not valid', + `State recovery initialization proof of inclusion is not valid for ${msg}.`, ); expect(interopStoreMock.createTerminatedStateAccount).not.toHaveBeenCalled(); expect(invalidSMTVerificationEvent.error).toHaveBeenCalled(); @@ -413,15 +489,24 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { transactionParams.chainID, ); + const msg = `mainchainID: ${mainchainID.toString('hex')}`; await expect(stateRecoveryInitCommand.execute(commandExecuteContext)).rejects.toThrow( - 'State recovery initialization proof of inclusion is not valid', + `State recovery initialization proof of inclusion is not valid for ${msg}.`, ); expect(interopStoreMock.createTerminatedStateAccount).not.toHaveBeenCalled(); expect(invalidSMTVerificationEvent.error).toHaveBeenCalled(); }); it('should create a terminated state account when there is none', async () => { - // Arrange & Assign & Act + const mainchainID = getMainchainID(commandExecuteContext.chainID); + + when(chainAccountStoreMock.get) + .calledWith(expect.anything(), mainchainID) + .mockResolvedValue(mainchainAccount); + + jest.spyOn(terminatedStateSubstore, 'has').mockResolvedValue(false); + jest.spyOn(stateRecoveryInitCommand['internalMethod'], 'createTerminatedStateAccount'); + await stateRecoveryInitCommand.execute(commandExecuteContext); const accountFromStore = await 
terminatedStateSubstore.get( @@ -429,29 +514,44 @@ describe('Sidechain InitializeStateRecoveryCommand', () => { transactionParams.chainID, ); - // Assert expect(accountFromStore).toEqual({ ...terminatedStateAccount, initialized: true }); - expect(interopStoreMock.createTerminatedStateAccount).not.toHaveBeenCalled(); + expect( + stateRecoveryInitCommand['internalMethod'].createTerminatedStateAccount, + ).toHaveBeenCalledTimes(1); }); it('should update the terminated state account when there is one', async () => { - // Arrange & Assign & Act - when(terminatedStateAccountMock.has) - .calledWith(expect.anything(), transactionParams.chainID) - .mockResolvedValue(false); - const terminatedStateStore = interopMod.stores.get(TerminatedStateStore); - terminatedStateStore.get = terminatedStateAccountMock.get; - terminatedStateAccountMock.get.mockResolvedValue(terminatedStateAccount); + + jest.spyOn(stateRecoveryInitCommand['internalMethod'], 'createTerminatedStateAccount'); + jest.spyOn(terminatedStateStore, 'get').mockResolvedValue(terminatedStateAccount); + jest.spyOn(terminatedStateStore, 'set'); + await stateRecoveryInitCommand.execute(commandExecuteContext); + const deserializedSidechainAccount = codec.decode( + chainDataSchema, + commandExecuteContext.params.sidechainAccount, + ); + expect(terminatedStateStore.set).toHaveBeenCalledWith( + commandExecuteContext, + commandExecuteContext.params.chainID, + { + stateRoot: deserializedSidechainAccount.lastCertificate.stateRoot, + mainchainStateRoot: EMPTY_HASH, + initialized: true, + }, + ); + const accountFromStore = await terminatedStateSubstore.get( commandExecuteContext, transactionParams.chainID, ); - - // Assert expect(accountFromStore).toEqual(terminatedStateAccount); + + expect( + stateRecoveryInitCommand['internalMethod'].createTerminatedStateAccount, + ).not.toHaveBeenCalled(); }); }); }); From fef17823eb43545360c106386ff20bf0928636bb Mon Sep 17 00:00:00 2001 From: shuse2 Date: Mon, 6 Nov 2023 17:20:07 +0100 
Subject: [PATCH 169/170] :arrow_up: Bump version --- commander/package.json | 22 +-- .../templates/init/package-template.json | 16 +- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 12 +- elements/lisk-chain/package.json | 14 +- elements/lisk-client/package.json | 18 +- elements/lisk-codec/package.json | 8 +- elements/lisk-cryptography/package.json | 4 +- elements/lisk-elements/package.json | 24 +-- elements/lisk-p2p/package.json | 8 +- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +- elements/lisk-transactions/package.json | 8 +- elements/lisk-tree/package.json | 6 +- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../interop/pos-mainchain-fast/package.json | 16 +- .../pos-sidechain-example-one/package.json | 16 +- .../pos-sidechain-example-two/package.json | 16 +- examples/poa-sidechain/package.json | 16 +- examples/pos-mainchain/package.json | 18 +- .../package.json | 4 +- .../package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 16 +- .../lisk-framework-forger-plugin/package.json | 6 +- .../package.json | 4 +- .../package.json | 6 +- framework/package.json | 24 +-- protocol-specs/package.json | 8 +- sdk/package.json | 26 +-- test/package.json | 2 +- yarn.lock | 174 +----------------- 32 files changed, 179 insertions(+), 335 deletions(-) diff --git a/commander/package.json b/commander/package.json index 55e85af8029..2d6f0fafca0 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "6.1.0-beta.1", + "version": "6.1.0-rc.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -101,16 +101,16 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.1", - "@liskhq/lisk-chain": "^0.6.0-beta.0", - "@liskhq/lisk-client": "^6.1.0-beta.1", - "@liskhq/lisk-codec": "^0.5.0-beta.0", 
- "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", + "@liskhq/lisk-chain": "^0.6.0-rc.0", + "@liskhq/lisk-client": "^6.1.0-rc.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", + "@liskhq/lisk-transactions": "^6.1.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,7 +121,7 @@ "cli-table3": "0.6.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-framework": "^0.12.0-beta.0", + "lisk-framework": "^0.12.0-rc.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.5.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index e69ccf633c2..a5c059ee9b6 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -85,12 +85,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "0.4.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "0.4.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "0.5.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "0.5.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "0.5.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "0.2.0-beta.1", + "@liskhq/lisk-framework-dashboard-plugin": "0.4.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "0.4.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "0.5.0-rc.0", + 
"@liskhq/lisk-framework-forger-plugin": "0.5.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "0.5.0-rc.0", + "@liskhq/lisk-framework-chain-connector-plugin": "0.2.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -98,8 +98,8 @@ "axios": "0.21.2", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "6.1.0-beta.1", - "lisk-sdk": "6.1.0-beta.1", + "lisk-commander": "6.1.0-rc.0", + "lisk-sdk": "6.1.0-rc.0", "tar": "6.1.11", "tslib": "2.4.1" }, diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 2758af86d0d..adbc88b021e 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -28,7 +28,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "lisk-sdk": "^6.1.0-beta.1" + "lisk-sdk": "^6.1.0-rc.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 3a959a27301..1898cfe07ae 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "6.1.0-beta.1", + "version": "6.1.0-rc.0", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-transactions": 
"^6.1.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "isomorphic-ws": "4.0.1", "ws": "8.11.0", "zeromq": "6.0.0-beta.6" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.6.0-beta.0", + "@liskhq/lisk-chain": "^0.6.0-rc.0", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", "@types/node": "18.15.3", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index ca1640eaade..42026602560 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.6.0-beta.0", + "version": "0.6.0-rc.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-tree": "^0.5.0-beta.0", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-tree": "^0.5.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", "@types/debug": "4.1.5", "@types/faker": "4.1.10", "@types/jest": "29.2.3", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index 4d4cfa9bc22..550ab2b1a5a 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "6.1.0-beta.1", + "version": "6.1.0-rc.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", 
"license": "Apache-2.0", @@ -56,14 +56,14 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.1", - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-tree": "^0.5.0-beta.0", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", + "@liskhq/lisk-transactions": "^6.1.0-rc.0", + "@liskhq/lisk-tree": "^0.5.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 4212c3c295e..0a5b2f592eb 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.5.0-beta.0", + "version": "0.5.0-rc.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0" + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index 7da9623ef91..2663010baba 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": 
"@liskhq/lisk-cryptography", - "version": "4.1.0-beta.1", + "version": "4.1.0-rc.0", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,7 +35,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", "buffer-reverse": "1.0.1", "hash-wasm": "4.9.0", "tweetnacl": "1.0.3" diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index 7021e7939bd..b56886a3f75 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "6.1.0-beta.1", + "version": "6.1.0-rc.0", "description": "Elements for building blockchain applications in the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,18 +35,18 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.1", - "@liskhq/lisk-chain": "^0.6.0-beta.0", - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", + "@liskhq/lisk-chain": "^0.6.0-rc.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.10.0-beta.0", - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transaction-pool": "^0.8.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-tree": "^0.5.0-beta.0", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0" + "@liskhq/lisk-p2p": "^0.10.0-rc.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", + "@liskhq/lisk-transaction-pool": "^0.8.0-rc.0", + "@liskhq/lisk-transactions": "^6.1.0-rc.0", + "@liskhq/lisk-tree": "^0.5.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + 
"@liskhq/lisk-validator": "^0.9.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 5fb0f73eca1..f04b5889734 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.10.0-beta.0", + "version": "0.10.0-rc.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,9 +41,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "lodash.shuffle": "4.2.0", "semver": "7.5.2", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 6bce1a2bac5..4ad128fad5a 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "4.1.0-beta.0", + "version": "4.1.0-rc.0", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 513e69bf69e..2b280f18182 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.8.0-beta.0", + "version": "0.8.0-rc.0", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "build:check": "node 
-e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-utils": "^0.4.0-beta.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 28169ad315f..f9850351712 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "6.1.0-beta.1", + "version": "6.1.0-rc.0", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-validator": "^0.9.0-beta.0" + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 056f2f0ce98..5fca3ffbb45 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.5.0-beta.0", + "version": "0.5.0-rc.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,8 +35,8 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-utils": "^0.4.0-beta.0" + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-utils/package.json 
b/elements/lisk-utils/package.json index d34e3e7ab82..1219e673ab1 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.4.0-beta.0", + "version": "0.4.0-rc.0", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index e8c8fb6e0d8..c4d3244484b 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.9.0-beta.0", + "version": "0.9.0-rc.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "ajv": "8.1.0", "ajv-formats": "2.1.1", "debug": "4.3.4", diff --git a/examples/interop/pos-mainchain-fast/package.json b/examples/interop/pos-mainchain-fast/package.json index e950d54d9dd..abb717f6fd3 100644 --- a/examples/interop/pos-mainchain-fast/package.json +++ b/examples/interop/pos-mainchain-fast/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-rc.0", 
+ "@liskhq/lisk-framework-forger-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.1", - "lisk-sdk": "^6.1.0-beta.1", + "lisk-commander": "^6.1.0-rc.0", + "lisk-sdk": "^6.1.0-rc.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/interop/pos-sidechain-example-one/package.json b/examples/interop/pos-sidechain-example-one/package.json index 3ba9c705106..75d48829c01 100644 --- a/examples/interop/pos-sidechain-example-one/package.json +++ b/examples/interop/pos-sidechain-example-one/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.1", - "lisk-sdk": "^6.1.0-beta.1", + "lisk-commander": "^6.1.0-rc.0", + "lisk-sdk": "^6.1.0-rc.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git 
a/examples/interop/pos-sidechain-example-two/package.json b/examples/interop/pos-sidechain-example-two/package.json index b30d345d46d..fc436548ba8 100644 --- a/examples/interop/pos-sidechain-example-two/package.json +++ b/examples/interop/pos-sidechain-example-two/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.1", - "lisk-sdk": "^6.1.0-beta.1", + "lisk-commander": "^6.1.0-rc.0", + "lisk-sdk": "^6.1.0-rc.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/poa-sidechain/package.json b/examples/poa-sidechain/package.json index a7a0b64fac7..3138922a691 100755 --- a/examples/poa-sidechain/package.json +++ b/examples/poa-sidechain/package.json @@ -113,12 +113,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", - 
"@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -126,8 +126,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.1", - "lisk-sdk": "^6.1.0-beta.1", + "lisk-commander": "^6.1.0-rc.0", + "lisk-sdk": "^6.1.0-rc.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/pos-mainchain/package.json b/examples/pos-mainchain/package.json index d76dbb8997b..f6d41b26d05 100755 --- a/examples/pos-mainchain/package.json +++ b/examples/pos-mainchain/package.json @@ -114,13 +114,13 @@ } }, "dependencies": { - "@liskhq/lisk-validator": "^0.8.0-beta.0", - "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-beta.0", - "@liskhq/lisk-framework-forger-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-beta.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-beta.1", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.2.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-forger-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.5.0-rc.0", + "@liskhq/lisk-validator": "^0.8.0-rc.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -128,8 +128,8 @@ "axios": 
"1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.1.0-beta.0", - "lisk-sdk": "^6.1.0-beta.0", + "lisk-commander": "^6.1.0-rc.0", + "lisk-sdk": "^6.1.0-rc.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/package.json b/framework-plugins/lisk-framework-chain-connector-plugin/package.json index cae8723acb5..44e38d5e9d3 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/package.json +++ b/framework-plugins/lisk-framework-chain-connector-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-chain-connector-plugin", - "version": "0.2.0-beta.1", + "version": "0.2.0-rc.0", "description": "A plugin used by a relayer node to automatically create and submit Cross Chain Transaction by aggregating off-chain information of a chain", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -34,7 +34,7 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.1.0-beta.1" + "lisk-sdk": "^6.1.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 4d8e8d3b1a1..fa58c3f2114 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.4.0-beta.0", + "version": "0.4.0-rc.0", "description": "A plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -46,10 +46,10 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-client": "^6.1.0-beta.1", + "@liskhq/lisk-client": "^6.1.0-rc.0", "express": "4.18.2", "json-format-highlight": "1.0.4", - "lisk-sdk": "^6.1.0-beta.1", + "lisk-sdk": "^6.1.0-rc.0", "react": 
"^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index b2daa1b99bd..671d42c727f 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.4.0-beta.0", + "version": "0.4.0-rc.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -47,15 +47,15 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-api-client": "^6.1.0-beta.1", - "@liskhq/lisk-client": "^6.1.0-beta.1", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", + "@liskhq/lisk-client": "^6.1.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-transactions": "^6.1.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "axios": "1.2.0", "express": "4.18.2", - "lisk-sdk": "^6.1.0-beta.1", + "lisk-sdk": "^6.1.0-rc.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index 43c9ae11d5c..97ccbea5c66 100644 --- a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.5.0-beta.0", + "version": "0.5.0-rc.0", "description": "A plugin for lisk-framework that monitors configured validators forging activity and stakers information.", 
"author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,10 +40,10 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.1.0-beta.1" + "lisk-sdk": "^6.1.0-rc.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.1", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", "@types/debug": "4.1.5", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 1f70f973bba..37e8b420c2f 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.5.0-beta.0", + "version": "0.5.0-rc.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,7 +40,7 @@ "express": "4.18.2", "express-rate-limit": "6.7.0", "ip": "1.1.5", - "lisk-sdk": "^6.1.0-beta.1" + "lisk-sdk": "^6.1.0-rc.0" }, "devDependencies": { "@types/cors": "2.8.12", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index d06184f3450..f6004a01db4 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.5.0-beta.0", + "version": "0.5.0-rc.0", "description": "A plugin for lisk-framework that provides automatic detection of validator misbehavior and sends a reportValidatorMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,9 +38,9 @@ "build:check": "node -e 
\"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "fs-extra": "11.1.0", - "lisk-sdk": "^6.1.0-beta.1" + "lisk-sdk": "^6.1.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework/package.json b/framework/package.json index 769c65e5ef4..d875120eb52 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.12.0-beta.0", + "version": "0.12.0-rc.0", "description": "Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,17 +42,17 @@ }, "dependencies": { "@chainsafe/blst": "0.2.9", - "@liskhq/lisk-api-client": "^6.1.0-beta.1", - "@liskhq/lisk-chain": "^0.6.0-beta.0", - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", + "@liskhq/lisk-chain": "^0.6.0-rc.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.10.0-beta.0", - "@liskhq/lisk-transaction-pool": "^0.8.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-tree": "^0.5.0-beta.0", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0", + "@liskhq/lisk-p2p": "^0.10.0-rc.0", + "@liskhq/lisk-transaction-pool": "^0.8.0-rc.0", + "@liskhq/lisk-transactions": "^6.1.0-rc.0", + "@liskhq/lisk-tree": "^0.5.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.9", @@ -64,7 +64,7 @@ "zeromq": "6.0.0-beta.6" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", "@types/bunyan": "1.8.6", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index 
21d67b5187d..dfe85b3a5e4 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.5.0-beta.0", - "@liskhq/lisk-cryptography": "4.1.0-beta.1", - "@liskhq/lisk-passphrase": "4.1.0-beta.0", - "@liskhq/lisk-validator": "0.9.0-beta.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", + "@liskhq/lisk-passphrase": "4.1.0-rc.0", + "@liskhq/lisk-validator": "^0.9.0-rc.0", "protobufjs": "7.2.4" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 39ee0c20173..b1967a04919 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "6.1.0-beta.1", + "version": "6.1.0-rc.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,19 +29,19 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.1.0-beta.1", - "@liskhq/lisk-chain": "^0.6.0-beta.0", - "@liskhq/lisk-codec": "^0.5.0-beta.0", - "@liskhq/lisk-cryptography": "^4.1.0-beta.1", + "@liskhq/lisk-api-client": "^6.1.0-rc.0", + "@liskhq/lisk-chain": "^0.6.0-rc.0", + "@liskhq/lisk-codec": "^0.5.0-rc.0", + "@liskhq/lisk-cryptography": "^4.1.0-rc.0", "@liskhq/lisk-db": "0.3.7", - "@liskhq/lisk-p2p": "^0.10.0-beta.0", - "@liskhq/lisk-passphrase": "^4.1.0-beta.0", - "@liskhq/lisk-transaction-pool": "^0.8.0-beta.0", - "@liskhq/lisk-transactions": "^6.1.0-beta.1", - "@liskhq/lisk-tree": "^0.5.0-beta.0", - "@liskhq/lisk-utils": "^0.4.0-beta.0", - "@liskhq/lisk-validator": "^0.9.0-beta.0", - "lisk-framework": "^0.12.0-beta.0" + "@liskhq/lisk-p2p": "^0.10.0-rc.0", + "@liskhq/lisk-passphrase": "^4.1.0-rc.0", + "@liskhq/lisk-transaction-pool": "^0.8.0-rc.0", + "@liskhq/lisk-transactions": "^6.1.0-rc.0", + "@liskhq/lisk-tree": "^0.5.0-rc.0", + "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-validator": 
"^0.9.0-rc.0", + "lisk-framework": "^0.12.0-rc.0" }, "devDependencies": { "eslint": "8.28.0", diff --git a/test/package.json b/test/package.json index f9c2d1dd0e6..0e79ca8437e 100644 --- a/test/package.json +++ b/test/package.json @@ -32,7 +32,7 @@ }, "dependencies": { "debug": "4.3.4", - "lisk-sdk": "^6.0.0-alpha.0" + "lisk-sdk": "^6.1.0-rc.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/yarn.lock b/yarn.lock index c8dbb3b721e..6be1f9fee56 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1855,55 +1855,10 @@ dependencies: "@types/node" "11.11.2" -"@liskhq/lisk-api-client@^6.0.0-beta.6": - version "6.0.0-beta.6" - resolved "https://npm.lisk.com/@liskhq/lisk-api-client/-/lisk-api-client-6.0.0-beta.6.tgz#9377c1f0af898c4f1ca49babd606faa4ba5f6e0e" - integrity sha512-Dhd7yWw2N7DlSrhTiQyitZ/Y8OSLCl1vlyWJx2GQXm+hbDf4nezcSdoAO3Z1W/hYFbvOckl+JKgZtALydOZogA== - dependencies: - "@liskhq/lisk-codec" "^0.3.0-beta.5" - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-transactions" "^6.0.0-beta.5" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - isomorphic-ws "4.0.1" - ws "8.11.0" - zeromq "6.0.0-beta.6" - -"@liskhq/lisk-chain@^0.4.0-beta.6": - version "0.4.0-beta.6" - resolved "https://npm.lisk.com/@liskhq/lisk-chain/-/lisk-chain-0.4.0-beta.6.tgz#6ea5fbe9769975e7f73665a676643c8553d90e04" - integrity sha512-3ttMQkWQn4ZJe/STzF8ZD4tjPV0Mt/4eNBHBjkrilEcN/J0YQUGN9ORh3pxhupBXfGt88Ht+jqdq+ZAnm5dEOA== - dependencies: - "@liskhq/lisk-codec" "^0.3.0-beta.5" - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-db" "0.3.7" - "@liskhq/lisk-tree" "^0.3.0-beta.5" - "@liskhq/lisk-utils" "^0.3.0-beta.3" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - debug "4.3.4" - -"@liskhq/lisk-codec@^0.3.0-beta.5": - version "0.3.0-beta.5" - resolved "https://npm.lisk.com/@liskhq/lisk-codec/-/lisk-codec-0.3.0-beta.5.tgz#39599debb707574a851e1697c992747d1e3318d9" - integrity sha512-VAtJH5C2RNNkQ4751NRIl7HfpoojSmfuGEI3jHSq9SbtXKYigkXSzkh6FhiwnSLIuEZNtCEoO1kyla6WQox3aQ== - dependencies: - 
"@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-utils" "^0.3.0-beta.3" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - -"@liskhq/lisk-cryptography@^4.0.0-beta.4": - version "4.0.0-beta.4" - resolved "https://npm.lisk.com/@liskhq/lisk-cryptography/-/lisk-cryptography-4.0.0-beta.4.tgz#36f3c18ee806a8e56e7ec6e1b6775ffa89215159" - integrity sha512-m+rwGPqVT4WmdHJ8HB6fCUyY4iCPweWf/DZBqmVmO3Mqnrxq0E0FjSXutofWiBrzLxtyNmolpGSENrMKvIdaSw== - dependencies: - "@liskhq/lisk-passphrase" "^4.0.0-beta.2" - buffer-reverse "1.0.1" - hash-wasm "4.9.0" - tweetnacl "1.0.3" - -"@liskhq/lisk-cryptography@^4.0.0-rc.0": - version "4.0.0-rc.0" - resolved "https://npm.lisk.com/@liskhq/lisk-cryptography/-/lisk-cryptography-4.0.0-rc.0.tgz#0cf5e1b8f67116c8ff258e62d5da3d23a5cef185" - integrity sha512-OIqxD9oNcY2OlFNkI+Ay3Mex+EEt3AcmDFKpkshGBieuMvZzgfOAupPfaB3L36q6pMxGHpRc9Nzz6VQt0vNCYQ== +"@liskhq/lisk-cryptography@^4.0.0-rc.2": + version "4.0.0-rc.2" + resolved "https://registry.yarnpkg.com/@liskhq/lisk-cryptography/-/lisk-cryptography-4.0.0-rc.2.tgz#042191f8c02834388157469015b73318df097964" + integrity sha512-kxHQ1fGiOC5WaqRRKaJJIOI6EqljyYZuDl7zg+Ihwz36ZDTRtGNLpUCA8+1GPYBwRghp2QobRibUBZh649Hh5A== dependencies: "@liskhq/lisk-passphrase" "^4.0.0-rc.0" buffer-reverse "1.0.1" @@ -1920,26 +1875,6 @@ cargo-cp-artifact "^0.1" shelljs "^0.8.5" -"@liskhq/lisk-p2p@^0.8.0-beta.6": - version "0.8.0-beta.6" - resolved "https://npm.lisk.com/@liskhq/lisk-p2p/-/lisk-p2p-0.8.0-beta.6.tgz#3a30c3ef6774042b213f2ca94ee620ba74178e1d" - integrity sha512-aN50jZV8iwhT13GK5ZwguW5oj/5IKb/HV5LnEcDUhV7FeQig3DAnnBJJhxzcHZtUrGgSmRkiPKrDmT+w0XwAmw== - dependencies: - "@liskhq/lisk-codec" "^0.3.0-beta.5" - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - lodash.shuffle "4.2.0" - semver "7.5.2" - socketcluster-client "14.3.1" - socketcluster-server "14.6.0" - -"@liskhq/lisk-passphrase@^4.0.0-beta.2": - version "4.0.0-canary.33179" - resolved 
"https://npm.lisk.com/@liskhq/lisk-passphrase/-/lisk-passphrase-4.0.0-canary.33179.tgz#8e04848238e72eafa2f2bb80ea91cb0e69bbedc3" - integrity sha512-EPXgHb4YBme/FoS7CLEbvPU0pshKH9FRDL9qUn8Po2tB72IdIfDh+wP9nAD/92v4JcsVVGtPgX5MWiiYHb1GCw== - dependencies: - bip39 "3.0.3" - "@liskhq/lisk-passphrase@^4.0.0-rc.0": version "4.0.0-rc.0" resolved "https://npm.lisk.com/@liskhq/lisk-passphrase/-/lisk-passphrase-4.0.0-rc.0.tgz#78fe583229c96d76258906375e34ff84a413be05" @@ -1947,57 +1882,12 @@ dependencies: bip39 "3.0.3" -"@liskhq/lisk-transaction-pool@^0.6.0-beta.6": - version "0.6.0-beta.6" - resolved "https://npm.lisk.com/@liskhq/lisk-transaction-pool/-/lisk-transaction-pool-0.6.0-beta.6.tgz#85c36789c45cffbbd73f90a961710763cdd857aa" - integrity sha512-OG/RFVj/uEEdNgKdE1KEipkNqPhwW6QlwRlzTmwIQQ9m+O9dHW3f0xoPufFJVqR8BtFwRDlctRv6HTfJWUUdcA== - dependencies: - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-utils" "^0.3.0-beta.3" - debug "4.3.4" - -"@liskhq/lisk-transactions@^6.0.0-beta.5": - version "6.0.0-beta.5" - resolved "https://npm.lisk.com/@liskhq/lisk-transactions/-/lisk-transactions-6.0.0-beta.5.tgz#cb00e54dc417c138e53da2aec89a65768860e8ca" - integrity sha512-vgdGjw4D1wrZVa0YAOc5YlvrfjYarHbKo9gx8CUrEJv72M0B/UImMyVQQbVODLT9YeIvJ1ajUWLYl372b9ZFow== - dependencies: - "@liskhq/lisk-codec" "^0.3.0-beta.5" - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - -"@liskhq/lisk-tree@^0.3.0-beta.5": - version "0.3.0-beta.5" - resolved "https://npm.lisk.com/@liskhq/lisk-tree/-/lisk-tree-0.3.0-beta.5.tgz#eaca57ba3942c97b23c13a92723f2cff3303848f" - integrity sha512-XUrTVmRpx/bn96TJOdrqbJG5sglfbeKk7+39o6BWH2KVFacyPAbEJi0bRmCCsEVjK9dzazxjWKjRp+qlqbjQcw== - dependencies: - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-utils" "^0.3.0-beta.3" - -"@liskhq/lisk-utils@^0.3.0-beta.3": - version "0.3.0-canary.33253" - resolved 
"https://npm.lisk.com/@liskhq/lisk-utils/-/lisk-utils-0.3.0-canary.33253.tgz#07f3c1274a4660bc2f13280ca11f783a4434ade6" - integrity sha512-hIaYerKi0gC/c7nj9hn9CQfKxih8kSNkinS3N/2au2qs4LWNDQOKk0aUynMpQej1Xv26WcF51tY//Je/+k3wbg== - dependencies: - lodash.clonedeep "4.5.0" - -"@liskhq/lisk-validator@^0.7.0-beta.5": - version "0.7.0-beta.5" - resolved "https://npm.lisk.com/@liskhq/lisk-validator/-/lisk-validator-0.7.0-beta.5.tgz#d8a7fdcb5eaa45c2cd01b23d1fde0c365e5cff1a" - integrity sha512-oySl7xe84Fnh7gUtZwrHPCtpLruPfFo//Owbf0spW56QAkYZWu+1eeGOICgKW8RzxPyFX9WAjWwK3P8Md8tNTg== +"@liskhq/lisk-validator@^0.8.0-rc.0": + version "0.8.0-rc.2" + resolved "https://registry.yarnpkg.com/@liskhq/lisk-validator/-/lisk-validator-0.8.0-rc.2.tgz#4c81e83027c5c832f7a2915ce0bdc79614387707" + integrity sha512-b4V0z2TlOxEosAKpxgR/lgobvpw5Lc2/ZAuBxDuwlvuXh5WSoz+VaPMf2Ry7N9TQwXtvHx3MO8chraJN+F5bEg== dependencies: - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - ajv "8.1.0" - ajv-formats "2.1.1" - debug "4.3.4" - semver "7.5.2" - validator "13.7.0" - -"@liskhq/lisk-validator@^0.8.0-beta.0": - version "0.8.0-rc.0" - resolved "https://npm.lisk.com/@liskhq/lisk-validator/-/lisk-validator-0.8.0-rc.0.tgz#fb136717f71ce35c7937d79d3b857d26195ff8c8" - integrity sha512-ymKW2eRw4KVBXJLN7co2iJv9WmkytL4IyxXiwJq5xvSzAN/YIx7NYViT8WCcGmUA/ryd55kwdKX/9kKkhafiDw== - dependencies: - "@liskhq/lisk-cryptography" "^4.0.0-rc.0" + "@liskhq/lisk-cryptography" "^4.0.0-rc.2" ajv "8.1.0" ajv-formats "2.1.1" debug "4.3.4" @@ -11125,52 +11015,6 @@ lint-staged@13.0.3: string-argv "^0.3.1" yaml "^2.1.1" -lisk-framework@^0.10.0-beta.7: - version "0.10.0-beta.7" - resolved "https://npm.lisk.com/lisk-framework/-/lisk-framework-0.10.0-beta.7.tgz#ec405c771d2de2e228b286d7ba04aa9330b0d4b7" - integrity sha512-BytJ2/l3CvN6nBOjlAZ6oP5db+ksbFYejjT9oICw4facKuKMO+dEDl2OMAH8gAn6wsPD9VgFT1UUy+FuY0k+Rw== - dependencies: - "@chainsafe/blst" "0.2.9" - "@liskhq/lisk-api-client" "^6.0.0-beta.6" - "@liskhq/lisk-chain" "^0.4.0-beta.6" - 
"@liskhq/lisk-codec" "^0.3.0-beta.5" - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-db" "0.3.7" - "@liskhq/lisk-p2p" "^0.8.0-beta.6" - "@liskhq/lisk-transaction-pool" "^0.6.0-beta.6" - "@liskhq/lisk-transactions" "^6.0.0-beta.5" - "@liskhq/lisk-tree" "^0.3.0-beta.5" - "@liskhq/lisk-utils" "^0.3.0-beta.3" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - bunyan "1.8.15" - debug "4.3.4" - eventemitter2 "6.4.9" - fs-extra "11.1.0" - prom-client "14.2.0" - ps-list "7.2.0" - sodium-native "3.2.1" - ws "8.11.0" - zeromq "6.0.0-beta.6" - -lisk-sdk@^6.0.0-alpha.0: - version "6.0.0-beta.7" - resolved "https://npm.lisk.com/lisk-sdk/-/lisk-sdk-6.0.0-beta.7.tgz#35157d05d80cb82f65a72240b266b32cb4d6285d" - integrity sha512-GuIEgZ4HmRKt6GXSAkkDJed9JOJIJdKjdSJxqfbIO2B8wYrMQv10bpmbVEXli1QP8B8jsGq7K3XCr+yFXeNvVg== - dependencies: - "@liskhq/lisk-api-client" "^6.0.0-beta.6" - "@liskhq/lisk-chain" "^0.4.0-beta.6" - "@liskhq/lisk-codec" "^0.3.0-beta.5" - "@liskhq/lisk-cryptography" "^4.0.0-beta.4" - "@liskhq/lisk-db" "0.3.7" - "@liskhq/lisk-p2p" "^0.8.0-beta.6" - "@liskhq/lisk-passphrase" "^4.0.0-beta.2" - "@liskhq/lisk-transaction-pool" "^0.6.0-beta.6" - "@liskhq/lisk-transactions" "^6.0.0-beta.5" - "@liskhq/lisk-tree" "^0.3.0-beta.5" - "@liskhq/lisk-utils" "^0.3.0-beta.3" - "@liskhq/lisk-validator" "^0.7.0-beta.5" - lisk-framework "^0.10.0-beta.7" - listr-silent-renderer@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/listr-silent-renderer/-/listr-silent-renderer-1.1.1.tgz#924b5a3757153770bf1a8e3fbf74b8bbf3f9242e" From 87d6b877b35c5bf9d393e6720cd80bfcd4f5a08a Mon Sep 17 00:00:00 2001 From: shuse2 Date: Tue, 7 Nov 2023 08:15:01 +0100 Subject: [PATCH 170/170] :arrow_up: Bump version --- commander/oclif.manifest.json | 683 ------------------ commander/package.json | 22 +- .../templates/init/package-template.json | 16 +- .../templates/init_plugin/package.json | 2 +- elements/lisk-api-client/package.json | 12 +- elements/lisk-chain/package.json | 14 +- 
elements/lisk-client/package.json | 18 +- elements/lisk-codec/package.json | 8 +- elements/lisk-cryptography/package.json | 4 +- elements/lisk-elements/package.json | 24 +- elements/lisk-p2p/package.json | 8 +- elements/lisk-passphrase/package.json | 2 +- elements/lisk-transaction-pool/package.json | 6 +- elements/lisk-transactions/package.json | 8 +- elements/lisk-tree/package.json | 6 +- elements/lisk-utils/package.json | 2 +- elements/lisk-validator/package.json | 4 +- .../interop/pos-mainchain-fast/package.json | 16 +- .../pos-sidechain-example-one/package.json | 16 +- .../pos-sidechain-example-two/package.json | 16 +- examples/pos-mainchain/package.json | 14 +- .../package.json | 4 +- .../package.json | 6 +- .../lisk-framework-faucet-plugin/package.json | 16 +- .../lisk-framework-forger-plugin/package.json | 6 +- .../package.json | 4 +- .../package.json | 6 +- framework/package.json | 24 +- protocol-specs/package.json | 8 +- sdk/package.json | 26 +- 30 files changed, 159 insertions(+), 842 deletions(-) delete mode 100644 commander/oclif.manifest.json diff --git a/commander/oclif.manifest.json b/commander/oclif.manifest.json deleted file mode 100644 index dcca4a28e10..00000000000 --- a/commander/oclif.manifest.json +++ /dev/null @@ -1,683 +0,0 @@ -{ - "version": "6.0.0-rc.3", - "commands": { - "console": { - "id": "console", - "description": "Lisk interactive REPL session to run commands.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "console", - "console --api-ws=ws://localhost:8080", - "console --api-ipc=/path/to/server" - ], - "flags": { - "api-ipc": { - "name": "api-ipc", - "type": "option", - "description": "Enable api-client with IPC communication.", - "multiple": false, - "exclusive": ["api-ws"] - }, - "api-ws": { - "name": "api-ws", - "type": "option", - "description": "Enable api-client with Websocket communication.", - "multiple": false, - 
"exclusive": ["api-ipc"] - } - }, - "args": [] - }, - "hash-onion": { - "id": "hash-onion", - "description": "Create hash onions to be used by the forger.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "hash-onion --count=1000000 --distance=2000 --pretty", - "hash-onion --count=1000000 --distance=2000 --output ~/my_onion.json" - ], - "flags": { - "output": { - "name": "output", - "type": "option", - "char": "o", - "description": "Output file path", - "multiple": false - }, - "count": { - "name": "count", - "type": "option", - "char": "c", - "description": "Total number of hashes to produce", - "multiple": false, - "default": 1000000 - }, - "distance": { - "name": "distance", - "type": "option", - "char": "d", - "description": "Distance between each hash", - "multiple": false, - "default": 1000 - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed.", - "allowNo": false - } - }, - "args": [] - }, - "init": { - "id": "init", - "description": "Bootstrap a blockchain application using Lisk SDK.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "init", - "init --template lisk-ts", - "init --template @some-global-npm-package", - "init /project/path", - "init /project/path --template lisk-ts" - ], - "flags": { - "template": { - "name": "template", - "type": "option", - "char": "t", - "description": "Template to bootstrap the application. 
It will read from `.liskrc.json` or use `lisk-ts` if not found.", - "multiple": false - }, - "registry": { - "name": "registry", - "type": "option", - "description": "URL of a registry to download dependencies from.", - "multiple": false - } - }, - "args": [ - { - "name": "projectPath", - "description": "Path to create the project.", - "default": "/Users/ishan/repos/lisk-sdk/commander" - } - ] - }, - "endpoint:invoke": { - "id": "endpoint:invoke", - "description": "Invokes the provided endpoint.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "endpoint:invoke {endpoint} {parameters}", - "endpoint:invoke --data-path --file", - "endpoint:invoke generator_getAllKeys", - "endpoint:invoke consensus_getBFTParameters '{\"height\": 2}' -d ~/.lisk/pos-mainchain --pretty", - "endpoint:invoke consensus_getBFTParameters -f ./input.json" - ], - "flags": { - "data-path": { - "name": "data-path", - "type": "option", - "char": "d", - "description": "Directory path to specify where node data is stored. 
Environment variable \"LISK_DATA_PATH\" can also be used.", - "multiple": false - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed.", - "allowNo": false - }, - "file": { - "name": "file", - "type": "option", - "char": "f", - "description": "Input file.", - "multiple": false - } - }, - "args": [ - { "name": "endpoint", "description": "Endpoint to invoke", "required": true }, - { "name": "params", "description": "Endpoint parameters (Optional)", "required": false } - ] - }, - "generate:command": { - "id": "generate:command", - "description": "Creates an command skeleton for the given module name, name and id.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "generate:command moduleName commandName commandID", - "generate:command nft transfer 1" - ], - "flags": { - "template": { - "name": "template", - "type": "option", - "char": "t", - "description": "Template to bootstrap the application. It will read from `.liskrc.json` or use `lisk-ts` if not found.", - "multiple": false - } - }, - "args": [ - { "name": "moduleName", "description": "Module name.", "required": true }, - { "name": "commandName", "description": "Asset name.", "required": true } - ] - }, - "generate:module": { - "id": "generate:module", - "description": "Creates a module skeleton for the given name.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": ["generate:module nft"], - "flags": { - "template": { - "name": "template", - "type": "option", - "char": "t", - "description": "Template to bootstrap the application. 
It will read from `.liskrc.json` or use `lisk-ts` if not found.", - "multiple": false - } - }, - "args": [{ "name": "moduleName", "description": "Module name.", "required": true }] - }, - "generate:plugin": { - "id": "generate:plugin", - "description": "Creates custom plugin.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "generate:plugin myPlugin", - "generate:plugin myPlugin --standalone --output ./my_plugin" - ], - "flags": { - "template": { - "name": "template", - "type": "option", - "char": "t", - "description": "Template to bootstrap the application. It will read from `.liskrc.json` or use `lisk-ts` if not found.", - "multiple": false - }, - "standalone": { - "name": "standalone", - "type": "boolean", - "description": "Create a standalone plugin package.", - "allowNo": false - }, - "output": { - "name": "output", - "type": "option", - "char": "o", - "description": "Path to create the plugin.", - "multiple": false, - "dependsOn": ["standalone"] - }, - "registry": { - "name": "registry", - "type": "option", - "description": "URL of a registry to download dependencies from.", - "multiple": false, - "dependsOn": ["standalone"] - } - }, - "args": [{ "name": "name", "description": "Name of the plugin.", "required": true }] - }, - "keys:create": { - "id": "keys:create", - "description": "Return keys corresponding to the given passphrase.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "keys:create", - "keys:create --passphrase your-passphrase", - "keys:create --passphrase your-passphrase --no-encrypt", - "keys:create --passphrase your-passphrase --password your-password", - "keys:create --passphrase your-passphrase --password your-password --count 2", - "keys:create --passphrase your-passphrase --no-encrypt --count 2 --offset 1", - "keys:create --passphrase 
your-passphrase --no-encrypt --count 2 --offset 1 --chainid 1", - "keys:create --passphrase your-passphrase --password your-password --count 2 --offset 1 --chainid 1 --output /mypath/keys.json" - ], - "flags": { - "output": { - "name": "output", - "type": "option", - "char": "o", - "description": "The output directory. Default will set to current working directory.", - "multiple": false - }, - "passphrase": { - "name": "passphrase", - "type": "option", - "char": "p", - "description": "Specifies a source for your secret passphrase. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --passphrase='my secret passphrase' (should only be used where security is not important)\n", - "multiple": false - }, - "no-encrypt": { - "name": "no-encrypt", - "type": "boolean", - "char": "n", - "description": "No encrypted message object to be created", - "allowNo": false - }, - "password": { - "name": "password", - "type": "option", - "char": "w", - "description": "Specifies a source for your secret password. 
Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", - "multiple": false - }, - "count": { - "name": "count", - "type": "option", - "char": "c", - "description": "Number of keys to create", - "multiple": false, - "default": 1 - }, - "offset": { - "name": "offset", - "type": "option", - "char": "f", - "description": "Offset for the key derivation path", - "multiple": false, - "default": 0 - }, - "chainid": { - "name": "chainid", - "type": "option", - "char": "i", - "description": "Chain id", - "multiple": false, - "default": 0 - }, - "add-legacy": { - "name": "add-legacy", - "type": "boolean", - "description": "Add legacy key derivation path to the result", - "allowNo": false - } - }, - "args": [] - }, - "keys:encrypt": { - "id": "keys:encrypt", - "description": "Encrypt keys from a file and overwrite the file", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "keys:encrypt --file-path ./my/path/keys.json", - "keys:encrypt --file-path ./my/path/keys.json --password mypass" - ], - "flags": { - "file-path": { - "name": "file-path", - "type": "option", - "char": "f", - "description": "Path of the file to encrypt from", - "required": true, - "multiple": false - }, - "password": { - "name": "password", - "type": "option", - "char": "w", - "description": "Specifies a source for your secret password. 
Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", - "multiple": false - } - }, - "args": [] - }, - "keys:export": { - "id": "keys:export", - "description": "Export to .", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "keys:export --output /mypath/keys.json", - "keys:export --output /mypath/keys.json --data-path ./data " - ], - "flags": { - "data-path": { - "name": "data-path", - "type": "option", - "char": "d", - "description": "Directory path to specify where node data is stored. Environment variable \"LISK_DATA_PATH\" can also be used.", - "multiple": false - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed.", - "allowNo": false - }, - "output": { - "name": "output", - "type": "option", - "char": "o", - "description": "The output directory. Default will set to current working directory.", - "required": true, - "multiple": false - } - }, - "args": [] - }, - "keys:import": { - "id": "keys:import", - "description": "Import from .", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "keys:import --file-path ./my/path/keys.json", - "keys:import --file-path ./my/path/keys.json --data-path ./data " - ], - "flags": { - "data-path": { - "name": "data-path", - "type": "option", - "char": "d", - "description": "Directory path to specify where node data is stored. 
Environment variable \"LISK_DATA_PATH\" can also be used.", - "multiple": false - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed.", - "allowNo": false - }, - "file-path": { - "name": "file-path", - "type": "option", - "char": "f", - "description": "Path of the file to import from", - "required": true, - "multiple": false - } - }, - "args": [] - }, - "message:decrypt": { - "id": "message:decrypt", - "description": "\n\tDecrypts a previously encrypted message using your the password used to encrypt.\n\t", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": ["message:decrypt "], - "flags": { - "password": { - "name": "password", - "type": "option", - "char": "w", - "description": "Specifies a source for your secret password. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", - "multiple": false - }, - "message": { - "name": "message", - "type": "option", - "char": "m", - "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", - "multiple": false - } - }, - "args": [{ "name": "message", "description": "Encrypted message." 
}] - }, - "message:encrypt": { - "id": "message:encrypt", - "description": "\n\tEncrypts a message with a password provided.\n\t", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": ["message:encrypt \"Hello world\""], - "flags": { - "password": { - "name": "password", - "type": "option", - "char": "w", - "description": "Specifies a source for your secret password. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", - "multiple": false - }, - "message": { - "name": "message", - "type": "option", - "char": "m", - "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", - "multiple": false - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed.", - "allowNo": false - }, - "stringify": { - "name": "stringify", - "type": "boolean", - "char": "s", - "description": "Display encrypted message in stringified format", - "allowNo": false - } - }, - "args": [{ "name": "message", "description": "Message to encrypt." 
}] - }, - "message:sign": { - "id": "message:sign", - "description": "\n\tSigns a message using your secret passphrase.\n\t", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": ["message:sign \"Hello world\""], - "flags": { - "json": { - "name": "json", - "type": "boolean", - "char": "j", - "description": "Prints output in JSON format. You can change the default behavior in your config.json file.", - "allowNo": true - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behavior in your config.json file.", - "allowNo": true - }, - "passphrase": { - "name": "passphrase", - "type": "option", - "char": "p", - "description": "Specifies a source for your secret passphrase. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --passphrase='my secret passphrase' (should only be used where security is not important)\n", - "multiple": false - }, - "message": { - "name": "message", - "type": "option", - "char": "m", - "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", - "multiple": false - } - }, - "args": [{ "name": "message", "description": "Message to sign." 
}] - }, - "message:verify": { - "id": "message:verify", - "description": "\n\tVerifies a signature for a message using the signer’s public key.\n\t", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "message:verify 647aac1e2df8a5c870499d7ddc82236b1e10936977537a3844a6b05ea33f9ef6 2a3ca127efcf7b2bf62ac8c3b1f5acf6997cab62ba9fde3567d188edcbacbc5dc8177fb88d03a8691ce03348f569b121bca9e7a3c43bf5c056382f35ff843c09 \"Hello world\"" - ], - "flags": { - "json": { - "name": "json", - "type": "boolean", - "char": "j", - "description": "Prints output in JSON format. You can change the default behavior in your config.json file.", - "allowNo": true - }, - "pretty": { - "name": "pretty", - "type": "boolean", - "description": "Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behavior in your config.json file.", - "allowNo": true - }, - "message": { - "name": "message", - "type": "option", - "char": "m", - "description": "Specifies a source for providing a message to the command. If a string is provided directly as an argument, this option will be ignored. The message must be provided via an argument or via this option. Sources must be one of `file` or `stdin`. In the case of `file`, a corresponding identifier must also be provided.\n\tNote: if both secret passphrase and message are passed via stdin, the passphrase must be the first line.\n\tExamples:\n\t- --message=file:/path/to/my/message.txt\n\t- --message=\"hello world\"\n", - "multiple": false - } - }, - "args": [ - { - "name": "publicKey", - "description": "Public key of the signer of the message.", - "required": true - }, - { "name": "signature", "description": "Signature to verify.", "required": true }, - { "name": "message", "description": "Message to verify." 
} - ] - }, - "passphrase:create": { - "id": "passphrase:create", - "description": "Returns a randomly generated 24 words mnemonic passphrase.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": ["passphrase:create", "passphrase:create --output /mypath/passphrase.json"], - "flags": { - "output": { - "name": "output", - "type": "option", - "char": "o", - "description": "The output directory. Default will set to current working directory.", - "multiple": false - } - }, - "args": [] - }, - "passphrase:decrypt": { - "id": "passphrase:decrypt", - "description": "Decrypt secret passphrase using the password provided at the time of encryption.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "passphrase:decrypt --file-path ./my/path/output.json", - "passphrase:decrypt --file-path ./my/path/output.json --password your-password" - ], - "flags": { - "password": { - "name": "password", - "type": "option", - "char": "w", - "description": "Specifies a source for your secret password. 
Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", - "multiple": false - }, - "file-path": { - "name": "file-path", - "type": "option", - "char": "f", - "description": "Path of the file to import from", - "required": true, - "multiple": false - } - }, - "args": [] - }, - "passphrase:encrypt": { - "id": "passphrase:encrypt", - "description": "Encrypt secret passphrase using password.", - "strict": true, - "pluginName": "lisk-commander", - "pluginAlias": "lisk-commander", - "pluginType": "core", - "aliases": [], - "examples": [ - "passphrase:encrypt", - "passphrase:encrypt --passphrase your-passphrase --output /mypath/keys.json", - "passphrase:encrypt --password your-password", - "passphrase:encrypt --password your-password --passphrase your-passphrase --output /mypath/keys.json", - "passphrase:encrypt --output-public-key --output /mypath/keys.json" - ], - "flags": { - "password": { - "name": "password", - "type": "option", - "char": "w", - "description": "Specifies a source for your secret password. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --password=pass:password123 (should only be used where security is not important)\n", - "multiple": false - }, - "passphrase": { - "name": "passphrase", - "type": "option", - "char": "p", - "description": "Specifies a source for your secret passphrase. Command will prompt you for input if this option is not set.\n\tExamples:\n\t- --passphrase='my secret passphrase' (should only be used where security is not important)\n", - "multiple": false - }, - "output-public-key": { - "name": "output-public-key", - "type": "boolean", - "description": "Includes the public key in the output. This option is provided for the convenience of node operators.", - "allowNo": false - }, - "output": { - "name": "output", - "type": "option", - "char": "o", - "description": "The output directory. 
Default will set to current working directory.", - "multiple": false - } - }, - "args": [] - } - } -} diff --git a/commander/package.json b/commander/package.json index 5d095484feb..0e0461ec357 100644 --- a/commander/package.json +++ b/commander/package.json @@ -1,6 +1,6 @@ { "name": "lisk-commander", - "version": "6.0.0-rc.6", + "version": "6.0.0", "description": "A command line interface for Lisk", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -101,16 +101,16 @@ "/docs" ], "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-rc.4", - "@liskhq/lisk-chain": "^0.5.0-rc.4", - "@liskhq/lisk-client": "^6.0.0-rc.4", - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-api-client": "^6.0.0", + "@liskhq/lisk-chain": "^0.5.0", + "@liskhq/lisk-client": "^6.0.0", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", "@liskhq/lisk-db": "0.3.10", - "@liskhq/lisk-passphrase": "^4.0.0-rc.0", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-passphrase": "^4.0.0", + "@liskhq/lisk-transactions": "^6.0.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,7 +121,7 @@ "cli-table3": "0.6.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-framework": "^0.11.0-rc.5", + "lisk-framework": "^0.11.0", "listr": "0.14.3", "progress": "2.0.3", "semver": "7.5.2", diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json index 12794990e76..1be130bc60c 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json +++ 
b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init/package-template.json @@ -85,12 +85,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "0.3.0-rc.5", - "@liskhq/lisk-framework-faucet-plugin": "0.3.0-rc.5", - "@liskhq/lisk-framework-monitor-plugin": "0.4.0-rc.5", - "@liskhq/lisk-framework-forger-plugin": "0.4.0-rc.5", - "@liskhq/lisk-framework-report-misbehavior-plugin": "0.4.0-rc.5", - "@liskhq/lisk-framework-chain-connector-plugin": "0.1.0-rc.5", + "@liskhq/lisk-framework-dashboard-plugin": "0.3.0", + "@liskhq/lisk-framework-faucet-plugin": "0.3.0", + "@liskhq/lisk-framework-monitor-plugin": "0.4.0", + "@liskhq/lisk-framework-forger-plugin": "0.4.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "0.4.0", + "@liskhq/lisk-framework-chain-connector-plugin": "0.1.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -98,8 +98,8 @@ "axios": "0.21.2", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "6.0.0-rc.6", - "lisk-sdk": "6.0.0-rc.5", + "lisk-commander": "6.0.0", + "lisk-sdk": "6.0.0", "tar": "6.1.11", "tslib": "2.4.1" }, diff --git a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json index 6b7ba039875..bd198704341 100644 --- a/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json +++ b/commander/src/bootstrapping/templates/lisk-template-ts/templates/init_plugin/package.json @@ -28,7 +28,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "lisk-sdk": "^6.0.0-rc.5" + "lisk-sdk": "^6.0.0" }, "devDependencies": { "@types/jest": "26.0.21", diff --git a/elements/lisk-api-client/package.json b/elements/lisk-api-client/package.json index 5cf49fd4caf..663a023946a 100644 --- a/elements/lisk-api-client/package.json +++ b/elements/lisk-api-client/package.json @@ -1,6 
+1,6 @@ { "name": "@liskhq/lisk-api-client", - "version": "6.0.0-rc.4", + "version": "6.0.0", "description": "An API client for the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-transactions": "^6.0.0", + "@liskhq/lisk-validator": "^0.8.0", "isomorphic-ws": "4.0.1", "ws": "8.11.0", "zeromq": "6.0.0-beta.6" }, "devDependencies": { - "@liskhq/lisk-chain": "^0.5.0-rc.4", + "@liskhq/lisk-chain": "^0.5.0", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", "@types/node": "18.15.3", diff --git a/elements/lisk-chain/package.json b/elements/lisk-chain/package.json index a4e48b48377..ea74bf5bc0a 100644 --- a/elements/lisk-chain/package.json +++ b/elements/lisk-chain/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-chain", - "version": "0.5.0-rc.4", + "version": "0.5.0", "description": "Blocks and state management implementation that are used for block processing according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,16 +35,16 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", "@liskhq/lisk-db": "0.3.10", - "@liskhq/lisk-tree": "^0.4.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-tree": "^0.4.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0", "debug": "4.3.4" }, "devDependencies": { - "@liskhq/lisk-passphrase": "^4.0.0-rc.0", + "@liskhq/lisk-passphrase": 
"^4.0.0", "@types/debug": "4.1.5", "@types/faker": "4.1.10", "@types/jest": "29.2.3", diff --git a/elements/lisk-client/package.json b/elements/lisk-client/package.json index bfc06230538..617fef8e29d 100644 --- a/elements/lisk-client/package.json +++ b/elements/lisk-client/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-client", - "version": "6.0.0-rc.4", + "version": "6.0.0", "description": "A default set of Elements for use by clients of the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -56,14 +56,14 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-rc.4", - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-passphrase": "^4.0.0-rc.0", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - "@liskhq/lisk-tree": "^0.4.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-api-client": "^6.0.0", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-passphrase": "^4.0.0", + "@liskhq/lisk-transactions": "^6.0.0", + "@liskhq/lisk-tree": "^0.4.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0", "buffer": "6.0.3" }, "devDependencies": { diff --git a/elements/lisk-codec/package.json b/elements/lisk-codec/package.json index 24c71845042..6591fcff8fe 100644 --- a/elements/lisk-codec/package.json +++ b/elements/lisk-codec/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-codec", - "version": "0.4.0-rc.2", + "version": "0.4.0", "description": "Implementation of decoder and encoder using Lisk JSON schema according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": 
"^0.8.0-rc.2" + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-cryptography/package.json b/elements/lisk-cryptography/package.json index f05e75e53a6..3edff071f19 100644 --- a/elements/lisk-cryptography/package.json +++ b/elements/lisk-cryptography/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-cryptography", - "version": "4.0.0-rc.2", + "version": "4.0.0", "description": "General cryptographic functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,7 +35,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-passphrase": "^4.0.0-rc.0", + "@liskhq/lisk-passphrase": "^4.0.0", "buffer-reverse": "1.0.1", "hash-wasm": "4.9.0", "tweetnacl": "1.0.3" diff --git a/elements/lisk-elements/package.json b/elements/lisk-elements/package.json index cb3a2c04796..3fb86288fb7 100644 --- a/elements/lisk-elements/package.json +++ b/elements/lisk-elements/package.json @@ -1,6 +1,6 @@ { "name": "lisk-elements", - "version": "6.0.0-rc.4", + "version": "6.0.0", "description": "Elements for building blockchain applications in the Lisk network", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,18 +35,18 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-rc.4", - "@liskhq/lisk-chain": "^0.5.0-rc.4", - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-api-client": "^6.0.0", + "@liskhq/lisk-chain": "^0.5.0", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", "@liskhq/lisk-db": "0.3.10", - "@liskhq/lisk-p2p": "^0.9.0-rc.2", - "@liskhq/lisk-passphrase": "^4.0.0-rc.0", - "@liskhq/lisk-transaction-pool": "^0.7.0-rc.2", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - 
"@liskhq/lisk-tree": "^0.4.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2" + "@liskhq/lisk-p2p": "^0.9.0", + "@liskhq/lisk-passphrase": "^4.0.0", + "@liskhq/lisk-transaction-pool": "^0.7.0", + "@liskhq/lisk-transactions": "^6.0.0", + "@liskhq/lisk-tree": "^0.4.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-p2p/package.json b/elements/lisk-p2p/package.json index 416e15de252..2bab3d90eae 100644 --- a/elements/lisk-p2p/package.json +++ b/elements/lisk-p2p/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-p2p", - "version": "0.9.0-rc.2", + "version": "0.9.0", "description": "Unstructured P2P library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -41,9 +41,9 @@ "disableLocalIPs": "./scripts/disableTestLocalIPs.sh 2 19" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-validator": "^0.8.0", "lodash.shuffle": "4.2.0", "semver": "7.5.2", "socketcluster-client": "14.3.1", diff --git a/elements/lisk-passphrase/package.json b/elements/lisk-passphrase/package.json index 0e576976167..27e6b781cbd 100644 --- a/elements/lisk-passphrase/package.json +++ b/elements/lisk-passphrase/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-passphrase", - "version": "4.0.0-rc.0", + "version": "4.0.0", "description": "Mnemonic passphrase helpers for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-transaction-pool/package.json b/elements/lisk-transaction-pool/package.json index 3007999d4f6..8d4f35ee37d 100644 --- a/elements/lisk-transaction-pool/package.json +++ b/elements/lisk-transaction-pool/package.json @@ 
-1,6 +1,6 @@ { "name": "@liskhq/lisk-transaction-pool", - "version": "0.7.0-rc.2", + "version": "0.7.0", "description": "Transaction pool library for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,8 +36,8 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-utils": "^0.4.0", "debug": "4.3.4" }, "devDependencies": { diff --git a/elements/lisk-transactions/package.json b/elements/lisk-transactions/package.json index 01f821250e3..a7d0317ddf9 100644 --- a/elements/lisk-transactions/package.json +++ b/elements/lisk-transactions/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-transactions", - "version": "6.0.0-rc.2", + "version": "6.0.0", "description": "Utility functions related to transactions according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,9 +35,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-validator": "^0.8.0-rc.2" + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-validator": "^0.8.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-tree/package.json b/elements/lisk-tree/package.json index 8b804cf9dbe..af5ff60e15a 100644 --- a/elements/lisk-tree/package.json +++ b/elements/lisk-tree/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-tree", - "version": "0.4.0-rc.2", + "version": "0.4.0", "description": "Library containing Merkle tree implementations for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -35,8 +35,8 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - 
"@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0" + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-utils": "^0.4.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/elements/lisk-utils/package.json b/elements/lisk-utils/package.json index 1219e673ab1..cfc48d298d0 100644 --- a/elements/lisk-utils/package.json +++ b/elements/lisk-utils/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-utils", - "version": "0.4.0-rc.0", + "version": "0.4.0", "description": "Library containing generic utility functions for use with Lisk-related software", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", diff --git a/elements/lisk-validator/package.json b/elements/lisk-validator/package.json index edcd426bafd..d047c5fde3c 100644 --- a/elements/lisk-validator/package.json +++ b/elements/lisk-validator/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-validator", - "version": "0.8.0-rc.2", + "version": "0.8.0", "description": "Validation library according to the Lisk protocol", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -36,7 +36,7 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-cryptography": "^4.0.0", "ajv": "8.1.0", "ajv-formats": "2.1.1", "debug": "4.3.4", diff --git a/examples/interop/pos-mainchain-fast/package.json b/examples/interop/pos-mainchain-fast/package.json index c1201940363..9157e0ceb98 100644 --- a/examples/interop/pos-mainchain-fast/package.json +++ b/examples/interop/pos-mainchain-fast/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-rc.0", - 
"@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-rc.0", - "lisk-sdk": "^6.0.0-rc.0", + "lisk-commander": "^6.0.0", + "lisk-sdk": "^6.0.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/interop/pos-sidechain-example-one/package.json b/examples/interop/pos-sidechain-example-one/package.json index b16c841b411..97efdb2214b 100644 --- a/examples/interop/pos-sidechain-example-one/package.json +++ b/examples/interop/pos-sidechain-example-one/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-rc.0", - "lisk-sdk": 
"^6.0.0-rc.0", + "lisk-commander": "^6.0.0", + "lisk-sdk": "^6.0.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/interop/pos-sidechain-example-two/package.json b/examples/interop/pos-sidechain-example-two/package.json index 6e70f660c5b..81167a121a6 100644 --- a/examples/interop/pos-sidechain-example-two/package.json +++ b/examples/interop/pos-sidechain-example-two/package.json @@ -108,12 +108,12 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0", + "@liskhq/lisk-framework-chain-connector-plugin": "^0.1.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -121,8 +121,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-rc.0", - "lisk-sdk": "^6.0.0-rc.0", + "lisk-commander": "^6.0.0", + "lisk-sdk": "^6.0.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/examples/pos-mainchain/package.json b/examples/pos-mainchain/package.json index 1fe23bb0767..000ddc1df06 100755 --- a/examples/pos-mainchain/package.json +++ b/examples/pos-mainchain/package.json @@ -114,11 +114,11 @@ } }, "dependencies": { - "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-faucet-plugin": "^0.3.0-rc.0", - "@liskhq/lisk-framework-forger-plugin": "^0.4.0-rc.0", - "@liskhq/lisk-framework-monitor-plugin": "^0.4.0-rc.0", - 
"@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0-rc.0", + "@liskhq/lisk-framework-dashboard-plugin": "^0.3.0", + "@liskhq/lisk-framework-faucet-plugin": "^0.3.0", + "@liskhq/lisk-framework-forger-plugin": "^0.4.0", + "@liskhq/lisk-framework-monitor-plugin": "^0.4.0", + "@liskhq/lisk-framework-report-misbehavior-plugin": "^0.4.0", "@oclif/core": "1.20.4", "@oclif/plugin-autocomplete": "1.3.6", "@oclif/plugin-help": "5.1.19", @@ -126,8 +126,8 @@ "axios": "1.2.0", "fs-extra": "11.1.0", "inquirer": "8.2.5", - "lisk-commander": "^6.0.0-rc.0", - "lisk-sdk": "^6.0.0-rc.0", + "lisk-commander": "^6.0.0", + "lisk-sdk": "^6.0.0", "tar": "6.1.12", "tslib": "2.4.1" }, diff --git a/framework-plugins/lisk-framework-chain-connector-plugin/package.json b/framework-plugins/lisk-framework-chain-connector-plugin/package.json index 809c64596f3..a38b341b124 100644 --- a/framework-plugins/lisk-framework-chain-connector-plugin/package.json +++ b/framework-plugins/lisk-framework-chain-connector-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-chain-connector-plugin", - "version": "0.1.0-rc.5", + "version": "0.1.0", "description": "A plugin used by a relayer node to automatically create and submit Cross Chain Transaction by aggregating off-chain information of a chain", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -34,7 +34,7 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.0.0-rc.5" + "lisk-sdk": "^6.0.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework-plugins/lisk-framework-dashboard-plugin/package.json b/framework-plugins/lisk-framework-dashboard-plugin/package.json index 7712fe0b35b..87e1ef8df6c 100644 --- a/framework-plugins/lisk-framework-dashboard-plugin/package.json +++ b/framework-plugins/lisk-framework-dashboard-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-dashboard-plugin", - "version": "0.3.0-rc.5", + "version": "0.3.0", "description": "A 
plugin for interacting with a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -46,10 +46,10 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-client": "^6.0.0-rc.4", + "@liskhq/lisk-client": "^6.0.0", "express": "4.18.2", "json-format-highlight": "1.0.4", - "lisk-sdk": "^6.0.0-rc.5", + "lisk-sdk": "^6.0.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0", diff --git a/framework-plugins/lisk-framework-faucet-plugin/package.json b/framework-plugins/lisk-framework-faucet-plugin/package.json index 8658e9b788b..eb782ce2e4a 100644 --- a/framework-plugins/lisk-framework-faucet-plugin/package.json +++ b/framework-plugins/lisk-framework-faucet-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-faucet-plugin", - "version": "0.3.0-rc.5", + "version": "0.3.0", "description": "A plugin for distributing testnet tokens from a newly developed blockchain application.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -47,15 +47,15 @@ }, "dependencies": { "@csstools/normalize.css": "12.0.0", - "@liskhq/lisk-api-client": "^6.0.0-rc.4", - "@liskhq/lisk-client": "^6.0.0-rc.4", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-api-client": "^6.0.0", + "@liskhq/lisk-client": "^6.0.0", + "@liskhq/lisk-cryptography": "^4.0.0", + "@liskhq/lisk-transactions": "^6.0.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0", "axios": "1.2.0", "express": "4.18.2", - "lisk-sdk": "^6.0.0-rc.5", + "lisk-sdk": "^6.0.0", "react": "^17.0.1", "react-dom": "^17.0.1", "react-router-dom": "^5.2.0" diff --git a/framework-plugins/lisk-framework-forger-plugin/package.json b/framework-plugins/lisk-framework-forger-plugin/package.json index ff624cff1b3..c7b880d545e 100644 --- 
a/framework-plugins/lisk-framework-forger-plugin/package.json +++ b/framework-plugins/lisk-framework-forger-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-forger-plugin", - "version": "0.4.0-rc.5", + "version": "0.4.0", "description": "A plugin for lisk-framework that monitors configured validators forging activity and stakers information.", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,10 +40,10 @@ "dependencies": { "debug": "4.3.4", "fs-extra": "11.1.0", - "lisk-sdk": "^6.0.0-rc.5" + "lisk-sdk": "^6.0.0" }, "devDependencies": { - "@liskhq/lisk-api-client": "^6.0.0-rc.4", + "@liskhq/lisk-api-client": "^6.0.0", "@types/debug": "4.1.5", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/framework-plugins/lisk-framework-monitor-plugin/package.json b/framework-plugins/lisk-framework-monitor-plugin/package.json index 8b120f46ae5..eb0fffe27d1 100644 --- a/framework-plugins/lisk-framework-monitor-plugin/package.json +++ b/framework-plugins/lisk-framework-monitor-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-monitor-plugin", - "version": "0.4.0-rc.5", + "version": "0.4.0", "description": "A plugin for lisk-framework that provides network statistics of the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -40,7 +40,7 @@ "express": "4.18.2", "express-rate-limit": "6.7.0", "ip": "1.1.5", - "lisk-sdk": "^6.0.0-rc.5" + "lisk-sdk": "^6.0.0" }, "devDependencies": { "@types/cors": "2.8.12", diff --git a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json index 303dc25ad50..a72fbe4b745 100644 --- a/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json +++ b/framework-plugins/lisk-framework-report-misbehavior-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@liskhq/lisk-framework-report-misbehavior-plugin", - "version": "0.4.0-rc.5", + 
"version": "0.4.0", "description": "A plugin for lisk-framework that provides automatic detection of validator misbehavior and sends a reportValidatorMisbehaviorTransaction to the running node", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -38,9 +38,9 @@ "build:check": "node -e \"require('./dist-node')\"" }, "dependencies": { - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-cryptography": "^4.0.0", "fs-extra": "11.1.0", - "lisk-sdk": "^6.0.0-rc.5" + "lisk-sdk": "^6.0.0" }, "devDependencies": { "@types/jest": "29.2.3", diff --git a/framework/package.json b/framework/package.json index cedecb538ff..74a5b4cba0f 100644 --- a/framework/package.json +++ b/framework/package.json @@ -1,6 +1,6 @@ { "name": "lisk-framework", - "version": "0.11.0-rc.5", + "version": "0.11.0", "description": "Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -42,17 +42,17 @@ }, "dependencies": { "@chainsafe/blst": "0.2.9", - "@liskhq/lisk-api-client": "^6.0.0-rc.4", - "@liskhq/lisk-chain": "^0.5.0-rc.4", - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-api-client": "^6.0.0", + "@liskhq/lisk-chain": "^0.5.0", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", "@liskhq/lisk-db": "0.3.10", - "@liskhq/lisk-p2p": "^0.9.0-rc.2", - "@liskhq/lisk-transaction-pool": "^0.7.0-rc.2", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - "@liskhq/lisk-tree": "^0.4.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2", + "@liskhq/lisk-p2p": "^0.9.0", + "@liskhq/lisk-transaction-pool": "^0.7.0", + "@liskhq/lisk-transactions": "^6.0.0", + "@liskhq/lisk-tree": "^0.4.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0", "bunyan": "1.8.15", "debug": "4.3.4", "eventemitter2": "6.4.9", @@ -64,7 +64,7 @@ "zeromq": "6.0.0-beta.6" }, "devDependencies": { - "@liskhq/lisk-passphrase": 
"^4.0.0-rc.0", + "@liskhq/lisk-passphrase": "^4.0.0", "@types/bunyan": "1.8.6", "@types/jest": "29.2.3", "@types/jest-when": "3.5.2", diff --git a/protocol-specs/package.json b/protocol-specs/package.json index 88e7a031fbb..76594a2bf29 100644 --- a/protocol-specs/package.json +++ b/protocol-specs/package.json @@ -19,10 +19,10 @@ }, "dependencies": { "@liskhq/bignum": "1.3.1", - "@liskhq/lisk-codec": "0.4.0-rc.2", - "@liskhq/lisk-cryptography": "4.0.0-rc.2", - "@liskhq/lisk-passphrase": "4.0.0-rc.0", - "@liskhq/lisk-validator": "0.8.0-rc.2", + "@liskhq/lisk-codec": "0.4.0", + "@liskhq/lisk-cryptography": "4.0.0", + "@liskhq/lisk-passphrase": "4.0.0", + "@liskhq/lisk-validator": "0.8.0", "protobufjs": "6.11.3" }, "devDependencies": { diff --git a/sdk/package.json b/sdk/package.json index 2057c0c9a9d..6cbe455ea49 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,6 +1,6 @@ { "name": "lisk-sdk", - "version": "6.0.0-rc.5", + "version": "6.0.0", "description": "Official SDK for the Lisk blockchain application platform", "author": "Lisk Foundation , lightcurve GmbH ", "license": "Apache-2.0", @@ -29,19 +29,19 @@ "build": "tsc" }, "dependencies": { - "@liskhq/lisk-api-client": "^6.0.0-rc.4", - "@liskhq/lisk-chain": "^0.5.0-rc.4", - "@liskhq/lisk-codec": "^0.4.0-rc.2", - "@liskhq/lisk-cryptography": "^4.0.0-rc.2", + "@liskhq/lisk-api-client": "^6.0.0", + "@liskhq/lisk-chain": "^0.5.0", + "@liskhq/lisk-codec": "^0.4.0", + "@liskhq/lisk-cryptography": "^4.0.0", "@liskhq/lisk-db": "0.3.10", - "@liskhq/lisk-p2p": "^0.9.0-rc.2", - "@liskhq/lisk-passphrase": "^4.0.0-rc.0", - "@liskhq/lisk-transaction-pool": "^0.7.0-rc.2", - "@liskhq/lisk-transactions": "^6.0.0-rc.2", - "@liskhq/lisk-tree": "^0.4.0-rc.2", - "@liskhq/lisk-utils": "^0.4.0-rc.0", - "@liskhq/lisk-validator": "^0.8.0-rc.2", - "lisk-framework": "^0.11.0-rc.5" + "@liskhq/lisk-p2p": "^0.9.0", + "@liskhq/lisk-passphrase": "^4.0.0", + "@liskhq/lisk-transaction-pool": "^0.7.0", + "@liskhq/lisk-transactions": 
"^6.0.0", + "@liskhq/lisk-tree": "^0.4.0", + "@liskhq/lisk-utils": "^0.4.0", + "@liskhq/lisk-validator": "^0.8.0", + "lisk-framework": "^0.11.0" }, "devDependencies": { "eslint": "8.28.0",